Skip to content

Commit

Permalink
First commit, easy Dynatrace locations manager
Browse files Browse the repository at this point in the history
  • Loading branch information
Forcebyte committed Oct 31, 2021
1 parent 7081909 commit 3a32497
Show file tree
Hide file tree
Showing 6 changed files with 237 additions and 2 deletions.
53 changes: 51 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,2 +1,51 @@
# dynatrace-privatelocation-sync
Ad-Hoc Python script that syncs Synthetic nodes to locations based on metadata
<h4 align="center">
<img alt="header pic" src="src/dynatrace_logo.png">
</h4>

# Dynatrace Private Synthetic Locations Sync


At the moment, Dynatrace does not support any semblance of linking a private synthetics agent to a 'location' at runtime - this is a huge problem in a majority of cases (including where you run these sorts of agents in a scaled manner: each private instance you scale out to must be manually registered to a location, which is both annoying and problematic at scale).

This script was made to combat that: it allows Private Synthetic Locations to be automatically updated and defined based on specific metadata that the node reports in the [**Synthetic Node API**](https://www.dynatrace.com/support/help/dynatrace-api/environment-api/synthetic/synthetic-nodes/get-node/) - today this includes

- IP Block type (E.g. IP Prefix)
- Synthetic Node Name

## Quickstart

By default, the script expects the following
- Environment Variables `dynatracetoken` and `dynatracetenant` are set
- Proper metadata provided in the `locations` folder exists

An easy quickstart would be to define these locally and run the example script:

```bash
export dynatracetoken='mycooltoken'
export dynatracetenant='isa2131'
python3 locationsManager.py
```

## Adding/Removing Definitions

Definitions of a Private Synthetic Location group are managed within the 'locations' folder of this repository - when parsing through this folder the script will
- Fetch all files within the folder
- Dynamically pull 'SyntheticData' configuration from each folder

Definitions can be grouped in whatever way you wish; ideally I'd recommend grouping them based on 'environment' (e.g. eng, nonp, and prod respectively).

```yaml
metadata:
# Dictates whether we parse over the file
Active: True
Name: "Production Location Metadata"
Type: ipBlock

syntheticData:
# Address we look for
- '172.16':
# Custom prefix name (only used in logging)
prefixName: 'Private 172 Address space'
# This is the Synthetic Location ID as it appears in the API
syntheticLocation: 'SYNTHETIC_LOCATION-AAAA'
```
15 changes: 15 additions & 0 deletions locations/example-locationblock.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
metadata:
Active: True
Name: "Production Location Metadata"
Type: ipBlock

syntheticData:
- '172.16':
prefixName: 'Private 172 Address space'
# This is the Synthetic Location ID as it appears in the API
syntheticLocation: 'SYNTHETIC_LOCATION-AAAA'

- '192.168':
prefixName: 'Private 192 Address space'
# This is the Synthetic Location ID as it appears in the API
syntheticLocation: 'SYNTHETIC_LOCATION-BBBB'
13 changes: 13 additions & 0 deletions locationsManager.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import logging

# Initialize Logger
# NOTE(review): this configures a logger named 'main', but the manager
# submodules create their own loggers under their module __name__ and attach
# their own stdout handlers - presumably this was meant as app-level setup;
# confirm whether the 'main' logger is actually used anywhere.
logger = logging.getLogger('main')
logger.setLevel(logging.INFO)
logger.propagate = False

# Imported after the logger setup (mid-file on purpose, not PEP 8 order).
from manager import parser, dynatrace_utils

if __name__ == '__main__':
    # Load YAML location metadata + credentials from the environment, then
    # sync every synthetic node onto its matching private location.
    args = parser.LocationArguments()
    location_management = dynatrace_utils.locationsManager(args)
    location_management.parse_metadata()
81 changes: 81 additions & 0 deletions manager/dynatrace_utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
import requests
import logging
import sys
from collections import defaultdict

# Substantiate logger
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler(sys.stdout))

class locationsManager():
    """
    locationsManager - Primary handler for Deployment of Locations API.

    Fetches every private synthetic node from the tenant, matches each node's
    IP addresses against the metadata loaded by ``manager.parser``, and PUTs
    the resulting node lists onto their matching synthetic locations.
    """

    def __init__(self, args):
        # args: a manager.parser.LocationArguments instance
        self.dynatrace_tenant = args.dyantrace_tenant
        # NOTE: These are based on Dynatrace Cloud, change these URL formats if you are using Managed SaaS
        self.dynatraceURL = {
            'node': f"https://{self.dynatrace_tenant}.live.dynatrace.com/api/v1/synthetic/nodes",
            # BUGFIX: this previously pointed at .../synthetic/nodes, so every
            # location fetch/update hit the wrong API resource.
            'location': f"https://{self.dynatrace_tenant}.live.dynatrace.com/api/v1/synthetic/locations",
            'default': f"https://{self.dynatrace_tenant}.live.dynatrace.com"
        }
        # BUGFIX: the parser exposes this attribute as 'dynatrace_credentials';
        # the old 'args.dynatracecredentials' raised AttributeError.
        self.dynatrace_credentials = args.dynatrace_credentials
        self.metadict = args.metadict

    def parse_metadata(self):
        """
        parse_metadata - fetches each Private Synthetic node, maps every node
        to a synthetic location via its IP block, and patches the matching
        locations with the collected node entity ids.
        """
        nodes_to_update = defaultdict(list)
        node_information = self.__fetch_node_block()
        for node in node_information['nodes']:
            logger.info(f"Finding IpBlock for Synthetic Location to {node['hostname']}")
            for ip_block in node['ips']:
                item = self.__parse_ipblock(ip_block)
                if item and item.get('syntheticLocation'):
                    logger.info(f"Node IP {ip_block} is in block item {item['prefixName']}")
                    nodes_to_update[item['syntheticLocation']].append(node['entityId'])
        # Once all the nodes are identified, patch all synthetic locations
        self.__patch_synthetic_location(nodes_to_update)

    def __parse_ipblock(self, ip_block):
        """
        Return the metadata entry (the prefixName/syntheticLocation dict) whose
        configured prefix matches the first two octets of ip_block, or None.
        """
        # BUGFIX: the slice belongs on the split result, not on the separator
        # string ('.'[0:2] is just '.'), so the full IP used to be compared
        # against the two-octet keys and never matched.
        prefix = '.'.join(ip_block.split('.')[0:2])
        for block in self.metadict:
            if prefix in block:
                # BUGFIX: the metadata dict is keyed by prefix with the
                # definition nested inside; the outer dict has no
                # 'prefixName'/'syntheticLocation' keys, so return the
                # inner definition.
                entry = block[prefix]
                logger.debug(f"IpBlock is in {entry['prefixName']} - Adding synthetic agent list to update")
                return entry
            logger.debug(f"IpBlock {ip_block} is not in block {list(block.keys())} - continuing")
        return None

    def __fetch_node_block(self):
        # Fetches all synthetic nodes, and returns them in dict format
        item = requests.get(
            url=self.dynatraceURL['node'],
            headers={'Authorization': f"Api-Token {self.dynatrace_credentials['token']}", 'Content-Type': 'application/json'}
        )
        # Fail loudly on auth/API errors instead of failing later on .json()
        item.raise_for_status()
        return item.json()

    def __fetch_synthetic_location(self, location_name):
        # Fetch a single synthetic location definition by its entity id
        item = requests.get(
            url=f"{self.dynatraceURL['location']}/{location_name}",
            headers={'Authorization': f"Api-Token {self.dynatrace_credentials['token']}", 'Content-Type': 'application/json'}
        )
        item.raise_for_status()
        return item.json()

    def __patch_synthetic_location(self, nodes_to_update):
        # Issues a PUT per synthetic location, assigning its computed node list
        for synthetic_location, nodes in nodes_to_update.items():
            # first, fetch the existing synthetic location ID to get the existing metadata to include in PUT request
            synthetic_location_data = self.__fetch_synthetic_location(synthetic_location)
            logger.info(f"Updating {synthetic_location} with nodes {nodes}")
            synthetic_location_data['nodes'] = nodes
            item = requests.put(
                url=f"{self.dynatraceURL['location']}/{synthetic_location}",
                headers={'Authorization': f"Api-Token {self.dynatrace_credentials['token']}", "Content-Type": "application/json"},
                json=synthetic_location_data
            )
            item.raise_for_status()
77 changes: 77 additions & 0 deletions manager/parser.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
import os
import logging
import sys
import yaml

# Fetch StreamHandler from root

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler(sys.stdout))

def is_local():
    """Return True when the ``local`` environment variable is set to a non-empty value."""
    # An unset variable or an empty string both count as "not local".
    return bool(os.environ.get('local'))

class LocationArguments():
    """ Primary Location Argument Definitions

    Parses all YAML files within a specific directory, along with local
    environment variables, and returns a classful definition of all arguments.

    Attributes exposed to consumers:
        is_local              - True when running locally (``local`` env var set)
        metapath              - directory scanned for YAML metadata files
        metadict              - list of syntheticData entries from active files
        dynatrace_credentials - {'token': <api token>}
        dyantrace_tenant      - tenant id (name kept as-is, including the typo,
                                because manager.dynatrace_utils reads it)
    """
    def __init__(self):
        self.is_local = is_local()
        self.__parse_dir_arguments()
        self.__parse_meta_config()
        self.dynatrace_credentials = {
            'token': os.environ.get('dynatracetoken', '')
        }
        self.dyantrace_tenant = os.environ.get('dynatracetenant', 'replaceme')

        # Warn about any missing vars
        required_vars = ['dynatracetoken', 'dynatracetenant']
        for var in required_vars:
            if os.environ.get(var) is None:
                logger.warning(f"Environment variable {var} appears to be missing - attempting to use default value")

    def __parse_dir_arguments(self):
        # Static DIR arguments, determines the location that we parse YAML files upon
        self.metapath = './locations'

    def __parse_meta_config(self):
        """
        __parse_meta_config - Generates a large 'meta' dict in response to parsing each of the metadata files
        """
        metadata_files = []
        try:
            for dir_name, _subdir_list, file_list in os.walk(self.metapath):
                for file in file_list:
                    if file.endswith('.yaml'):
                        logger.info(f"Found MetaData File: {file}")
                        # BUGFIX: keep the full path - the bare filename broke
                        # opening files found in nested sub-directories.
                        metadata_files.append(os.path.join(dir_name, file))
        except Exception as err:
            if self.is_local:
                logger.error(err)
                raise ValueError(f"Unable to parse directory {self.metapath} - raising error")
            else:
                logger.error(err)
                logger.warning(f"Unable to parse {self.metapath} - skipping...")

        # Now that we have a generic listing of each metadata path, parse each of those files into a single config dict
        config = []
        for path in metadata_files:
            with open(path, 'r') as stream:
                try:
                    localconfig = yaml.safe_load(stream)
                    if localconfig['metadata']['Active']:
                        config.extend(localconfig.pop('syntheticData'))
                    else:
                        logger.info(f"Locations file {path} is not marked as 'active' under metadata -> Active, ignoring")
                except yaml.YAMLError as err:
                    logger.error(err)
                    logger.warning(f"Unable to parse {path} - skipping...")
        self.metadict = config
Binary file added src/dynatrace_logo.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.

0 comments on commit 3a32497

Please sign in to comment.