Skip to content

Commit

Permalink
Merge pull request #48 from crshanks/crshanks-current
Browse files Browse the repository at this point in the history
Merging a series of updates and fixes from crshanks / crshanks-current branch.
  • Loading branch information
crshanks authored Oct 17, 2024
2 parents d0daffb + 567f932 commit 1e52301
Show file tree
Hide file tree
Showing 17 changed files with 1,352 additions and 33 deletions.
21 changes: 16 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -151,7 +151,7 @@ During migratepolicies the stored alert_channels can be used by passing --useLoc

#### 4) python3 migratemonitors.py

`usage: migratemonitors.py --fromFile FROMFILE --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] [--targetApiKey TARGETAPIKEY] --timeStamp TIMESTAMP [--useLocal] [--minionMappingFile]`
`usage: migratemonitors.py --fromFile FROMFILE --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] [--targetApiKey TARGETAPIKEY] --timeStamp TIMESTAMP [--useLocal] [--minionMappingFile MINIONMAPPINGFILE]`

Parameter | Note
------------- | --------------------------------------------------------------------------------------------------------
Expand Down Expand Up @@ -326,19 +326,30 @@ usage: migrate_apm.py --fromFile FROMFILE --sourceAccount SOURCEACCOUNT [--sourc

#### 8) python3 migrate_dashboards.py

usage: migrate_dashboards.py [-h] --fromFile FROMFILE --sourceAccount [--sourceRegion SOURCEREGION]
`usage: migrate_dashboards.py [-h] --fromFile FROMFILE --sourceAccount [--sourceRegion SOURCEREGION]
SOURCEACCOUNT --sourceApiKey SOURCEAPIKEY
--targetAccount TARGETACCOUNT [--targetRegion TARGETREGION]
[--targetApiKey TARGETAPIKEY]
[--targetApiKey TARGETAPIKEY] [--accountMappingFile ACCOUNTMAPPINGFILE]`

Migrate dashboards between accounts, including modifying queries to point to the new target account. The fetchentities.py script can help create the file to pass with fromFile.

Parameter | Note
------------- | --------------------------------------------------------------------------------------------------------
fromFile | Must contain dashboard names one per line. The fetchentities.py script can be used to help generate this list of dashboards.
sourceAccount | Account to fetch dashboards from
sourceRegion | Optional region us (default) or eu
sourceApiKey | This should be a User API Key for sourceAccount for a user with admin (or add on / custom role equivalent) access to Dashboards
targetAccount | Account to migrate dashboards to
targetRegion | Optional region us (default) or eu
targetApiKey | This should be a User API Key for targetAccount for a user with admin (or add on / custom role equivalent) access to Dashboards
accountMappingFile | Map account ids to alternatives using a dictionary in a [JSON file](account_mapping.json). Useful when moving between regions, e.g. from the us to eu region.

#### 9) python3 migratetags.py

usage: migratetags.py [-h] --fromFile FROMFILE --sourceAccount
`usage: migratetags.py [-h] --fromFile FROMFILE --sourceAccount
SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY
--targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] --targetApiKey TARGETAPIKEY
[--apm --browser --dashboards --infrahost --infraint --lambda --mobile --securecreds --synthetics]
[--apm --browser --dashboards --infrahost --infraint --lambda --mobile --securecreds --synthetics]`

Migrate entity tags between entities with matching names and entity types.

Expand Down
4 changes: 4 additions & 0 deletions account_mapping.json.sample
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
{
"1234567": "9876543",
"2345678": "8765432"
}
15 changes: 9 additions & 6 deletions fetchentities.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ def configure_parser():
help='Pass --workloads to list matching Workload entities')
parser.add_argument('--tagName', nargs=1, required=False, help='(Optional) Tag name to use when filtering results. Required if --tagValue is passed.')
parser.add_argument('--tagValue', nargs=1, required=False, help='(Optional) Tag value to use when filtering results. Required if --tagName is passed.')
parser.add_argument('--assessment', dest='assessment', required=False, action='store_true', help='Pass --assessment to prefix entities, with account id')
return parser


Expand Down Expand Up @@ -71,17 +72,19 @@ def parse_entity_types(args):


def fetch_entities(src_account_id, src_api_key, entity_types, output_file, *,
tag_name=None, tag_value=None, src_region='us'):
tag_name=None, tag_value=None, src_region='us', assessment=None):
entity_names = []
for entity_type in entity_types:
entities = ec.gql_get_entities_by_type(src_api_key, entity_type, src_account_id, tag_name, tag_value, src_region)
for entity in entities['entities']:
entity_names.append(entity['name'])
entity_names.append(store.sanitize(entity['name']))
entity_names_file = store.create_output_file(output_file)
with entity_names_file.open('a') as entity_names_out:
for entity_name in entity_names:
name = store.sanitize(entity_name)
entity_names_out.write(name + "\n")
if assessment:
entity_names_out.write(src_account_id + "," + entity_name + "\n")
else:
entity_names_out.write(entity_name + "\n")
entity_names_out.close()
logger.info("Wrote %s entities to file %s",len(entity_names), output_file)

Expand All @@ -106,10 +109,10 @@ def main():
src_region = utils.ensure_source_region(args)
print_params(args, src_api_key, entity_types, src_region)
if args.tagName is None:
fetch_entities(args.sourceAccount[0], src_api_key, entity_types, args.toFile[0], src_region=src_region)
fetch_entities(args.sourceAccount[0], src_api_key, entity_types, args.toFile[0], src_region=src_region, assessment=args.assessment)
else:
fetch_entities(args.sourceAccount[0], src_api_key, entity_types, args.toFile[0], tag_name=args.tagName[0],
tag_value=args.tagValue[0], src_region=src_region)
tag_value=args.tagValue[0], src_region=src_region, assessment=args.assessment)


if __name__ == '__main__':
Expand Down
8 changes: 4 additions & 4 deletions fetchmonitors.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,16 +79,16 @@ def populate_secure_credentials(monitor_json, src_account, insights_key, region)


def fetch_monitors(api_key, account_id, output_file, insights_key='', region='us'):
timestamp = time.strftime("%Y-%m%d-%H%M%S")
storage_dir = store.create_storage_dirs(account_id, timestamp)
monitor_names_file = store.create_output_file(output_file)
all_monitors_def_json = mc.fetch_all_monitors(api_key, region)
monitors_count = len(all_monitors_def_json)
if monitors_count <= 0:
logger.warn("No monitors found in account " + account_id)
sys.exit()
return timestamp
else:
logger.info("Monitors returned %d", monitors_count)
timestamp = time.strftime("%Y-%m%d-%H%M%S")
storage_dir = store.create_storage_dirs(account_id, timestamp)
monitor_names_file = store.create_output_file(output_file)
with monitor_names_file.open('a') as monitor_names_out:
for monitor_def_json in all_monitors_def_json:
monitor_json = {'definition': monitor_def_json}
Expand Down
98 changes: 98 additions & 0 deletions fetchnotifications.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
import argparse
import os
import json
import library.migrationlogger as nrlogger
import library.clients.notificationsclient as notificationsclient
import library.localstore as store
import library.utils as utils

logger = nrlogger.get_logger(os.path.basename(__file__))
nc = notificationsclient.NotificationsClient()


def configure_parser():
    """Build the command-line argument parser for the notification fetch script."""
    arg_parser = argparse.ArgumentParser(description='Fetch and store notifications')
    # Account IDs come either from a single --account or from an --accounts file.
    arg_parser.add_argument('--account', type=str, nargs=1, required=False, help='Account ID')
    arg_parser.add_argument('--accounts', type=str, nargs=1, required=False, help='Path to file with account IDs')
    arg_parser.add_argument('--userApiKey', type=str, nargs=1, required=True, help='User API Key')
    arg_parser.add_argument('--region', nargs=1, type=str, required=False, help='sourceRegion us(default) or eu')
    # Boolean flags selecting which notification configuration type to query.
    arg_parser.add_argument('--destinations', action='store_true', dest='destinations', required=False, help='Query destinations')
    arg_parser.add_argument('--channels', action='store_true', dest='channels', required=False, help='Query channels')
    return arg_parser


def fetch_destinations(user_api_key, account_id, region, accounts_file=None):
    """Fetch notification destinations and return them keyed by destination id."""
    return get_config(nc.destinations, user_api_key, account_id, region, accounts_file)


def fetch_channels(user_api_key, account_id, region, accounts_file=None):
    """Fetch notification channels and return them keyed by channel id."""
    return get_config(nc.channels, user_api_key, account_id, region, accounts_file)


def get_config(func, user_api_key, account_id, region, accounts_file):
    """Page through one notification configuration type for one or more accounts.

    Args:
        func: client method to query (nc.destinations or nc.channels); its
            __name__ doubles as the response field under 'aiNotifications'.
        user_api_key: New Relic User API key.
        account_id: single account ID, or None to read IDs from accounts_file.
        region: 'us' or 'eu'.
        accounts_file: path to a file with one account ID per line.

    Returns:
        dict mapping config id -> config entity (accumulated across accounts).
        Also persists per-account results and a summary CSV via the store module.
    """
    if account_id:
        acct_ids = [account_id]
    else:
        acct_ids = store.load_names(accounts_file)
    configs_by_id = {}
    # 'destinations' or 'channels' — selects both the GraphQL field and the store call.
    field = func.__name__
    for acct_id in acct_ids:
        done = False
        cursor = None
        while not done:
            try:
                response = nc.query(func, user_api_key, int(acct_id), region, cursor)
                logger.debug(json.dumps(response))
                # Check the top-level error BEFORE drilling into the payload:
                # on a failed request the nested keys are absent and would
                # raise KeyError, making this branch unreachable.
                if 'error' in response:
                    logger.error(f'Could not fetch {field} for account {acct_id}')
                    logger.error(response['error'])
                    break
                result = response['response']['data']['actor']['account']['aiNotifications'][field]
                config = result['entities']
                cursor = result['nextCursor']
                error = result['error']
                if error is not None:
                    logger.error(f'Could not fetch {field} for account {acct_id}')
                    logger.error(error)
                    break
                if cursor is None:
                    done = True
            except Exception:
                # Stop paging this account: the cursor is unchanged, so retrying
                # would repeat the same failing request forever.
                logger.exception(f'Error querying {field} for account {acct_id}')
                break
            else:
                account_configs_by_id = {}
                for element in config:
                    element['accountId'] = acct_id
                    configs_by_id.setdefault(element['id'], element)
                    account_configs_by_id.setdefault(element['id'], element)
                if field == 'destinations':
                    store.save_notification_destinations(acct_id, account_configs_by_id)
                if field == 'channels':
                    store.save_notification_channels(acct_id, account_configs_by_id)
    logger.info(configs_by_id)
    store.save_config_csv(field, configs_by_id)
    return configs_by_id


def main():
    """Entry point: parse CLI args and fetch the requested notification config."""
    args = configure_parser().parse_args()
    api_key = utils.ensure_user_api_key(args)
    if not api_key:
        utils.error_and_exit('userApiKey', 'ENV_USER_API_KEY')
    region = utils.ensure_region(args)
    account_id = None
    if args.account:
        account_id = args.account[0]
    accounts_file = None
    if args.accounts:
        accounts_file = args.accounts[0]
    # At most one mode runs; --destinations takes precedence when both flags are set.
    if args.destinations:
        fetch_destinations(api_key, account_id, region, accounts_file)
    elif args.channels:
        fetch_channels(api_key, account_id, region, accounts_file)
    else:
        logger.info("pass [--destinations | --channels] to fetch configuration")


if __name__ == '__main__':
    main()
79 changes: 79 additions & 0 deletions fetchworkflows.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
import argparse
import os
import json
import library.migrationlogger as nrlogger
import library.clients.workflowsclient as workflowsclient
import library.localstore as store
import library.utils as utils

logger = nrlogger.get_logger(os.path.basename(__file__))
wc = workflowsclient.WorkflowsClient()


def configure_parser():
    """Build the command-line argument parser for the workflow fetch script."""
    arg_parser = argparse.ArgumentParser(description='Fetch and store workflows')
    # Account IDs come either from a single --account or from an --accounts file.
    arg_parser.add_argument('--account', type=str, nargs=1, required=False, help='Account ID')
    arg_parser.add_argument('--accounts', type=str, nargs=1, required=False, help='Path to file with account IDs')
    arg_parser.add_argument('--userApiKey', type=str, nargs=1, required=True, help='User API Key')
    arg_parser.add_argument('--region', nargs=1, type=str, required=False, help='sourceRegion us(default) or eu')
    return arg_parser


def fetch_workflows(user_api_key, account_id, region, accounts_file=None):
    """Fetch workflows for one account (or a file of accounts), keyed by workflow id."""
    return get_config(wc.workflows, user_api_key, account_id, region, accounts_file)


def get_config(func, user_api_key, account_id, region, from_file):
    """Page through workflows for one or more accounts.

    Args:
        func: client method to query (wc.workflows); its __name__ doubles as
            the response field under 'aiWorkflows'.
        user_api_key: New Relic User API key.
        account_id: single account ID, or None to read IDs from from_file.
        region: 'us' or 'eu'.
        from_file: path to a file with one account ID per line.

    Returns:
        dict mapping workflow id -> workflow entity (accumulated across accounts).
        Also persists per-account results and a summary CSV via the store module.
    """
    if account_id:
        acct_ids = [account_id]
    else:
        acct_ids = store.load_names(from_file)
    configs_by_id = {}
    # 'workflows' — selects the GraphQL response field under 'aiWorkflows'.
    field = func.__name__
    for acct_id in acct_ids:
        done = False
        cursor = None
        while not done:
            try:
                response = wc.query(func, user_api_key, int(acct_id), region, cursor)
                logger.debug(json.dumps(response))
                # Check the top-level error BEFORE drilling into the payload:
                # on a failed request the nested keys are absent and would
                # raise KeyError, making this branch unreachable.
                if 'error' in response:
                    logger.error(f'Could not fetch workflows for account {acct_id}')
                    logger.error(response['error'])
                    break
                result = response['response']['data']['actor']['account']['aiWorkflows'][field]
                config = result['entities']
                cursor = result['nextCursor']
                # No error attribute for aiWorkflows
                if cursor is None:
                    done = True
            except Exception:
                # Stop paging this account: the cursor is unchanged, so retrying
                # would repeat the same failing request forever.
                logger.exception(f'Error querying {field} for account {acct_id}')
                break
            else:
                account_configs_by_id = {}
                for element in config:
                    element['accountId'] = acct_id
                    configs_by_id.setdefault(element['id'], element)
                    account_configs_by_id.setdefault(element['id'], element)
                store.save_workflows(acct_id, account_configs_by_id)
    logger.info(configs_by_id)
    store.save_config_csv(field, configs_by_id)
    return configs_by_id


def main():
    """Entry point: parse CLI args and fetch workflows for the given account(s)."""
    args = configure_parser().parse_args()
    api_key = utils.ensure_user_api_key(args)
    if not api_key:
        utils.error_and_exit('userApiKey', 'ENV_USER_API_KEY')
    region = utils.ensure_region(args)
    account_id = None
    if args.account:
        account_id = args.account[0]
    accounts_file = None
    if args.accounts:
        accounts_file = args.accounts[0]
    fetch_workflows(api_key, account_id, region, accounts_file)


if __name__ == '__main__':
    main()
11 changes: 9 additions & 2 deletions library/clients/entityclient.py
Original file line number Diff line number Diff line change
Expand Up @@ -649,10 +649,12 @@ def tags_diff(src_tags, tgt_tags):
match_found = True
continue
if match_found == False:
tags_arr.append(src_tag)
if (src_tag['key'].startswith('nr.')):
logger.debug(f'Skipping reserved key: {src_tag}')
else:
tags_arr.append(src_tag)
return tags_arr


def mutate_tags_payload(entity_guid, arr_tags, mutate_action):
apply_tags_query = '''mutation($entityGuid: EntityGuid!, $tags: [TaggingTagInput!]!)
{''' + mutate_action + '''(guid: $entityGuid, tags: $tags) {
Expand Down Expand Up @@ -950,10 +952,15 @@ def get_nrql_condition_payload(account_id, condition_id):
policyId
runbookUrl
signal {
aggregationDelay
aggregationMethod
aggregationTimer
aggregationWindow
evaluationDelay
evaluationOffset
fillOption
fillValue
slideBy
}
terms {
operator
Expand Down
Loading

0 comments on commit 1e52301

Please sign in to comment.