From 1ba7ab27120efc20bd4ebe56a111cf1d1603d5df Mon Sep 17 00:00:00 2001 From: Craig Shanks <44571906+crshanks@users.noreply.github.com> Date: Tue, 17 Dec 2024 21:16:04 +0000 Subject: [PATCH] Fix migrate_notifications.py script (#55) --- README.md | 187 ++++++++++++++++++++++----------------- migrate_account.py | 26 ++++-- migrate_notifications.py | 105 ++++++++++++++++++++-- migrate_workflows.py | 126 -------------------------- 4 files changed, 220 insertions(+), 224 deletions(-) delete mode 100644 migrate_workflows.py diff --git a/README.md b/README.md index c5d6cc4..4ddecd3 100644 --- a/README.md +++ b/README.md @@ -66,14 +66,15 @@ The details for each script is provided in the next Usage section. | No. | Use Case | Scripts | | --- | -------------------------- | ------------------- | | 1. | Migrate Monitors | fetchmonitors.py :arrow_right: migratemonitors.py :arrow_right: migratetags.py | -| 2. | Migrate Alert policies | fetchchannels.py(optional) :arrow_right: migratepolicies.py | -| 3. | Migrate Alert conditions | migratepolicies.py :arrow_right: migrateconditions.py | -| 4. | Migrate APM Configurations | migrate_apm.py :arrow_right: migratetags.py | -| 5. | Migrate Dashboards | migrate_dashboards.py :arrow_right: migratetags.py | -| 6. | Update Monitors | updatemonitors.py | -| 7. | Delete Monitors | deletemonitors.py | -| 8. | Migrate Tags | migratetags.py | -| 9. | Update Workload Golden Signals | wlgoldensignals.py | +| 2. | Migrate Alert policies | fetchchannels.py(optional) :arrow_right: store_policies.py :arrow_right: migratepolicies.py | +| 3. | Migrate Alert conditions | store_policies.py :arrow_right: migratepolicies.py :arrow_right: migrateconditions.py | +| 4. | Migrate Alert notifications | store_policies.py :arrow_right: migratepolicies.py :arrow_right: migrateconditions.py :arrow_right: migrate_notifications.py | +| 5. | Migrate APM Configurations | migrate_apm.py :arrow_right: migratetags.py | +| 6. | Migrate Dashboards | migrate_dashboards.py :arrow_right: migratetags.py | +| 7. | Update Monitors | updatemonitors.py | +| 8. | Delete Monitors | deletemonitors.py | +| 9. | Migrate Tags | migratetags.py | +| 10. | Update Workload Golden Signals | wlgoldensignals.py | The following entities and configurations can be migrated: @@ -90,7 +91,7 @@ The following entities and configurations can be migrated: | ----------- | ------------------------- | - [x] Alert Policies and related notification channels -- [x] Notification channels (tested for email, webhook, pagerduty, opsgenie) - credentials populated with dummy values +- [x] Notification destinations, channels, and workflows (tested for email, webhook, pagerduty, opsgenie) - credentials populated with dummy values | Config Type | Alert Conditions | | ----------- | ----------------- | @@ -122,9 +123,9 @@ APM Configuration ``` usage: fetchmonitors.py --sourceAccount SOURCEACCOUNT --region [ us (default) |eu ] - --sourceApiKey SOURCEAPIKEY - --insightsQueryKey INSIGHTSQUERYKEY - --toFile TOFILE + --sourceApiKey SOURCEAPIKEY + --insightsQueryKey INSIGHTSQUERYKEY + --toFile TOFILE ``` Parameter | Note @@ -140,9 +141,9 @@ toFile | should only be a file name e.g. soure-monitors.csv. 
It will a

 #### 3) python3 fetchchannels.py (optional if you want to use --useLocal option during migratepolicies)

-
-`usage: fetchchannels.py --sourceAccount SOURCEACCOUNT [--sourceApiKey SOURCEAPIKEY] --region [ us (default) |eu ]`
-
+```
+usage: fetchchannels.py --sourceAccount SOURCEACCOUNT [--sourceApiKey SOURCEAPIKEY] --region [ us (default) |eu ]
+```
 Fetches alert channels and builds a dictionary mapping channels to policy_id.

 The channels are stored in db/accountId/alert_policies/alert_channels.json
@@ -150,8 +151,9 @@ The channels are stored in db/accountId/alert_policies/alert_channels.json
 During migratepolicies the stored alert_channels can be used by passing --useLocal

 #### 4) python3 migratemonitors.py
-
-`usage: migratemonitors.py --fromFile FROMFILE --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] [--targetApiKey TARGETAPIKEY] --timeStamp TIMESTAMP [--useLocal]`
+```
+usage: migratemonitors.py --fromFile FROMFILE --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] [--targetApiKey TARGETAPIKEY] --timeStamp TIMESTAMP [--useLocal]
+```

 Parameter | Note
 ------------- | --------------------------------------------------------------------------------------------------------
@@ -181,15 +183,23 @@ Comma separated status for each migrated monitor as below.

 A value of 0 CHECK_COUNT for scripted monitors indicates it has not run in the past 7 days.

-#### 5) python3 migratepolicies.py
+#### 5) python3 store_policies.py

-`usage: migratepolicies.py --fromFile FROMFILE --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] [--targetApiKey TARGETAPIKEY] [--useLocal]`
+```
+usage: store_policies.py [-h] --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY
+```
+Saves all alert policies in db/\/alert_policies/alert_policies.json and output/\_policies.csv; the latter is required as input for migratepolicies.py as the --fromFile argument.
+
+#### 6) python3 migratepolicies.py
+**Preconditions:** store_policies.py.
+```
+usage: migratepolicies.py --fromFile FROMFILE --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] [--targetApiKey TARGETAPIKEY] [--useLocal]
+```

 Parameter | Note
 ---------------- | ------------------------------------------------------------------------------------------------------
 fromFile | must contain alert policy names one per line
 fromFileEntities | must contain APM, Browser, or Mobile application names or IDs or APM KT names or IDs (not GUIDs)
-personalApiKey | Personal API Key used for GraphQL API Client calls
 sourceAccount | Account to fetch monitors from
 sourceRegion | Optional region us (default) or eu
 sourceApiKey | User API Key for sourceAccount for a user with admin (or add on / custom role equivalent) access to Alerts
@@ -261,19 +271,20 @@ to move will be the union of both.

 [NAME, POLICY_EXISTED, POLICY_CREATED, STATUS, ERROR, CHANNELS, PUT_CHANNELS]

-#### 6) python3 migrateconditions.py
+#### 7) python3 migrateconditions.py

 **Preconditions:** migratemonitors(if migrating synthetic conditions) and migratepolicies.

 Any target APM , Browser, Mobile apps and Key transactions must be migrated manually.

-`usage: migrateconditions.py [-h] --fromFile FROMFILE --personalApiKey PERSONALAPIKEY --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] [--targetApiKey TARGETAPIKEY] [--matchSourceState] [--synthetics --app_conditions --nrql_conditions --infra_conditions]` +``` +usage: migrateconditions.py [-h] --fromFile FROMFILE --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] [--targetApiKey TARGETAPIKEY] [--matchSourceState] [--synthetics --app_conditions --nrql_conditions --infra_conditions] +``` Parameter | Note -------------- | -------------------------------------------------- fromFile | must contain alert policy names one per line fromFileEntities | must contain APM, Browser, or Mobile application names or IDs or APM KT names or IDs (not GUIDs) -personalApiKey | Personal API Key used for GraphQL API Client calls sourceAccount | Account to fetch monitors from sourceRegion | Optional region us (default) or eu sourceApiKey | User API Key for sourceAccount for a user with admin (or add on / custom role equivalent) access to Alerts @@ -311,24 +322,47 @@ if `--app_conditions` is specified. **Status:** output/sourceAccount_fromFileName_fromFileEntitiesName_targetAccount_conditions.csv -#### 7) python3 migrate_apm.py (Migrate settings for APM apps) +#### 8) python3 migrate_notifications.py (migrate destinations, channels, and workflows) -Migrate APM Apdex configuration settings. **This no longer migrates labels.** Please use migratetags.py instead for tag migrations. -usage: migrate_apm.py --fromFile FROMFILE --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] - --personalApiKey PERSONALAPIKEY --sourceApiKey - SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] - --targetApiKey TARGETAPIKEY [--settings] +**Preconditions:** `store_policies`, `migratepolicies`, and `migrateconditions`. +``` +usage: migrate_notifications.py [-h] --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] [--targetApiKey TARGETAPIKEY] +``` + +Parameter | Note +-------------- | -------------------------------------------------- +sourceAccount | Account to fetch monitors from +sourceRegion | Optional region us (default) or eu +sourceApiKey | User API Key for sourceAccount for a user with admin (or add on / custom role equivalent) access to Alerts +targetAccount | Account to migrate policies to +targetRegion | Optional region us (default) or eu +targetApiKey | User API Key for targetAccount for a user with admin (or add on / custom role equivalent) access to Alerts + +This script migrates notification destinations, channels, and workflows. + +**Warning:** Note that supported destination types are: +1. DESTINATION_TYPE_EMAIL, +1. DESTINATION_TYPE_MOBILE_PUSH, +1. DESTINATION_TYPE_SLACK_LEGACY, +1. DESTINATION_TYPE_WEBHOOK + + +#### 9) python3 migrate_apm.py (Migrate settings for APM apps) + +Migrate APM Apdex configuration settings. **This no longer migrates labels.** Please use migratetags.py instead for tag migrations. 
+``` +usage: migrate_apm.py --fromFile FROMFILE --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] --targetApiKey TARGETAPIKEY [--settings] +``` ##### Note: Ensure target apps are running or were running recently so that the target ids can be picked -#### 8) python3 migrate_dashboards.py +#### 10) python3 migrate_dashboards.py -`usage: migrate_dashboards.py [-h] --fromFile FROMFILE --sourceAccount [--sourceRegion SOURCEREGION] - SOURCEACCOUNT --sourceApiKey SOURCEAPIKEY - --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] - [--targetApiKey TARGETAPIKEY] [--accountMappingFile ACCOUNTMAPPINGFILE]` +``` +usage: migrate_dashboards.py [-h] --fromFile FROMFILE --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] [--targetApiKey TARGETAPIKEY] [--accountMappingFile ACCOUNTMAPPINGFILE] +``` Migrate dashboards between accounts, including modifying queries to point to the new target account. The fetchentities.py script can help create the file to pass with fromFile. @@ -343,12 +377,11 @@ targetRegion | Optional region us (default) or eu targetApiKey | This should be a User API Key for targetAccount for a user with admin (or add on / custom role equivalent) access to Dashboards accountMappingFile | Map account ids to alternatives using a dictionary in a [JSON file](account_mapping.json). Useful when moving between regions, e.g. from the us to eu region. -#### 9) python3 migratetags.py +#### 11) python3 migratetags.py -`usage: migratetags.py [-h] --fromFile FROMFILE --sourceAccount - SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY - --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] --targetApiKey TARGETAPIKEY - [--apm --browser --dashboards --infrahost --infraint --lambda --mobile --securecreds --synthetics]` +``` +usage: migratetags.py [-h] --fromFile FROMFILE --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] --targetApiKey TARGETAPIKEY [--apm --browser --dashboards --infrahost --infraint --lambda --mobile --securecreds --synthetics] +``` Migrate entity tags between entities with matching names and entity types. @@ -372,11 +405,13 @@ securecreds | Pass this flag to migrate Synthetic secure credential entity ta synthetics | Pass this flag to migrate Synthetic monitor entity tags -#### 10) python3 updatemonitors.py **Note:** Must use fetchmonitors before using updatemonitors +#### 12) python3 updatemonitors.py **Note:** Must use fetchmonitors before using updatemonitors Potential use is for renaming/disabling migrated monitors in source account. 
-`usage: updatemonitors.py [-h] --fromFile FROMFILE [--targetApiKey TARGETAPIKEY] --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] --timeStamp TIMESTAMP [--renamePrefix RENAMEPREFIX] [--disable]` +``` +usage: updatemonitors.py [-h] --fromFile FROMFILE [--targetApiKey TARGETAPIKEY] --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] --timeStamp TIMESTAMP [--renamePrefix RENAMEPREFIX] [--disable] +``` Parameter | Note ------------- | ------------------------------------------------------------------------- @@ -399,12 +434,10 @@ output/targetAccount_fromFile_updated_monitors.csv **Status keys:** [STATUS, UPDATED_NAME, UPDATED_STATUS, UPDATED_JSON, ERROR] -#### 11) python3 fetchentities.py - -usage: fetchentities.py [-h] --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY - --toFile FILENAME [--tagName TAGNAME --tagValue TAGVALUE] - [--apm --browser --dashboards --infrahost --infraint --lambda --mobile --securecreds --synthetics] - +#### 13) python3 fetchentities.py +``` +usage: fetchentities.py [-h] --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --toFile FILENAME [--tagName TAGNAME --tagValue TAGVALUE] [--apm --browser --dashboards --infrahost --infraint --lambda --mobile --securecreds --synthetics] +``` Create a file in the output directory that contains entity names from the source account. This can be filtered by using --tagName and --tagValue. This may be beneficial for other migration scripts in this repo that require a fromFile argument. Parameter | Note @@ -427,31 +460,31 @@ synthetics | Pass this flag to list Synthetic monitor entities workload | Pass this flag to list Workloads -#### 12) python3 deletemonitors.py +#### 14) python3 deletemonitors.py -`usage: deletemonitors.py [-h] --fromFile FROMFILE [--targetApiKey TARGETAPIKEY] --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] --timeStamp TIMESTAMP` +``` +usage: deletemonitors.py [-h] --fromFile FROMFILE [--targetApiKey TARGETAPIKEY] --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] --timeStamp TIMESTAMP +``` Will delete monitors listed one per line in --fromFile and stored in db/targetaccount/monitors/timeStamp. The fetchentities.py script can help generate this file. -#### 13) (optional Testing purpose only) python3 deleteallmonitors.py +#### 15) (optional Testing purpose only) python3 deleteallmonitors.py #### Warning: All monitors in target account will be deleted -`usage: deleteallmonitors.py [-h] [--targetApiKey TARGETAPIKEY] --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION]` +``` +usage: deleteallmonitors.py [-h] [--targetApiKey TARGETAPIKEY] --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] +``` deleteallmonitors fetches all the monitors. 
Backs them up in db/accountId/monitors/timeStamp-bakup And deletes all the monitors ##### Note: In case this script is used in error use migratemonitors to restore the backed up monitors -#### 14) (optional) python3 store_policies.py - -usage: store_policies.py [-h] --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY - -Saves all alert polices in db//alert_policies/alert_policies.json - -#### 15) (optional) python3 store_violations.py +#### 16) (optional) python3 store_violations.py +``` usage: store_violations.py [-h] --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --startDate STARTDATE --endDate ENDDATE [--onlyOpen] +``` --sourceAccount SOURCEACCOUNT Source accountId @@ -469,10 +502,10 @@ usage: store_violations.py [-h] --sourceAccount SOURCEACCOUNT [--sourceRegion SO Saves all alert violations in db//alert_violations/alert_violations.json and db//alert_violations/alert_violations.csv -#### 16) (optional) python3 store_policy_entity_map.py - +#### 17) (optional) python3 store_policy_entity_map.py +``` usage: store_policy_entity_map.py [-h] --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --useLocal - +``` Builds a mapping from APM, Browser, and Mobile applications and APM key transactions to and from alert policies for any policies which contain "app conditions" as identified by the @@ -481,36 +514,32 @@ transactions to and from alert policies for any policies which contain Saves the mapping in db//alert_policies/alert_policy_entity_map.json -#### 17) python3 nrmig - Configure appropriate [config.ini](config.ini.example) and run nrmig command +#### 18) python3 nrmig +Configure appropriate [config.ini](config.ini.example) and run nrmig command. - python3 nrmig -c ./config.ini migrate policies +`python3 nrmig -c ./config.ini migrate policies` - python3 nrmig -c ./config.ini migrate conditions +`python3 nrmig -c ./config.ini migrate conditions` -#### 18) python3 fetchalldatatypes +#### 19) python3 fetchalldatatypes --hostsFile should contain hostNames(entityNames) one per line. hostsFile can also be generated by using fetchentities script. - -usage: fetchalldatatypes.py --hostsFile HOSTS_FILE --sourceAccount SOURCE_ACCOUNT_ID --sourceApiKey SOURCE_API_KEY - - --insightsQueryKey INSIGHTS_QUERY_KEY [--region NR_REGION] - +``` +usage: fetchalldatatypes.py --hostsFile HOSTS_FILE --sourceAccount SOURCE_ACCOUNT_ID --sourceApiKey SOURCE_API_KEY --insightsQueryKey INSIGHTS_QUERY_KEY [--region NR_REGION] +``` output : output/.csv file for each entityName with names of metrics and events received from that entity -#### 19) python3 wlgoldensignals.py +#### 20) python3 wlgoldensignals.py Automated script for overriding and resetting golden signals for workloads. ####Note: By default workloads only display 4 golden signals. 
- -usage: wlgoldensignals.py --targetAccount TARGETACCOUNT --targetApiKey TARGETAPIKEY [--targetRegion TARGETREGION] - [--tagName TAGNAME] [--tagValue TAGVALUE] [--goldenSignalsJson GOLDENSIGNALSJSON] - [--resetGoldenSignals] [--domain DOMAIN] [--type TYPE] - +``` +usage: wlgoldensignals.py --targetAccount TARGETACCOUNT --targetApiKey TARGETAPIKEY [--targetRegion TARGETREGION] [--tagName TAGNAME] [--tagValue TAGVALUE] [--goldenSignalsJson GOLDENSIGNALSJSON] [--resetGoldenSignals] [--domain DOMAIN] [--type TYPE] +``` Parameter | Note -------------- | -------------------------------------------------- targetAccount | Account containing the workloads @@ -534,10 +563,6 @@ The above will find workloads having tag Environment=WindowsProduction and then reset the golden signals for domain INFRA and type HOST - - - - ### Logging Logs are stored in logs/migrate.log Logging level can be set in migrationlogger.py. Default level for file and stdout is INFO diff --git a/migrate_account.py b/migrate_account.py index a4497e1..f175c96 100644 --- a/migrate_account.py +++ b/migrate_account.py @@ -1,3 +1,4 @@ +import argparse import os import time @@ -21,7 +22,6 @@ import migrate_notifications as mn import migratepolicies as mp import migratetags as mt -import migrate_workflows as mw import store_policies as store_policies SRC_ACCT = '1234567' @@ -105,7 +105,7 @@ def migrate_step2(): # Migrate notification channels channels_by_source_id = mn.migrate_channels(SRC_ACCT, SRC_API_KEY, SRC_REGION, TGT_ACCT, TGT_API_KEY, TGT_REGION, destinations_by_source_id) # Migrate workflows - workflows_by_source_id = mw.migrate_workflows(SRC_ACCT, SRC_API_KEY, SRC_REGION, TGT_ACCT, TGT_API_KEY, TGT_REGION, channels_by_source_id, policies_by_source_id) + workflows_by_source_id = mn.migrate_workflows(SRC_ACCT, SRC_API_KEY, SRC_REGION, TGT_ACCT, TGT_API_KEY, TGT_REGION, channels_by_source_id, policies_by_source_id) # Migrate APM app_apdex_threshold, end_user_apdex_threshold, and enable_real_user_monitoring settings mapm.migrate_apps(APP_FILE, SRC_ACCT, SRC_API_KEY, SRC_REGION, TGT_ACCT, TGT_API_KEY, TGT_REGION) # Migrate dashboards @@ -114,11 +114,21 @@ def migrate_step2(): mt.migrate_tags(APP_FILE, SRC_ACCT, SRC_REGION, SRC_API_KEY, TGT_ACCT, TGT_REGION, TGT_API_KEY, [ec.APM_APP]) -if __name__ == '__main__': - # cleanup() - fetch() - migrate_step1() - # Redirect apps, then - # migrate_step2() +def main(run_step2_only, run_cleanup): + if run_cleanup: + cleanup() + if run_step2_only: + migrate_step2() + else: + fetch() + migrate_step1() logger.info('Completed migration') + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Migration script') + parser.add_argument('--step2', action='store_true', help='Run only migrate_step2') + parser.add_argument('--cleanup', action='store_true', help='Run cleanup before other steps') + args = parser.parse_args() + + main(args.step2, args.cleanup) diff --git a/migrate_notifications.py b/migrate_notifications.py index 2dc9f76..9491dac 100644 --- a/migrate_notifications.py +++ b/migrate_notifications.py @@ -1,7 +1,9 @@ import os import argparse import fetchnotifications as fetchnotifications +import fetchworkflows as fetchworkflows import library.clients.notificationsclient as notificationsclient +import library.clients.workflowsclient as workflowsclient import library.localstore as store import library.migrationlogger as m_logger import library.utils as utils @@ -9,6 +11,7 @@ log = m_logger.get_logger(os.path.basename(__file__)) nc = 
notificationsclient.NotificationsClient() +wc = workflowsclient.WorkflowsClient() def print_args(args, src_api_key, src_region, tgt_api_key, tgt_region): @@ -30,8 +33,8 @@ def configure_parser(): parser.add_argument('--targetApiKey', nargs=1, type=str, required=True, help='Target API Key, \ or set environment variable ENV_TARGET_API_KEY') parser.add_argument('--targetRegion', type=str, nargs=1, required=False, help='targetRegion us(default) or eu') - parser.add_argument('--destinations', dest='destinations', required=False, action='store_true', help='Migrate destinations') - parser.add_argument('--channels', dest='channels', required=False, action='store_true', help='Migrate channels') + # parser.add_argument('--destinations', dest='destinations', required=False, action='store_true', help='Migrate destinations') + # parser.add_argument('--channels', dest='channels', required=False, action='store_true', help='Migrate channels') return parser @@ -53,6 +56,12 @@ def create_channel(channel, tgt_acct, tgt_api_key, tgt_region): log.warn(f"Unsupported channel type: {channel['type']}, for channel: {channel['name']}") +def create_workflow(workflow, tgt_acct, tgt_api_key, tgt_region): + log.info(f"Creating workflow: {workflow['name']}") + wc.create_workflow(workflow, tgt_api_key, tgt_acct, tgt_region) + log.info(f"Created workflow: {workflow['name']}") + + def migrate_destinations(src_acct, src_api_key, src_region, tgt_acct, tgt_api_key, tgt_region): log.info('Destinations migration started.') destinations_by_source_id = fetchnotifications.fetch_destinations(src_api_key, src_acct, src_region) @@ -82,6 +91,82 @@ def migrate_channels(src_acct, src_api_key, src_region, tgt_acct, tgt_api_key, t return channels_by_source_id +def migrate_workflows(src_acct, src_api_key, src_region, tgt_acct, tgt_api_key, tgt_region, channels_by_source_id, policies_by_source_id): + log.info('Workflows migration started.') + workflows_by_source_id = fetchworkflows.fetch_workflows(src_api_key, src_acct, src_region) + for workflow in workflows_by_source_id.values(): + hasError = False + log.info(f"Workflow name: {workflow['name']}") + # Enrich destinationConfigurations with target channel ids + log.info(f"Enriching destination configurations for target account: {tgt_acct}") + if 'destinationConfigurations' in workflow: + # Splice workflow['destinationConfigurations'] to contain only supported destinations + workflow['destinationConfigurations'][:] = [destination_configuration for destination_configuration in workflow['destinationConfigurations'] if destination_configuration['type'] in notificationsclient.SUPPORTED_DESTINATIONS] + if len(workflow['destinationConfigurations']) < 1: + log.warning(f"Workflow name: {workflow['name']} does not contain a supported destination") + continue + for destination_configuration in workflow['destinationConfigurations']: + if 'channelId' in destination_configuration: + source_channel_id = destination_configuration['channelId'] + if source_channel_id in channels_by_source_id: + channel = channels_by_source_id.get(source_channel_id) + if 'targetChannelId' in channel: + destination_configuration['targetChannelId'] = channel['targetChannelId'] + log.info(f"Target channel id: {destination_configuration['targetChannelId']} found for source channel id: {source_channel_id}") + else: + hasError = True + log.error(f"Unable to create workflow name: {workflow['name']}. 
Target channel id unavailable for source channel id: {source_channel_id} with type: {channel['type']}") + else: + hasError = True + log.error(f"Unable to create workflow name: {workflow['name']}. Source channel id: {source_channel_id} unavailable") + else: + hasError = True + log.info(f"Workflow name: {workflow['name']} with id: {workflow['id']} has no destinationConfigurations: {workflow}") + # Enrich issuesFilter with target account id and source policy ids + log.info(f"Enriching issues filter for target account: {tgt_acct}") + if "issuesFilter" in workflow: + workflow['issuesFilter']['targetAccountId'] = int(tgt_acct) + for predicate in workflow['issuesFilter']['predicates']: + if predicate['attribute'] == 'labels.policyIds': + targetValues = [] + for source_policy_id in predicate['values']: + if int(source_policy_id) in policies_by_source_id: + policy = policies_by_source_id.get(int(source_policy_id)) + if 'targetPolicyId' in policy: + targetValues.append(str(policy['targetPolicyId'])) + log.info(f"Target policy id: {str(policy['targetPolicyId'])} found for source policy id: {source_policy_id} ") + else: + hasError = True + log.error(f"Unable to create workflow name: {workflow['name']}. Target policy id unavailable for source policy id: {source_policy_id}") + else: + hasError = True + log.error(f"Unable to create workflow name: {workflow['name']}. Target policy id unavailable for source policy id: {source_policy_id}") + if len(targetValues) > 0: + predicate['targetValues'] = targetValues + else: + log.debug(f"Ignoring predicate {predicate}") + else: + hasError = True + log.info(f"Workflow name: {workflow['name']} with id: {workflow['id']} has no issuesFilter: {workflow}") + # Create the workflow + if not hasError: + create_workflow(workflow, tgt_acct, tgt_api_key, tgt_region) + else: + log.error(f"Unable to create workflow name: {workflow['name']}, {workflow}") + log.info('Workflows migration complete.') + return channels_by_source_id + + +def get_policies_by_source_id(source_account): + # Get policies by source id + data = store.load_alert_policies(source_account) + if (data is None) or ('policies' not in data): + log.error('No policies found in local store') + utils.error_message_and_exit(f'No policies found for sourceAccount {source_account}. Please ensure you have run store_policies.py, migrateconditions.py, and migratepolicies.py for the source account.') + # Create the policies_by_source_id dictionary + return {policy['id']: policy for policy in data['policies']} + + def main(): parser = configure_parser() args = parser.parse_args() @@ -93,14 +178,16 @@ def main(): utils.error_and_exit('target_api_key', 'ENV_TARGET_API_KEY') src_region = utils.ensure_source_region(args) tgt_region = utils.ensure_target_region(args) + source_account = str(args.sourceAccount[0]) + target_account = str(args.targetAccount[0]) + policies_by_source_id = get_policies_by_source_id(source_account) print_args(args, src_api_key, src_region, tgt_api_key, tgt_region) - if args.destinations: - migrate_destinations(args.sourceAccount[0], src_api_key, src_region, args.targetAccount[0], tgt_api_key, tgt_region) - elif args.channels: - # TODO missing destinations_by_source_id argument! 
- migrate_channels(args.sourceAccount[0], src_api_key, src_region, args.targetAccount[0], tgt_api_key, tgt_region) - else: - log.info("pass [--destinations | --channels] to fetch configuration") + # Migrate notification destinations + destinations_by_source_id = migrate_destinations(source_account, src_api_key, src_region, target_account, tgt_api_key, tgt_region) + # Migrate notification channels + channels_by_source_id = migrate_channels(source_account, src_api_key, src_region, target_account, tgt_api_key, tgt_region, destinations_by_source_id) + # Migrate workflows + workflows_by_source_id = migrate_workflows(source_account, src_api_key, src_region, target_account, tgt_api_key, tgt_region, channels_by_source_id, policies_by_source_id) if __name__ == '__main__': diff --git a/migrate_workflows.py b/migrate_workflows.py deleted file mode 100644 index 73364e5..0000000 --- a/migrate_workflows.py +++ /dev/null @@ -1,126 +0,0 @@ -import os -import argparse -import fetchworkflows as fetchworkflows -import library.clients.notificationsclient as notificationsclient -import library.clients.workflowsclient as workflowsclient -import library.localstore as store -import library.migrationlogger as m_logger -import library.utils as utils - - -log = m_logger.get_logger(os.path.basename(__file__)) -wc = workflowsclient.WorkflowsClient() - - -def print_args(args, src_api_key, src_region, tgt_api_key, tgt_region): - log.info("Using sourceAccount : " + str(args.sourceAccount[0])) - log.info("Using sourceApiKey : " + len(src_api_key[:-4])*"*"+src_api_key[-4:]) - log.info("sourceRegion : " + src_region) - log.info("Using targetAccount : " + str(args.targetAccount[0])) - log.info("Using targetApiKey : " + len(tgt_api_key[:-4]) * "*" + tgt_api_key[-4:]) - log.info("targetRegion : " + tgt_region) - - -def configure_parser(): - parser = argparse.ArgumentParser(description='Migrate Workflows') - parser.add_argument('--sourceAccount', nargs=1, type=int, required=True, help='Source accountId') - parser.add_argument('--sourceApiKey', nargs=1, type=str, required=True, help='Source account API Key or \ - set environment variable ENV_SOURCE_API_KEY') - parser.add_argument('--sourceRegion', type=str, nargs=1, required=False, help='sourceRegion us(default) or eu') - parser.add_argument('--targetAccount', nargs=1, type=int, required=True, help='Target accountId') - parser.add_argument('--targetApiKey', nargs=1, type=str, required=True, help='Target API Key, \ - or set environment variable ENV_TARGET_API_KEY') - parser.add_argument('--targetRegion', type=str, nargs=1, required=False, help='targetRegion us(default) or eu') - return parser - - -def create_workflow(workflow, tgt_acct, tgt_api_key, tgt_region): - log.info(f"Creating workflow: {workflow['name']}") - wc.create_workflow(workflow, tgt_api_key, tgt_acct, tgt_region) - log.info(f"Created workflow: {workflow['name']}") - - -def migrate_workflows(src_acct, src_api_key, src_region, tgt_acct, tgt_api_key, tgt_region, channels_by_source_id, policies_by_source_id): - log.info('Workflows migration started.') - workflows_by_source_id = fetchworkflows.fetch_workflows(src_api_key, src_acct, src_region) - for workflow in workflows_by_source_id.values(): - hasError = False - log.info(f"Workflow name: {workflow['name']}") - # Enrich destinationConfigurations with target channel ids - log.info(f"Enriching destination configurations for target account: {tgt_acct}") - if 'destinationConfigurations' in workflow: - # Splice workflow['destinationConfigurations'] to contain only 
supported destinations - workflow['destinationConfigurations'][:] = [destination_configuration for destination_configuration in workflow['destinationConfigurations'] if destination_configuration['type'] in notificationsclient.SUPPORTED_DESTINATIONS] - if len(workflow['destinationConfigurations']) < 1: - log.warning(f"Workflow name: {workflow['name']} does not contain a supported destination") - continue - for destination_configuration in workflow['destinationConfigurations']: - if 'channelId' in destination_configuration: - source_channel_id = destination_configuration['channelId'] - if source_channel_id in channels_by_source_id: - channel = channels_by_source_id.get(source_channel_id) - if 'targetChannelId' in channel: - destination_configuration['targetChannelId'] = channel['targetChannelId'] - log.info(f"Target channel id: {destination_configuration['targetChannelId']} found for source channel id: {source_channel_id}") - else: - hasError = True - log.error(f"Unable to create workflow name: {workflow['name']}. Target channel id unavailable for source channel id: {source_channel_id} with type: {channel['type']}") - else: - hasError = True - log.error(f"Unable to create workflow name: {workflow['name']}. Source channel id: {source_channel_id} unavailable") - else: - hasError = True - log.info(f"Workflow name: {workflow['name']} with id: {workflow['id']} has no destinationConfigurations: {workflow}") - # Enrich issuesFilter with target account id and source policy ids - log.info(f"Enriching issues filter for target account: {tgt_acct}") - if "issuesFilter" in workflow: - workflow['issuesFilter']['targetAccountId'] = int(tgt_acct) - for predicate in workflow['issuesFilter']['predicates']: - if predicate['attribute'] == 'labels.policyIds': - targetValues = [] - for source_policy_id in predicate['values']: - if int(source_policy_id) in policies_by_source_id: - policy = policies_by_source_id.get(int(source_policy_id)) - if 'targetPolicyId' in policy: - targetValues.append(str(policy['targetPolicyId'])) - log.info(f"Target policy id: {str(policy['targetPolicyId'])} found for source policy id: {source_policy_id} ") - else: - hasError = True - log.error(f"Unable to create workflow name: {workflow['name']}. Target policy id unavailable for source policy id: {source_policy_id}") - else: - hasError = True - log.error(f"Unable to create workflow name: {workflow['name']}. 
Target policy id unavailable for source policy id: {source_policy_id}") - if len(targetValues) > 0: - predicate['targetValues'] = targetValues - else: - log.debug(f"Ignoring predicate {predicate}") - else: - hasError = True - log.info(f"Workflow name: {workflow['name']} with id: {workflow['id']} has no issuesFilter: {workflow}") - # Create the workflow - if not hasError: - create_workflow(workflow, tgt_acct, tgt_api_key, tgt_region) - else: - log.error(f"Unable to create workflow name: {workflow['name']}, {workflow}") - log.info('Workflows migration complete.') - return channels_by_source_id - - -def main(): - parser = configure_parser() - args = parser.parse_args() - src_api_key = utils.ensure_source_api_key(args) - if not src_api_key: - utils.error_and_exit('source_api_key', 'ENV_SOURCE_API_KEY') - tgt_api_key = utils.ensure_target_api_key(args) - if not tgt_api_key: - utils.error_and_exit('target_api_key', 'ENV_TARGET_API_KEY') - src_region = utils.ensure_source_region(args) - tgt_region = utils.ensure_target_region(args) - print_args(args, src_api_key, src_region, tgt_api_key, tgt_region) - # TODO missing channels_by_source_id and policies_by_source_id arguments! - migrate_workflows(args.sourceAccount[0], src_api_key, src_region, args.targetAccount[0], tgt_api_key, tgt_region) - - -if __name__ == '__main__': - main()
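
The core of the new `migrate_workflows()` in migrate_notifications.py above is the id-mapping enrichment it applies to each source workflow before recreating it in the target account: `channelId` values in `destinationConfigurations` are resolved through `channels_by_source_id` (built by `migrate_channels()`), and `labels.policyIds` values in the `issuesFilter` are resolved through `policies_by_source_id` (built by `get_policies_by_source_id()` from the policies stored by store_policies.py and mapped by migratepolicies.py). Below is a condensed, illustrative sketch of that enrichment only; `remap_workflow` is a hypothetical helper name and the dict shapes are simplified, so it is not the patch's exact code.

```
# Illustrative sketch, not part of this patch: the id-mapping enrichment that
# migrate_workflows() performs before it recreates a workflow in the target account.
def remap_workflow(workflow, channels_by_source_id, policies_by_source_id, tgt_acct):
    """Annotate a source workflow with target-account channel and policy ids."""
    # destinationConfigurations: record the target channel created earlier by
    # migrate_channels() alongside the source channelId.
    for dest_cfg in workflow.get('destinationConfigurations', []):
        channel = channels_by_source_id.get(dest_cfg.get('channelId'), {})
        if 'targetChannelId' in channel:
            dest_cfg['targetChannelId'] = channel['targetChannelId']

    # issuesFilter: point the filter at the target account and translate
    # labels.policyIds values to the ids of the migrated target policies.
    issues_filter = workflow.get('issuesFilter')
    if issues_filter:
        issues_filter['targetAccountId'] = int(tgt_acct)
        for predicate in issues_filter.get('predicates', []):
            if predicate.get('attribute') == 'labels.policyIds':
                target_values = [
                    str(policies_by_source_id[int(pid)]['targetPolicyId'])
                    for pid in predicate.get('values', [])
                    if int(pid) in policies_by_source_id
                    and 'targetPolicyId' in policies_by_source_id[int(pid)]
                ]
                if target_values:
                    predicate['targetValues'] = target_values
    return workflow
```

In the patch itself the enrichment also drops destination configurations whose type is not in `notificationsclient.SUPPORTED_DESTINATIONS`, and `create_workflow()` is only called when every referenced channel and policy id resolved; otherwise the workflow is logged as an error and skipped.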