OY-4608: Changes to the Lampi script #663

Merged 1 commit on Jan 2, 2025
scripts/lampi-export/Dockerfile (2 changes: 1 addition & 1 deletion)
@@ -20,4 +20,4 @@ WORKDIR /opt/export
COPY scripts/lampi-export/lampi-export.sh /opt/export
COPY scripts/lampi-export/send-error-message.sh /opt/export

-ENTRYPOINT ["./lampi-export.sh", "||", "./send-error-message.sh"]
+ENTRYPOINT exec ./lampi-export.sh || ./send-error-message.sh
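
For context on the Dockerfile change: a JSON (exec-form) ENTRYPOINT is executed without a shell, so the old entrypoint passed the literal strings "||" and "./send-error-message.sh" to lampi-export.sh as plain arguments instead of expressing a fallback. The new shell form is run via /bin/sh -c, so the operator is interpreted by a shell. A minimal sketch of the difference, using a hypothetical throwaway script that is not part of this repository:

#!/bin/bash
# Hypothetical demo script that only reports the arguments it receives.
cat > /tmp/show-args.sh <<'EOF'
#!/bin/sh
echo "received $# argument(s): $*"
EOF
chmod +x /tmp/show-args.sh

# Old exec-form behaviour: no shell is involved, so "||" is just data.
/tmp/show-args.sh '||' './send-error-message.sh'
# -> received 2 argument(s): || ./send-error-message.sh

# New shell-form behaviour: /bin/sh -c parses the line, so "||" is an operator
# (the right-hand command only runs if the left-hand command fails).
/bin/sh -c "/tmp/show-args.sh || echo 'fallback command'"
# -> received 0 argument(s):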
scripts/lampi-export/lampi-export.sh (47 changes: 23 additions & 24 deletions)
@@ -17,6 +17,9 @@ else
fi
db_name="ehoks"
db_user="app"
+assume_access_key_id=""
+assume_secret_access_key=""
+assume_session_token=""

case $ENV_NAME in
"sade")
@@ -29,18 +32,21 @@ case $ENV_NAME in
lampi_s3_bucket="oph-lampi-dev"
esac

+refresh_assume_role() {
+local -r credentials_json=$(aws sts assume-role --role-arn "$assume_role_arn" --external-id "$ssm_external_id" --role-session-name "ehoks-lampi-export-$ENV_NAME")
+assume_access_key_id=$(jq -r '.Credentials.AccessKeyId' <<<"${credentials_json}")
+assume_secret_access_key=$(jq -r '.Credentials.SecretAccessKey' <<<"${credentials_json}")
+assume_session_token=$(jq -r '.Credentials.SessionToken' <<<"${credentials_json}")
+local -r assume_expiration=$(jq -r '.Credentials.Expiration' <<<"${credentials_json}")
+log "INFO" "Assumed temporary access key ${assume_access_key_id} (expiration ${assume_expiration})"
+}

dump_and_upload_db_to_lampi() {
log "INFO" "Start dump_and_upload_db_to_lampi"
log "DEBUG" "Params: db_hostname ${db_hostname}, db_name ${db_name}, ENV_NAME $ENV_NAME, local_s3_bucket $local_s3_bucket, lampi_s3_bucket $lampi_s3_bucket assume_role_arn $assume_role_arn"

log "INFO" "Starting ${db_name} database data dump"

-local -r credentials_json=$(aws sts assume-role --role-arn "$assume_role_arn" --external-id "$ssm_external_id" --role-session-name "ehoks-lampi-export-$ENV_NAME")
-local -r assume_role_access_key_id=$(jq -r '.Credentials.AccessKeyId' <<<"${credentials_json}")
-local -r assume_role_secret_access_key=$(jq -r '.Credentials.SecretAccessKey' <<<"${credentials_json}")
-local -r assume_role_session_token=$(jq -r '.Credentials.SessionToken' <<<"${credentials_json}")
-log "INFO" "Assumed temporary access key ${assume_role_access_key_id}"

local -r db_password="$ssm_app_user_password"

# aws s3 extension and granting privileges need to be created with master user:
@@ -59,11 +65,11 @@
log "INFO" "Exporting table $db_table to local S3 $s3_url"
local files_uploaded=$(pg_command "$db_password" "SELECT files_uploaded FROM aws_s3.query_export_to_s3('SELECT * FROM ${reporting_schema_name}.${db_table}', aws_commons.create_s3_uri('$local_s3_bucket', '$s3_key', 'eu-west-1'), options := 'format csv, header true')" 1 1)
log "INFO" "Successfully exported table ${reporting_schema_name}.${db_table} to local S3 $s3_url"
-copy_table_to_lampi "$s3_url" $files_uploaded "$assume_role_access_key_id" "$assume_role_secret_access_key" "$assume_role_session_token"
+copy_table_to_lampi "$s3_url" $files_uploaded
done

-generate_and_upload_schema_file "$db_password" "$assume_role_access_key_id" "$assume_role_secret_access_key" "$assume_role_session_token"
-upload_file_to_lampi "$lampi_manifest_file" "$assume_role_access_key_id" "$assume_role_secret_access_key" "$assume_role_session_token" > /dev/null
+generate_and_upload_schema_file "$db_password"
+upload_file_to_lampi "$lampi_manifest_file" > /dev/null
log "INFO" "manifest.json" $(cat "$lampi_manifest_file")
}

@@ -87,10 +93,7 @@ pg_command() {
copy_table_to_lampi() {
local -r source_url="$1"
local -r file_parts=$2
-local -r access_key_id="$3"
-local -r secret_access_key="$4"
-local -r session_token="$5"

+refresh_assume_role
local -r s3_key="fulldump/$system_name/$version/$(basename "${source_url}").gz"
local -r target_url="s3://$lampi_s3_bucket/$s3_key"

@@ -100,10 +103,10 @@
for i in $(seq 2 "$file_parts"); do
aws s3 cp "${source_url}_part${i}" -
done
-fi) | gzip | AWS_ACCESS_KEY_ID="$access_key_id" AWS_SECRET_ACCESS_KEY="$secret_access_key" AWS_SESSION_TOKEN="$session_token" aws s3 cp - "$target_url"
+fi) | gzip | AWS_ACCESS_KEY_ID="$assume_access_key_id" AWS_SECRET_ACCESS_KEY="$assume_secret_access_key" AWS_SESSION_TOKEN="$assume_session_token" aws s3 cp - "$target_url"

log "INFO" "Fetch object version for $s3_key in $lampi_s3_bucket"
-local -r obj_version=$(AWS_ACCESS_KEY_ID="$access_key_id" AWS_SECRET_ACCESS_KEY="$secret_access_key" AWS_SESSION_TOKEN="$session_token" aws s3api head-object --region eu-west-1 --bucket "$lampi_s3_bucket" --key "$s3_key" --output json | jq -r .VersionId)
+local -r obj_version=$(AWS_ACCESS_KEY_ID="$assume_access_key_id" AWS_SECRET_ACCESS_KEY="$assume_secret_access_key" AWS_SESSION_TOKEN="$assume_session_token" aws s3api head-object --region eu-west-1 --bucket "$lampi_s3_bucket" --key "$s3_key" --output json | jq -r .VersionId)

log "INFO" "Update received object version to manifest: $s3_key = $obj_version"
local -r item=$(lampi_manifest_item "$s3_key" "$obj_version")
@@ -128,27 +131,23 @@ lampi_manifest_item() {

generate_and_upload_schema_file() {
local -r db_password="$1"
-local -r access_key_id="$2"
-local -r secret_access_key="$3"
-local -r session_token="$4"
+refresh_assume_role
local -r schema_file="ehoks-$reporting_schema_name.schema"
touch "$schema_file"
log "INFO" "Generating schema file $schema_file"
PGPASSWORD="${db_password}" pg_dump -Fc --section=pre-data --section=post-data --no-comments --no-privilege --no-owner --host "$db_hostname" --username "$db_user" --schema-only --file "$schema_file" --schema "$reporting_schema_name" -t "$reporting_schema_name.*" "$db_name"
-local -r schema=$(upload_file_to_lampi "$schema_file" "$access_key_id" "$secret_access_key" "$session_token")
+local -r schema=$(upload_file_to_lampi "$schema_file")
add_to_manifest ".schema += $schema"
}

upload_file_to_lampi() {
local -r file="$1"
-local -r access_key_id="$2"
-local -r secret_access_key="$3"
-local -r session_token="$4"
+refresh_assume_role > /dev/null
local -r file_s3_key="fulldump/$system_name/$version/$file"
local -r file_s3_url="s3://$local_s3_bucket/$file_s3_key"
local -r put_result=$(aws s3api put-object --region eu-west-1 --body "$file" --bucket "$local_s3_bucket" --key "$file_s3_key" --output json)
-local -r cp_result=$(aws s3 cp "$file_s3_url" - | AWS_ACCESS_KEY_ID="$access_key_id" AWS_SECRET_ACCESS_KEY="$secret_access_key" AWS_SESSION_TOKEN="$session_token" aws s3 cp - "s3://$lampi_s3_bucket/$file_s3_key")
-local -r obj_version=$(AWS_ACCESS_KEY_ID="$access_key_id" AWS_SECRET_ACCESS_KEY="$secret_access_key" AWS_SESSION_TOKEN="$session_token" aws s3api head-object --region eu-west-1 --bucket "$lampi_s3_bucket" --key "$file_s3_key" --output json | jq -r .VersionId)
+local -r cp_result=$(aws s3 cp "$file_s3_url" - | AWS_ACCESS_KEY_ID="$assume_access_key_id" AWS_SECRET_ACCESS_KEY="$assume_secret_access_key" AWS_SESSION_TOKEN="$assume_session_token" aws s3 cp - "s3://$lampi_s3_bucket/$file_s3_key")
+local -r obj_version=$(AWS_ACCESS_KEY_ID="$assume_access_key_id" AWS_SECRET_ACCESS_KEY="$assume_secret_access_key" AWS_SESSION_TOKEN="$assume_session_token" aws s3api head-object --region eu-west-1 --bucket "$lampi_s3_bucket" --key "$file_s3_key" --output json | jq -r .VersionId)
echo $(lampi_manifest_item "$file_s3_key" "$obj_version")
}

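
The thread running through the lampi-export.sh changes: the temporary STS credentials are no longer assumed once in dump_and_upload_db_to_lampi and passed down as positional parameters; the new refresh_assume_role helper stores them in globals and is called immediately before each copy to the Lampi bucket, presumably so that a long-running export cannot outlive the assumed-role session. A minimal sketch of that pattern with placeholder role, bucket and file names (the real script derives these from SSM parameters and the environment):

#!/bin/bash
set -euo pipefail

# Globals filled in by the refresh helper, mirroring the script above.
assume_access_key_id=""
assume_secret_access_key=""
assume_session_token=""

# Assume the role again and stash fresh temporary credentials in the globals.
refresh_assume_role() {
  local -r creds=$(aws sts assume-role \
    --role-arn "arn:aws:iam::123456789012:role/example-export-role" \
    --external-id "example-external-id" \
    --role-session-name "example-session")
  assume_access_key_id=$(jq -r '.Credentials.AccessKeyId' <<<"$creds")
  assume_secret_access_key=$(jq -r '.Credentials.SecretAccessKey' <<<"$creds")
  assume_session_token=$(jq -r '.Credentials.SessionToken' <<<"$creds")
}

# Refresh right before each upload so the credentials are always well within
# their lifetime, even if earlier tables took a long time to export.
upload_with_fresh_credentials() {
  local -r source_file="$1"
  refresh_assume_role
  AWS_ACCESS_KEY_ID="$assume_access_key_id" \
  AWS_SECRET_ACCESS_KEY="$assume_secret_access_key" \
  AWS_SESSION_TOKEN="$assume_session_token" \
    aws s3 cp "$source_file" "s3://example-target-bucket/$(basename "$source_file")"
}

upload_with_fresh_credentials "./example-dump.csv.gz"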
scripts/lampi-export/send-error-message.sh (4 changes: 2 additions & 2 deletions)
@@ -1,5 +1,5 @@
#!/bin/bash

echo "Sending error message to Slack webhook $ssm_monitor_slack_webhook"
set -eu

-curl -X POST --data '{"blocks":[{"type":"header","text":{"type":"plain_text","text":"⚠️ Error in eHOKS -> Lampi export: check logs","emoji":true}}]}' -H 'content-type: application/json' -H 'accept: application/json' $ssm_monitor_slack_webhook
+curl -X POST --data '{"blocks":[{"type":"header","text":{"type":"plain_text","text":"⚠️ Error in eHOKS -> Lampi export: check logs","emoji":true}}]}' -H 'content-type: application/json' -H 'accept: application/json' "$ssm_monitor_slack_webhook"
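
On the send-error-message.sh change: set -eu makes the script stop on errors and on unset variables instead of carrying on silently, the line that echoed the webhook URL into the logs is gone, and quoting "$ssm_monitor_slack_webhook" prevents word splitting of the URL. A small illustration of what set -eu adds, with a hypothetical variable name:

#!/bin/bash
set -eu

# With -u, an unset variable is a hard error rather than an empty string,
# so a missing webhook configuration aborts the script immediately:
echo "posting to $example_unset_webhook"
# -> example_unset_webhook: unbound variable

# With -e, any later command that exits non-zero (for example a failed curl)
# would likewise abort the script instead of being ignored.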