diff --git a/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py b/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py
index 8e5fa9ebcb4..684a34956d6 100644
--- a/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py
+++ b/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py
@@ -236,7 +236,25 @@ def find_input_artifact(self, artifact_name: str) -> CodePipelineArtifact | None
                 return artifact
         return None
 
-    def get_artifact(self, artifact_name: str, filename: str) -> str | None:
+    def find_output_artifact(self, artifact_name: str) -> CodePipelineArtifact | None:
+        """Find an output artifact by artifact name
+
+        Parameters
+        ----------
+        artifact_name : str
+            The name of the output artifact to look for
+
+        Returns
+        -------
+        CodePipelineArtifact, None
+            Matching CodePipelineArtifact if found
+        """
+        for artifact in self.data.output_artifacts:
+            if artifact.name == artifact_name:
+                return artifact
+        return None
+
+    def get_artifact(self, artifact_name: str, filename: str | None = None) -> str | bytes | None:
         """Get a file within an artifact zip on s3
 
         Parameters
@@ -245,6 +263,7 @@ def get_artifact(self, artifact_name: str, filename: str) -> str | None:
             Name of the S3 artifact to download
         filename : str
             The file name within the artifact zip to extract as a string
+            If None, the raw object body is returned as bytes.
 
         Returns
         -------
@@ -255,10 +274,60 @@ def get_artifact(self, artifact_name: str, filename: str) -> str | None:
         if artifact is None:
             return None
 
-        with tempfile.NamedTemporaryFile() as tmp_file:
-            s3 = self.setup_s3_client()
-            bucket = artifact.location.s3_location.bucket_name
-            key = artifact.location.s3_location.key
-            s3.download_file(bucket, key, tmp_file.name)
-            with zipfile.ZipFile(tmp_file.name, "r") as zip_file:
-                return zip_file.read(filename).decode("UTF-8")
+        s3 = self.setup_s3_client()
+        bucket = artifact.location.s3_location.bucket_name
+        key = artifact.location.s3_location.key
+
+        if filename:
+            with tempfile.NamedTemporaryFile() as tmp_file:
+                s3.download_file(bucket, key, tmp_file.name)
+                with zipfile.ZipFile(tmp_file.name, "r") as zip_file:
+                    return zip_file.read(filename).decode("UTF-8")
+
+        return s3.get_object(Bucket=bucket, Key=key)["Body"].read()
+
+    def put_artifact(self, artifact_name: str, body: Any, content_type: str) -> None:
+        """Writes an object to an S3 output artifact.
+
+        Parameters
+        ----------
+        artifact_name : str
+            Name of the S3 artifact to upload
+        body : Any
+            The data to be written. Binary files should use io.BytesIO.
+        content_type : str
+            The content type of the data.
+
+        Returns
+        -------
+        None
+        """
+        artifact = self.find_output_artifact(artifact_name)
+        if artifact is None:
+            raise ValueError(f"Artifact not found: {artifact_name}.")
+
+        s3 = self.setup_s3_client()
+        bucket = artifact.location.s3_location.bucket_name
+        key = artifact.location.s3_location.key
+
+        # encryptionKey is optional in the job event; when it is absent, the
+        # bucket's default encryption applies and no SSE parameters are sent.
+        if self.data.encryption_key is None:
+            s3.put_object(Bucket=bucket, Key=key, ContentType=content_type, Body=body)
+            return
+
+        encryption_key_id = self.data.encryption_key.get_id
+        encryption_key_type = self.data.encryption_key.get_type
+        if encryption_key_type == "KMS":
+            # The event reports the type as "KMS"; the S3 API expects "aws:kms".
+            encryption_key_type = "aws:kms"
+
+        s3.put_object(
+            Bucket=bucket,
+            Key=key,
+            ContentType=content_type,
+            Body=body,
+            ServerSideEncryption=encryption_key_type,
+            SSEKMSKeyId=encryption_key_id,
+            BucketKeyEnabled=True,
+        )
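Taken together, `find_output_artifact` and `put_artifact` let a handler publish results back to the pipeline's declared output artifacts. A minimal handler sketch of how the new methods compose (the artifact names, file name, and payload here are illustrative only, not part of the change):

```python
import json

from aws_lambda_powertools.utilities.data_classes import (
    CodePipelineJobEvent,
    event_source,
)


@event_source(data_class=CodePipelineJobEvent)
def lambda_handler(event: CodePipelineJobEvent, context):
    # Read a file out of a (hypothetical) input artifact zip...
    config = event.get_artifact("SourceArtifact", "config.json")

    # ...and publish a JSON result to a (hypothetical) declared output artifact.
    # put_artifact raises ValueError if no output artifact matches the name.
    event.put_artifact(
        artifact_name="BuildOutput",
        body=json.dumps({"processed": True, "config": json.loads(config)}),
        content_type="application/json",
    )
```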
diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md
index 01fe21e20b8..a207a7b787f 100644
--- a/docs/utilities/data_classes.md
+++ b/docs/utilities/data_classes.md
@@ -679,7 +679,12 @@ Data classes and utility functions to help create continuous delivery pipelines
         else:
             template = event.get_artifact(artifact_name, template_file)
         # Kick off a stack update or create
-        start_update_or_create(job_id, stack, template)
+        result = start_update_or_create(job_id, stack, template)
+        event.put_artifact(
+            artifact_name="json-artifact",
+            body=json.dumps(result),
+            content_type="application/json",
+        )
     except Exception as e:
         # If any other exceptions which we didn't expect are raised
         # then fail the job and log the exception message.
diff --git a/tests/events/codePipelineEventData.json b/tests/events/codePipelineEventData.json
index 7552f19ca93..3635312c38b 100644
--- a/tests/events/codePipelineEventData.json
+++ b/tests/events/codePipelineEventData.json
@@ -40,6 +40,10 @@
         "secretAccessKey": "6CGtmAa3lzWtV7a...",
         "sessionToken": "IQoJb3JpZ2luX2VjEA...",
         "expirationTime": 1575493418000
+      },
+      "encryptionKey": {
+        "id": "someKey",
+        "type": "KMS"
       }
     }
   }
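With `encryptionKey` present in the fixture, the key's id and type are reachable through the existing data-class accessors, and `put_artifact` translates the reported `"KMS"` type into the `"aws:kms"` value the S3 API expects. A quick sketch against the fixture above (reusing the same `load_event` helper the tests use):

```python
from aws_lambda_powertools.utilities.data_classes import CodePipelineJobEvent

event = CodePipelineJobEvent(load_event("codePipelineEventData.json"))

encryption_key = event.data.encryption_key
if encryption_key is not None:  # encryptionKey is optional in job events
    assert encryption_key.get_id == "someKey"
    assert encryption_key.get_type == "KMS"
```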
diff --git a/tests/unit/data_classes/_boto3/test_code_pipeline_job_event.py b/tests/unit/data_classes/_boto3/test_code_pipeline_job_event.py
index 75e68b44396..8a5984dc810 100644
--- a/tests/unit/data_classes/_boto3/test_code_pipeline_job_event.py
+++ b/tests/unit/data_classes/_boto3/test_code_pipeline_job_event.py
@@ -1,7 +1,9 @@
 import json
 import zipfile
+from io import StringIO
 
 import pytest
+from botocore.response import StreamingBody
 from pytest_mock import MockerFixture
 
 from aws_lambda_powertools.utilities.data_classes import CodePipelineJobEvent
@@ -184,3 +186,85 @@ def download_file(bucket: str, key: str, tmp_name: str):
         },
     )
     assert artifact_str == file_contents
+
+
+def test_raw_code_pipeline_get_artifact(mocker: MockerFixture):
+    raw_content = json.dumps({"steve": "french"})
+
+    class MockClient:
+        @staticmethod
+        def get_object(Bucket: str, Key: str):
+            assert Bucket == "us-west-2-123456789012-my-pipeline"
+            assert Key == "my-pipeline/test-api-2/TdOSFRV"
+            return {"Body": StreamingBody(StringIO(raw_content), len(raw_content))}
+
+    s3 = mocker.patch("boto3.client")
+    s3.return_value = MockClient()
+
+    event = CodePipelineJobEvent(load_event("codePipelineEventData.json"))
+
+    artifact_str = event.get_artifact(artifact_name="my-pipeline-SourceArtifact")
+
+    s3.assert_called_once_with(
+        "s3",
+        **{
+            "aws_access_key_id": event.data.artifact_credentials.access_key_id,
+            "aws_secret_access_key": event.data.artifact_credentials.secret_access_key,
+            "aws_session_token": event.data.artifact_credentials.session_token,
+        },
+    )
+    assert artifact_str == raw_content
+
+
+def test_code_pipeline_put_artifact(mocker: MockerFixture):
+    raw_content = json.dumps({"steve": "french"})
+    artifact_content_type = "application/json"
+    event = CodePipelineJobEvent(load_event("codePipelineEventData.json"))
+    artifact_name = event.data.output_artifacts[0].name
+
+    class MockClient:
+        @staticmethod
+        def put_object(
+            Bucket: str,
+            Key: str,
+            ContentType: str,
+            Body: str,
+            ServerSideEncryption: str,
+            SSEKMSKeyId: str,
+            BucketKeyEnabled: bool,
+        ):
+            output_artifact = event.find_output_artifact(artifact_name)
+            assert Bucket == output_artifact.location.s3_location.bucket_name
+            assert Key == output_artifact.location.s3_location.key
+            assert ContentType == artifact_content_type
+            assert Body == raw_content
+            assert ServerSideEncryption == "aws:kms"
+            assert SSEKMSKeyId == event.data.encryption_key.get_id
+            assert BucketKeyEnabled is True
+
+    s3 = mocker.patch("boto3.client")
+    s3.return_value = MockClient()
+
+    event.put_artifact(
+        artifact_name=artifact_name,
+        body=raw_content,
+        content_type=artifact_content_type,
+    )
+
+    s3.assert_called_once_with(
+        "s3",
+        **{
+            "aws_access_key_id": event.data.artifact_credentials.access_key_id,
+            "aws_secret_access_key": event.data.artifact_credentials.secret_access_key,
+            "aws_session_token": event.data.artifact_credentials.session_token,
+        },
+    )
+
+
+def test_code_pipeline_put_output_artifact_not_found():
+    raw_event = load_event("codePipelineEventData.json")
+    parsed_event = CodePipelineJobEvent(raw_event)
+
+    assert parsed_event.find_output_artifact("not-found") is None
+    with pytest.raises(ValueError, match="Artifact not found: not-found"):
+        parsed_event.put_artifact(artifact_name="not-found", body="", content_type="text/plain")
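Since `get_artifact` without a `filename` returns the raw artifact zip body (bytes from a real S3 response, per the updated annotation), a caller can unpack it in memory instead of going through a temporary file. A small sketch; the artifact and member names are hypothetical:

```python
import io
import json
import zipfile

from aws_lambda_powertools.utilities.data_classes import CodePipelineJobEvent


def read_manifest(event: CodePipelineJobEvent) -> dict:
    # Raw object body: the bytes of the artifact zip stored on S3
    raw = event.get_artifact(artifact_name="my-pipeline-SourceArtifact")
    with zipfile.ZipFile(io.BytesIO(raw)) as zf:
        return json.loads(zf.read("manifest.json"))
```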