Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(event_source): Extend CodePipeline Artifact Capabilities #5448

Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -236,7 +236,25 @@ def find_input_artifact(self, artifact_name: str) -> CodePipelineArtifact | None
return artifact
return None

def get_artifact(self, artifact_name: str, filename: str) -> str | None:
def find_output_artifact(self, artifact_name: str) -> CodePipelineArtifact | None:
    """Find an output artifact by artifact name

    Parameters
    ----------
    artifact_name : str
        The name of the output artifact to look for

    Returns
    -------
    CodePipelineArtifact, None
        Matching CodePipelineArtifact if found
    """
    # Scan the event's output artifacts and return the first name match, if any.
    matches = (candidate for candidate in self.data.output_artifacts if candidate.name == artifact_name)
    return next(matches, None)

def get_artifact(self, artifact_name: str, filename: str | None = None) -> str | None:
"""Get a file within an artifact zip on s3
Parameters
Expand All @@ -245,6 +263,7 @@ def get_artifact(self, artifact_name: str, filename: str) -> str | None:
Name of the S3 artifact to download
filename : str
The file name within the artifact zip to extract as a string
If None, this will return the raw object body.
Returns
-------
Expand All @@ -255,10 +274,53 @@ def get_artifact(self, artifact_name: str, filename: str) -> str | None:
if artifact is None:
return None

with tempfile.NamedTemporaryFile() as tmp_file:
s3 = self.setup_s3_client()
bucket = artifact.location.s3_location.bucket_name
key = artifact.location.s3_location.key
s3.download_file(bucket, key, tmp_file.name)
with zipfile.ZipFile(tmp_file.name, "r") as zip_file:
return zip_file.read(filename).decode("UTF-8")
s3 = self.setup_s3_client()
bucket = artifact.location.s3_location.bucket_name
key = artifact.location.s3_location.key

if filename:
with tempfile.NamedTemporaryFile() as tmp_file:
s3.download_file(bucket, key, tmp_file.name)
with zipfile.ZipFile(tmp_file.name, "r") as zip_file:
return zip_file.read(filename).decode("UTF-8")

return s3.get_object(Bucket=bucket, Key=key)["Body"].read()
anafalcao marked this conversation as resolved.
Show resolved Hide resolved

def put_artifact(self, artifact_name: str, body: Any, content_type: str) -> None:
    """Writes an object to an s3 output artifact.

    Parameters
    ----------
    artifact_name : str
        Name of the S3 artifact to upload
    body: Any
        The data to be written. Binary files should use io.BytesIO.
    content_type: str
        The content type of the data.

    Returns
    -------
    None

    Raises
    ------
    ValueError
        If no output artifact named ``artifact_name`` exists on the event.
    """
    artifact = self.find_output_artifact(artifact_name)
    if artifact is None:
        raise ValueError(f"Artifact not found: {artifact_name}.")

    s3 = self.setup_s3_client()
    bucket = artifact.location.s3_location.bucket_name
    key = artifact.location.s3_location.key

    # encryptionKey is optional in CodePipeline job events. When it is absent,
    # omit the SSE parameters entirely so S3 applies the bucket's default
    # encryption instead of this method failing with an AttributeError.
    # NOTE(review): assumes data.encryption_key is None when the field is
    # missing from the event — confirm against the data class.
    extra_args: dict[str, Any] = {}
    encryption_key = self.data.encryption_key
    if encryption_key is not None:
        encryption_key_type = encryption_key.get_type
        # CodePipeline reports the key type as "KMS", but the S3 API
        # expects the literal "aws:kms" for ServerSideEncryption.
        if encryption_key_type == "KMS":
            encryption_key_type = "aws:kms"
        extra_args = {
            "ServerSideEncryption": encryption_key_type,
            "SSEKMSKeyId": encryption_key.get_id,
            "BucketKeyEnabled": True,
        }

    s3.put_object(
        Bucket=bucket,
        Key=key,
        ContentType=content_type,
        Body=body,
        **extra_args,
    )
7 changes: 6 additions & 1 deletion docs/utilities/data_classes.md
Original file line number Diff line number Diff line change
Expand Up @@ -679,7 +679,12 @@ Data classes and utility functions to help create continuous delivery pipelines
else:
template = event.get_artifact(artifact_name, template_file)
# Kick off a stack update or create
start_update_or_create(job_id, stack, template)
result = start_update_or_create(job_id, stack, template)
event.put_artifact(
artifact_name="json-artifact",
leandrodamascena marked this conversation as resolved.
Show resolved Hide resolved
body=json.dumps(result),
content_type="application/json"
)
except Exception as e:
# If any other exceptions which we didn't expect are raised
# then fail the job and log the exception message.
Expand Down
4 changes: 4 additions & 0 deletions tests/events/codePipelineEventData.json
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,10 @@
"secretAccessKey": "6CGtmAa3lzWtV7a...",
"sessionToken": "IQoJb3JpZ2luX2VjEA...",
"expirationTime": 1575493418000
},
"encryptionKey": {
"id": "someKey",
"type": "KMS"
}
}
}
Expand Down
85 changes: 85 additions & 0 deletions tests/unit/data_classes/_boto3/test_code_pipeline_job_event.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
import json
import zipfile
from io import StringIO

import pytest
from botocore.response import StreamingBody
from pytest_mock import MockerFixture

from aws_lambda_powertools.utilities.data_classes import CodePipelineJobEvent
Expand Down Expand Up @@ -184,3 +186,86 @@ def download_file(bucket: str, key: str, tmp_name: str):
},
)
assert artifact_str == file_contents


def test_raw_code_pipeline_get_artifact(mocker: MockerFixture):
    expected_body = json.dumps({"steve": "french"})

    class FakeS3Client:
        # Stands in for the boto3 S3 client; verifies the exact bucket/key
        # taken from the event fixture and serves the raw object body.
        @staticmethod
        def get_object(Bucket: str, Key: str):
            assert Bucket == "us-west-2-123456789012-my-pipeline"
            assert Key == "my-pipeline/test-api-2/TdOSFRV"
            return {"Body": StreamingBody(StringIO(expected_body), len(expected_body))}

    boto3_client = mocker.patch("boto3.client")
    boto3_client.return_value = FakeS3Client()

    event = CodePipelineJobEvent(load_event("codePipelineEventData.json"))

    # No filename argument -> the raw object body should be returned.
    result = event.get_artifact(artifact_name="my-pipeline-SourceArtifact")

    boto3_client.assert_called_once_with(
        "s3",
        aws_access_key_id=event.data.artifact_credentials.access_key_id,
        aws_secret_access_key=event.data.artifact_credentials.secret_access_key,
        aws_session_token=event.data.artifact_credentials.session_token,
    )
    assert result == expected_body


def test_code_pipeline_put_artifact(mocker: MockerFixture):
    payload = json.dumps({"steve": "french"})
    payload_content_type = "application/json"
    event = CodePipelineJobEvent(load_event("codePipelineEventData.json"))
    artifact_name = event.data.output_artifacts[0].name

    class FakeS3Client:
        # Stands in for the boto3 S3 client; asserts that put_object receives
        # the output artifact's S3 location plus the expected SSE-KMS settings.
        @staticmethod
        def put_object(
            Bucket: str,
            Key: str,
            ContentType: str,
            Body: str,
            ServerSideEncryption: str,
            SSEKMSKeyId: str,
            BucketKeyEnabled: bool,
        ):
            expected = event.find_output_artifact(artifact_name)
            assert Bucket == expected.location.s3_location.bucket_name
            assert Key == expected.location.s3_location.key
            assert ContentType == payload_content_type
            assert Body == payload
            assert ServerSideEncryption == "aws:kms"
            assert SSEKMSKeyId == event.data.encryption_key.get_id
            assert BucketKeyEnabled is True

    boto3_client = mocker.patch("boto3.client")
    boto3_client.return_value = FakeS3Client()

    event.put_artifact(
        artifact_name=artifact_name,
        body=payload,
        content_type=payload_content_type,
    )

    boto3_client.assert_called_once_with(
        "s3",
        aws_access_key_id=event.data.artifact_credentials.access_key_id,
        aws_secret_access_key=event.data.artifact_credentials.secret_access_key,
        aws_session_token=event.data.artifact_credentials.session_token,
    )


def test_code_pipeline_put_output_artifact_not_found():
    event = CodePipelineJobEvent(load_event("codePipelineEventData.json"))

    # An unknown artifact name yields no match and put_artifact must refuse it.
    assert event.find_output_artifact("not-found") is None
    with pytest.raises(ValueError):
        event.put_artifact(artifact_name="not-found", body="", content_type="text/plain")
Loading