diff --git a/.latest-tag-aws-sdk-go b/.latest-tag-aws-sdk-go index 38838f09..0e263297 100644 --- a/.latest-tag-aws-sdk-go +++ b/.latest-tag-aws-sdk-go @@ -1 +1 @@ -release-2024-10-11 +release-2024-10-14 diff --git a/src/aws_codepipeline.erl b/src/aws_codepipeline.erl index 647c9467..b815fa59 100644 --- a/src/aws_codepipeline.erl +++ b/src/aws_codepipeline.erl @@ -625,7 +625,8 @@ %% Example: %% failure_conditions() :: #{ %% <<"conditions">> => list(condition()()), -%% <<"result">> => list(any()) +%% <<"result">> => list(any()), +%% <<"retryConfiguration">> => retry_configuration() %% } -type failure_conditions() :: #{binary() => any()}. @@ -652,6 +653,14 @@ %% } -type stage_context() :: #{binary() => any()}. +%% Example: +%% retry_stage_metadata() :: #{ +%% <<"autoStageRetryAttempt">> => integer(), +%% <<"latestRetryTrigger">> => list(any()), +%% <<"manualStageRetryAttempt">> => integer() +%% } +-type retry_stage_metadata() :: #{binary() => any()}. + %% Example: %% enable_stage_transition_input() :: #{ %% <<"pipelineName">> := string(), @@ -903,6 +912,12 @@ %% } -type resource_not_found_exception() :: #{binary() => any()}. +%% Example: +%% retry_configuration() :: #{ +%% <<"retryMode">> => list(any()) +%% } +-type retry_configuration() :: #{binary() => any()}. + %% Example: %% input_artifact() :: #{ %% <<"name">> => string() @@ -1675,6 +1690,7 @@ %% <<"latestExecution">> => stage_execution(), %% <<"onFailureConditionState">> => stage_condition_state(), %% <<"onSuccessConditionState">> => stage_condition_state(), +%% <<"retryStageMetadata">> => retry_stage_metadata(), %% <<"stageName">> => string() %% } -type stage_state() :: #{binary() => any()}. @@ -2654,7 +2670,8 @@ list_pipelines(Client, Input, Options) request(Client, <<"ListPipelines">>, Input, Options). %% @doc Lists the rule executions that have occurred in a pipeline configured -%% for conditions with rules. +%% for conditions +%% with rules. 
-spec list_rule_executions(aws_client:aws_client(), list_rule_executions_input()) -> {ok, list_rule_executions_output(), tuple()} | {error, any()} | diff --git a/src/aws_mailmanager.erl b/src/aws_mailmanager.erl index 4cd177f0..a92533db 100644 --- a/src/aws_mailmanager.erl +++ b/src/aws_mailmanager.erl @@ -158,7 +158,9 @@ %% Example: %% get_archive_message_response() :: #{ -%% <<"MessageDownloadLink">> => string() +%% <<"Envelope">> => envelope(), +%% <<"MessageDownloadLink">> => string(), +%% <<"Metadata">> => metadata() %% } -type get_archive_message_response() :: #{binary() => any()}. @@ -353,6 +355,7 @@ %% <<"ExportDestinationConfiguration">> := list(), %% <<"Filters">> => archive_filters(), %% <<"FromTimestamp">> := [non_neg_integer()], +%% <<"IncludeMetadata">> => [boolean()], %% <<"MaxResults">> => integer(), %% <<"ToTimestamp">> := [non_neg_integer()] %% } @@ -373,6 +376,19 @@ %% } -type list_addon_instances_response() :: #{binary() => any()}. +%% Example: +%% metadata() :: #{ +%% <<"IngressPointId">> => string(), +%% <<"RuleSetId">> => string(), +%% <<"SenderHostname">> => [string()], +%% <<"SenderIpAddress">> => string(), +%% <<"Timestamp">> => [non_neg_integer()], +%% <<"TlsCipherSuite">> => [string()], +%% <<"TlsProtocol">> => [string()], +%% <<"TrafficPolicyId">> => string() +%% } +-type metadata() :: #{binary() => any()}. + %% Example: %% list_traffic_policies_response() :: #{ %% <<"NextToken">> => string(), @@ -647,6 +663,14 @@ %% } -type list_addon_subscriptions_request() :: #{binary() => any()}. +%% Example: +%% envelope() :: #{ +%% <<"From">> => [string()], +%% <<"Helo">> => [string()], +%% <<"To">> => list([string()]()) +%% } +-type envelope() :: #{binary() => any()}. 
+ %% Example: %% tag() :: #{ %% <<"Key">> => string(), @@ -672,12 +696,16 @@ %% <<"ArchivedMessageId">> => string(), %% <<"Cc">> => [string()], %% <<"Date">> => [string()], +%% <<"Envelope">> => envelope(), %% <<"From">> => [string()], %% <<"HasAttachments">> => [boolean()], %% <<"InReplyTo">> => [string()], +%% <<"IngressPointId">> => string(), %% <<"MessageId">> => [string()], %% <<"ReceivedHeaders">> => list([string()]()), %% <<"ReceivedTimestamp">> => [non_neg_integer()], +%% <<"SenderHostname">> => [string()], +%% <<"SenderIpAddress">> => string(), %% <<"Subject">> => [string()], %% <<"To">> => [string()], %% <<"XMailer">> => [string()], diff --git a/src/aws_securitylake.erl b/src/aws_securitylake.erl index b2e92187..db4c7a8f 100644 --- a/src/aws_securitylake.erl +++ b/src/aws_securitylake.erl @@ -46,16 +46,16 @@ %% %% Security Lake automates the collection of security-related log and event %% data from -%% integrated Amazon Web Services and third-party services. It also helps you -%% manage +%% integrated Amazon Web Services services and third-party services. It also +%% helps you manage %% the lifecycle of data with customizable retention and replication %% settings. Security Lake %% converts ingested data into Apache Parquet format and a standard %% open-source schema called %% the Open Cybersecurity Schema Framework (OCSF). %% -%% Other Amazon Web Services and third-party services can subscribe to the -%% data that's stored in Security Lake for +%% Other Amazon Web Services services and third-party services can subscribe +%% to the data that's stored in Security Lake for %% incident response and security data analytics. -module(aws_securitylake). @@ -1074,8 +1074,8 @@ %% API %%==================================================================== -%% @doc Adds a natively supported Amazon Web Service as an Amazon Security -%% Lake source. +%% @doc Adds a natively supported Amazon Web Services service as an Amazon +%% Security Lake source. 
%% %% Enables %% source types for member accounts in required Amazon Web Services Regions, @@ -1083,11 +1083,11 @@ %% parameters you specify. You can choose any source type in any Region for %% either accounts %% that are part of a trusted organization or standalone accounts. Once you -%% add an Amazon Web Service as a source, Security Lake starts collecting -%% logs and events from it. +%% add an Amazon Web Services service as a source, Security Lake starts +%% collecting logs and events from it. %% %% You can use this API only to enable natively supported Amazon Web Services -%% as a +%% services as a %% source. Use `CreateCustomLogSource' to enable data collection from a %% custom %% source. @@ -1189,7 +1189,8 @@ create_custom_log_source(Client, Input0, Options0) -> %% configurations. %% %% When you enable Security Lake, it starts ingesting security data after the -%% `CreateAwsLogSource' call. This includes ingesting security data from +%% `CreateAwsLogSource' call and after you create subscribers using the +%% `CreateSubscriber' API. This includes ingesting security data from %% sources, storing data, and making data accessible to subscribers. Security %% Lake also enables %% all the existing settings and resources that it stores or maintains for @@ -1234,6 +1235,9 @@ create_data_lake(Client, Input0, Options0) -> %% @doc Creates the specified notification subscription in Amazon Security %% Lake for the organization %% you specify. +%% +%% The notification subscription is created for exceptions that cannot be +%% resolved by Security Lake automatically. -spec create_data_lake_exception_subscription(aws_client:aws_client(), create_data_lake_exception_subscription_request()) -> {ok, create_data_lake_exception_subscription_response(), tuple()} | {error, any()} | @@ -1306,12 +1310,12 @@ create_data_lake_organization_configuration(Client, Input0, Options0) -> request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode). 
-%% @doc Creates a subscription permission for accounts that are already -%% enabled in -%% Amazon Security Lake. +%% @doc Creates a subscriber for accounts that are already enabled in Amazon +%% Security Lake. %% -%% You can create a subscriber with access to data in the current Amazon Web -%% Services Region. +%% You can +%% create a subscriber with access to data in the current Amazon Web Services +%% Region. -spec create_subscriber(aws_client:aws_client(), create_subscriber_request()) -> {ok, create_subscriber_response(), tuple()} | {error, any()} | @@ -1384,8 +1388,8 @@ create_subscriber_notification(Client, SubscriberId, Input0, Options0) -> request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode). -%% @doc Removes a natively supported Amazon Web Service as an Amazon Security -%% Lake source. +%% @doc Removes a natively supported Amazon Web Services service as an Amazon +%% Security Lake source. %% %% You %% can remove a source for one or more Regions. When you remove the source, @@ -1639,7 +1643,7 @@ delete_subscriber(Client, SubscriberId, Input0, Options0) -> request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode). -%% @doc Deletes the specified notification subscription in Amazon Security +%% @doc Deletes the specified subscription notification in Amazon Security %% Lake for the organization %% you specify. -spec delete_subscriber_notification(aws_client:aws_client(), binary() | list(), delete_subscriber_notification_request()) -> @@ -1715,8 +1719,8 @@ deregister_data_lake_delegated_administrator(Client, Input0, Options0) -> request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode). -%% @doc Retrieves the details of exception notifications for the account in -%% Amazon Security Lake. +%% @doc Retrieves the protocol and endpoint that were provided when +%% subscribing to Amazon SNS topics for exception notifications. 
-spec get_data_lake_exception_subscription(aws_client:aws_client()) -> {ok, get_data_lake_exception_subscription_response(), tuple()} | {error, any()} | @@ -1954,7 +1958,7 @@ list_data_lakes(Client, QueryMap, HeadersMap, Options0) request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode). -%% @doc Retrieves the log sources in the current Amazon Web Services Region. +%% @doc Retrieves the log sources. -spec list_log_sources(aws_client:aws_client(), list_log_sources_request()) -> {ok, list_log_sources_response(), tuple()} | {error, any()} | @@ -1988,7 +1992,7 @@ list_log_sources(Client, Input0, Options0) -> request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode). -%% @doc List all subscribers for the specific Amazon Security Lake account +%% @doc Lists all subscribers for the specific Amazon Security Lake account %% ID. %% %% You can retrieve a list @@ -2204,10 +2208,39 @@ untag_resource(Client, ResourceArn, Input0, Options0) -> {Query_, Input} = aws_request:build_headers(QueryMapping, Input2), request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode). -%% @doc Specifies where to store your security data and for how long. +%% @doc You can use `UpdateDataLake' to specify where to store your +%% security data, how it should +%% be encrypted at rest and for how long. +%% +%% You can add a Rollup +%% Region: +%% https://docs.aws.amazon.com/security-lake/latest/userguide/manage-regions.html#add-rollup-region +%% to consolidate data from multiple Amazon Web Services Regions, replace +%% default encryption (SSE-S3) with Customer Managed Key: +%% https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#customer-cmk, +%% or specify transition and expiration actions through storage Lifecycle +%% management: +%% https://docs.aws.amazon.com/security-lake/latest/userguide/lifecycle-management.html. 
+%% The `UpdateDataLake' API works as an "upsert" operation that +%% performs an insert if the specified item or record does not exist, or an +%% update if it +%% already exists. Security Lake securely stores your data at rest using +%% Amazon Web Services encryption solutions. For more details, see Data +%% protection in Amazon Security Lake: +%% https://docs.aws.amazon.com/security-lake/latest/userguide/data-protection.html. +%% +%% For example, omitting the key `encryptionConfiguration' from a Region +%% that is +%% included in an update call that currently uses KMS will leave that +%% Region's KMS key in +%% place, but specifying `encryptionConfiguration: {kmsKeyId: +%% 'S3_MANAGED_KEY'}' +%% for that same Region will reset the key to `S3-managed'. %% -%% You can add a rollup -%% Region to consolidate data from multiple Amazon Web Services Regions. +%% For more details about lifecycle management and how to update retention +%% settings for one or more Regions after enabling Security Lake, see the +%% Amazon Security Lake User Guide: +%% https://docs.aws.amazon.com/security-lake/latest/userguide/lifecycle-management.html. 
-spec update_data_lake(aws_client:aws_client(), update_data_lake_request()) -> {ok, update_data_lake_response(), tuple()} | {error, any()} | diff --git a/src/aws_supplychain.erl b/src/aws_supplychain.erl index d19a5562..cf90fccf 100644 --- a/src/aws_supplychain.erl +++ b/src/aws_supplychain.erl @@ -24,10 +24,14 @@ create_data_integration_flow/5, create_data_lake_dataset/5, create_data_lake_dataset/6, + create_instance/2, + create_instance/3, delete_data_integration_flow/4, delete_data_integration_flow/5, delete_data_lake_dataset/5, delete_data_lake_dataset/6, + delete_instance/3, + delete_instance/4, get_bill_of_materials_import_job/3, get_bill_of_materials_import_job/5, get_bill_of_materials_import_job/6, @@ -37,12 +41,18 @@ get_data_lake_dataset/4, get_data_lake_dataset/6, get_data_lake_dataset/7, + get_instance/2, + get_instance/4, + get_instance/5, list_data_integration_flows/2, list_data_integration_flows/4, list_data_integration_flows/5, list_data_lake_datasets/3, list_data_lake_datasets/5, list_data_lake_datasets/6, + list_instances/1, + list_instances/3, + list_instances/4, list_tags_for_resource/2, list_tags_for_resource/4, list_tags_for_resource/5, @@ -55,7 +65,9 @@ update_data_integration_flow/4, update_data_integration_flow/5, update_data_lake_dataset/5, - update_data_lake_dataset/6]). + update_data_lake_dataset/6, + update_instance/3, + update_instance/4]). -include_lib("hackney/include/hackney_lib.hrl"). @@ -137,6 +149,13 @@ -type create_data_lake_dataset_request() :: #{binary() => any()}. +%% Example: +%% create_instance_response() :: #{ +%% <<"instance">> => instance() +%% } +-type create_instance_response() :: #{binary() => any()}. + + %% Example: %% delete_data_integration_flow_response() :: #{ %% <<"instanceId">> => string(), @@ -181,6 +200,10 @@ %% } -type update_data_integration_flow_response() :: #{binary() => any()}. +%% Example: +%% get_instance_request() :: #{} +-type get_instance_request() :: #{}. 
+ %% Example: %% create_bill_of_materials_import_job_response() :: #{ @@ -265,6 +288,21 @@ -type get_data_integration_flow_request() :: #{}. +%% Example: +%% update_instance_request() :: #{ +%% <<"instanceDescription">> => string(), +%% <<"instanceName">> => string() +%% } +-type update_instance_request() :: #{binary() => any()}. + + +%% Example: +%% get_instance_response() :: #{ +%% <<"instance">> => instance() +%% } +-type get_instance_response() :: #{binary() => any()}. + + %% Example: %% data_lake_dataset_schema() :: #{ %% <<"fields">> => list(data_lake_dataset_schema_field()()), @@ -296,6 +334,16 @@ -type create_bill_of_materials_import_job_request() :: #{binary() => any()}. +%% Example: +%% list_instances_request() :: #{ +%% <<"instanceNameFilter">> => list(string()()), +%% <<"instanceStateFilter">> => list(list(any())()), +%% <<"maxResults">> => integer(), +%% <<"nextToken">> => string() +%% } +-type list_instances_request() :: #{binary() => any()}. + + %% Example: %% create_data_lake_dataset_response() :: #{ %% <<"dataset">> => data_lake_dataset() @@ -303,6 +351,14 @@ -type create_data_lake_dataset_response() :: #{binary() => any()}. +%% Example: +%% list_instances_response() :: #{ +%% <<"instances">> => list(instance()()), +%% <<"nextToken">> => string() +%% } +-type list_instances_response() :: #{binary() => any()}. + + %% Example: %% internal_server_exception() :: #{ %% <<"message">> => [string()] @@ -325,6 +381,33 @@ -type get_data_integration_flow_response() :: #{binary() => any()}. +%% Example: +%% create_instance_request() :: #{ +%% <<"clientToken">> => string(), +%% <<"instanceDescription">> => string(), +%% <<"instanceName">> => string(), +%% <<"kmsKeyArn">> => string(), +%% <<"tags">> => map() +%% } +-type create_instance_request() :: #{binary() => any()}. 
+ + +%% Example: +%% instance() :: #{ +%% <<"awsAccountId">> => string(), +%% <<"createdTime">> => [non_neg_integer()], +%% <<"instanceDescription">> => string(), +%% <<"instanceId">> => string(), +%% <<"instanceName">> => string(), +%% <<"kmsKeyArn">> => string(), +%% <<"lastModifiedTime">> => [non_neg_integer()], +%% <<"state">> => list(any()), +%% <<"versionNumber">> => [float()], +%% <<"webAppDnsDomain">> => string() +%% } +-type instance() :: #{binary() => any()}. + + %% Example: %% access_denied_exception() :: #{ %% <<"message">> => [string()] @@ -336,6 +419,13 @@ -type tag_resource_response() :: #{}. +%% Example: +%% delete_instance_response() :: #{ +%% <<"instance">> => instance() +%% } +-type delete_instance_response() :: #{binary() => any()}. + + %% Example: %% send_data_integration_event_response() :: #{ %% <<"eventId">> => string() @@ -360,6 +450,10 @@ %% } -type throttling_exception() :: #{binary() => any()}. +%% Example: +%% delete_instance_request() :: #{} +-type delete_instance_request() :: #{}. + %% Example: %% list_data_integration_flows_response() :: #{ @@ -410,6 +504,13 @@ -type list_data_integration_flows_request() :: #{binary() => any()}. +%% Example: +%% update_instance_response() :: #{ +%% <<"instance">> => instance() +%% } +-type update_instance_response() :: #{binary() => any()}. + + %% Example: %% update_data_integration_flow_request() :: #{ %% <<"sources">> => list(data_integration_flow_source()()), @@ -491,6 +592,14 @@ service_quota_exceeded_exception() | conflict_exception(). +-type create_instance_errors() :: + throttling_exception() | + validation_exception() | + access_denied_exception() | + internal_server_exception() | + service_quota_exceeded_exception() | + conflict_exception(). + -type delete_data_integration_flow_errors() :: throttling_exception() | access_denied_exception() | @@ -504,6 +613,13 @@ internal_server_exception() | resource_not_found_exception(). 
+-type delete_instance_errors() :: + throttling_exception() | + validation_exception() | + access_denied_exception() | + internal_server_exception() | + resource_not_found_exception(). + -type get_bill_of_materials_import_job_errors() :: throttling_exception() | validation_exception() | @@ -525,6 +641,13 @@ internal_server_exception() | resource_not_found_exception(). +-type get_instance_errors() :: + throttling_exception() | + validation_exception() | + access_denied_exception() | + internal_server_exception() | + resource_not_found_exception(). + -type list_data_integration_flows_errors() :: throttling_exception() | validation_exception() | @@ -538,6 +661,12 @@ internal_server_exception() | resource_not_found_exception(). +-type list_instances_errors() :: + throttling_exception() | + validation_exception() | + access_denied_exception() | + internal_server_exception(). + -type list_tags_for_resource_errors() :: throttling_exception() | validation_exception() | @@ -582,6 +711,13 @@ internal_server_exception() | resource_not_found_exception(). +-type update_instance_errors() :: + throttling_exception() | + validation_exception() | + access_denied_exception() | + internal_server_exception() | + resource_not_found_exception(). + %%==================================================================== %% API %%==================================================================== @@ -697,6 +833,46 @@ create_data_lake_dataset(Client, InstanceId, Name, Namespace, Input0, Options0) request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode). +%% @doc Create a new instance for AWS Supply Chain. +%% +%% This is an asynchronous operation. Upon receiving a CreateInstance +%% request, AWS Supply Chain immediately returns the instance resource, with +%% instance ID, and the initializing state while simultaneously creating all +%% required Amazon Web Services resources for an instance creation. 
You can +%% use GetInstance to check the status of the instance. +-spec create_instance(aws_client:aws_client(), create_instance_request()) -> + {ok, create_instance_response(), tuple()} | + {error, any()} | + {error, create_instance_errors(), tuple()}. +create_instance(Client, Input) -> + create_instance(Client, Input, []). + +-spec create_instance(aws_client:aws_client(), create_instance_request(), proplists:proplist()) -> + {ok, create_instance_response(), tuple()} | + {error, any()} | + {error, create_instance_errors(), tuple()}. +create_instance(Client, Input0, Options0) -> + Method = post, + Path = ["/api/instance"], + SuccessStatusCode = 200, + {SendBodyAsBinary, Options1} = proplists_take(send_body_as_binary, Options0, false), + {ReceiveBodyAsBinary, Options2} = proplists_take(receive_body_as_binary, Options1, false), + Options = [{send_body_as_binary, SendBodyAsBinary}, + {receive_body_as_binary, ReceiveBodyAsBinary}, + {append_sha256_content_hash, false} + | Options2], + + Headers = [], + Input1 = Input0, + + CustomHeaders = [], + Input2 = Input1, + + Query_ = [], + Input = Input2, + + request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode). + %% @doc Delete the DataIntegrationFlow. -spec delete_data_integration_flow(aws_client:aws_client(), binary() | list(), binary() | list(), delete_data_integration_flow_request()) -> {ok, delete_data_integration_flow_response(), tuple()} | @@ -765,6 +941,46 @@ delete_data_lake_dataset(Client, InstanceId, Name, Namespace, Input0, Options0) request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode). +%% @doc Delete the instance. +%% +%% This is an asynchronous operation. Upon receiving a DeleteInstance +%% request, AWS Supply Chain immediately returns a response with the instance +%% resource, delete state while cleaning up all Amazon Web Services resources +%% created during the instance creation process. 
You can use the GetInstance +%% action to check the instance status. +-spec delete_instance(aws_client:aws_client(), binary() | list(), delete_instance_request()) -> + {ok, delete_instance_response(), tuple()} | + {error, any()} | + {error, delete_instance_errors(), tuple()}. +delete_instance(Client, InstanceId, Input) -> + delete_instance(Client, InstanceId, Input, []). + +-spec delete_instance(aws_client:aws_client(), binary() | list(), delete_instance_request(), proplists:proplist()) -> + {ok, delete_instance_response(), tuple()} | + {error, any()} | + {error, delete_instance_errors(), tuple()}. +delete_instance(Client, InstanceId, Input0, Options0) -> + Method = delete, + Path = ["/api/instance/", aws_util:encode_uri(InstanceId), ""], + SuccessStatusCode = 200, + {SendBodyAsBinary, Options1} = proplists_take(send_body_as_binary, Options0, false), + {ReceiveBodyAsBinary, Options2} = proplists_take(receive_body_as_binary, Options1, false), + Options = [{send_body_as_binary, SendBodyAsBinary}, + {receive_body_as_binary, ReceiveBodyAsBinary}, + {append_sha256_content_hash, false} + | Options2], + + Headers = [], + Input1 = Input0, + + CustomHeaders = [], + Input2 = Input1, + + Query_ = [], + Input = Input2, + + request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode). + %% @doc Get status and details of a BillOfMaterialsImportJob. -spec get_bill_of_materials_import_job(aws_client:aws_client(), binary() | list(), binary() | list()) -> {ok, get_bill_of_materials_import_job_response(), tuple()} | @@ -876,6 +1092,43 @@ get_data_lake_dataset(Client, InstanceId, Name, Namespace, QueryMap, HeadersMap, request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode). +%% @doc Get the AWS Supply Chain instance details. +-spec get_instance(aws_client:aws_client(), binary() | list()) -> + {ok, get_instance_response(), tuple()} | + {error, any()} | + {error, get_instance_errors(), tuple()}. 
+get_instance(Client, InstanceId) + when is_map(Client) -> + get_instance(Client, InstanceId, #{}, #{}). + +-spec get_instance(aws_client:aws_client(), binary() | list(), map(), map()) -> + {ok, get_instance_response(), tuple()} | + {error, any()} | + {error, get_instance_errors(), tuple()}. +get_instance(Client, InstanceId, QueryMap, HeadersMap) + when is_map(Client), is_map(QueryMap), is_map(HeadersMap) -> + get_instance(Client, InstanceId, QueryMap, HeadersMap, []). + +-spec get_instance(aws_client:aws_client(), binary() | list(), map(), map(), proplists:proplist()) -> + {ok, get_instance_response(), tuple()} | + {error, any()} | + {error, get_instance_errors(), tuple()}. +get_instance(Client, InstanceId, QueryMap, HeadersMap, Options0) + when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) -> + Path = ["/api/instance/", aws_util:encode_uri(InstanceId), ""], + SuccessStatusCode = 200, + {SendBodyAsBinary, Options1} = proplists_take(send_body_as_binary, Options0, false), + {ReceiveBodyAsBinary, Options2} = proplists_take(receive_body_as_binary, Options1, false), + Options = [{send_body_as_binary, SendBodyAsBinary}, + {receive_body_as_binary, ReceiveBodyAsBinary} + | Options2], + + Headers = [], + + Query_ = [], + + request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode). + %% @doc Lists all the DataIntegrationFlows in a paginated way. -spec list_data_integration_flows(aws_client:aws_client(), binary() | list()) -> {ok, list_data_integration_flows_response(), tuple()} | @@ -960,6 +1213,50 @@ list_data_lake_datasets(Client, InstanceId, Namespace, QueryMap, HeadersMap, Opt request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode). +%% @doc List all the AWS Supply Chain instances in a paginated way. +-spec list_instances(aws_client:aws_client()) -> + {ok, list_instances_response(), tuple()} | + {error, any()} | + {error, list_instances_errors(), tuple()}. 
+list_instances(Client) + when is_map(Client) -> + list_instances(Client, #{}, #{}). + +-spec list_instances(aws_client:aws_client(), map(), map()) -> + {ok, list_instances_response(), tuple()} | + {error, any()} | + {error, list_instances_errors(), tuple()}. +list_instances(Client, QueryMap, HeadersMap) + when is_map(Client), is_map(QueryMap), is_map(HeadersMap) -> + list_instances(Client, QueryMap, HeadersMap, []). + +-spec list_instances(aws_client:aws_client(), map(), map(), proplists:proplist()) -> + {ok, list_instances_response(), tuple()} | + {error, any()} | + {error, list_instances_errors(), tuple()}. +list_instances(Client, QueryMap, HeadersMap, Options0) + when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) -> + Path = ["/api/instance"], + SuccessStatusCode = 200, + {SendBodyAsBinary, Options1} = proplists_take(send_body_as_binary, Options0, false), + {ReceiveBodyAsBinary, Options2} = proplists_take(receive_body_as_binary, Options1, false), + Options = [{send_body_as_binary, SendBodyAsBinary}, + {receive_body_as_binary, ReceiveBodyAsBinary} + | Options2], + + Headers = [], + + Query0_ = + [ + {<<"instanceNameFilter">>, maps:get(<<"instanceNameFilter">>, QueryMap, undefined)}, + {<<"instanceStateFilter">>, maps:get(<<"instanceStateFilter">>, QueryMap, undefined)}, + {<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)}, + {<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)} + ], + Query_ = [H || {_, V} = H <- Query0_, V =/= undefined], + + request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode). + %% @doc List all the tags for an Amazon Web ServicesSupply Chain resource. 
-spec list_tags_for_resource(aws_client:aws_client(), binary() | list()) -> {ok, list_tags_for_resource_response(), tuple()} | @@ -1174,6 +1471,40 @@ update_data_lake_dataset(Client, InstanceId, Name, Namespace, Input0, Options0) request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode). +%% @doc Update the instance. +-spec update_instance(aws_client:aws_client(), binary() | list(), update_instance_request()) -> + {ok, update_instance_response(), tuple()} | + {error, any()} | + {error, update_instance_errors(), tuple()}. +update_instance(Client, InstanceId, Input) -> + update_instance(Client, InstanceId, Input, []). + +-spec update_instance(aws_client:aws_client(), binary() | list(), update_instance_request(), proplists:proplist()) -> + {ok, update_instance_response(), tuple()} | + {error, any()} | + {error, update_instance_errors(), tuple()}. +update_instance(Client, InstanceId, Input0, Options0) -> + Method = patch, + Path = ["/api/instance/", aws_util:encode_uri(InstanceId), ""], + SuccessStatusCode = 200, + {SendBodyAsBinary, Options1} = proplists_take(send_body_as_binary, Options0, false), + {ReceiveBodyAsBinary, Options2} = proplists_take(receive_body_as_binary, Options1, false), + Options = [{send_body_as_binary, SendBodyAsBinary}, + {receive_body_as_binary, ReceiveBodyAsBinary}, + {append_sha256_content_hash, false} + | Options2], + + Headers = [], + Input1 = Input0, + + CustomHeaders = [], + Input2 = Input1, + + Query_ = [], + Input = Input2, + + request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode). 
+ %%==================================================================== %% Internal functions %%==================================================================== diff --git a/src/aws_transfer.erl b/src/aws_transfer.erl index 9d9d787d..f9981c9a 100644 --- a/src/aws_transfer.erl +++ b/src/aws_transfer.erl @@ -16,8 +16,8 @@ %% Amazon Route 53 so %% nothing changes for your customers and partners, or their applications. %% With your data in -%% Amazon S3, you can use it with Amazon Web Services for processing, -%% analytics, machine learning, and +%% Amazon S3, you can use it with Amazon Web Services services for +%% processing, analytics, machine learning, and %% archiving. Getting started with Transfer Family is easy since there is no %% infrastructure to buy and %% set up. @@ -95,6 +95,8 @@ list_connectors/3, list_executions/2, list_executions/3, + list_file_transfer_results/2, + list_file_transfer_results/3, list_host_keys/2, list_host_keys/3, list_profiles/2, @@ -472,6 +474,15 @@ %% } -type user_details() :: #{binary() => any()}. +%% Example: +%% connector_file_transfer_result() :: #{ +%% <<"FailureCode">> => string(), +%% <<"FailureMessage">> => string(), +%% <<"FilePath">> => string(), +%% <<"StatusCode">> => list(any()) +%% } +-type connector_file_transfer_result() :: #{binary() => any()}. + %% Example: %% describe_execution_response() :: #{ %% <<"Execution">> => described_execution(), @@ -645,6 +656,15 @@ %% } -type create_profile_request() :: #{binary() => any()}. +%% Example: +%% list_file_transfer_results_request() :: #{ +%% <<"ConnectorId">> := string(), +%% <<"MaxResults">> => integer(), +%% <<"NextToken">> => string(), +%% <<"TransferId">> := string() +%% } +-type list_file_transfer_results_request() :: #{binary() => any()}. + %% Example: %% described_user() :: #{ %% <<"Arn">> => string(), @@ -891,6 +911,13 @@ %% } -type service_unavailable_exception() :: #{binary() => any()}. 
+%% Example: +%% list_file_transfer_results_response() :: #{ +%% <<"FileTransferResults">> => list(connector_file_transfer_result()()), +%% <<"NextToken">> => string() +%% } +-type list_file_transfer_results_response() :: #{binary() => any()}. + %% Example: %% tag_step_details() :: #{ %% <<"Name">> => string(), @@ -1787,6 +1814,12 @@ invalid_request_exception() | resource_not_found_exception(). +-type list_file_transfer_results_errors() :: + internal_service_error() | + service_unavailable_exception() | + invalid_request_exception() | + resource_not_found_exception(). + -type list_host_keys_errors() :: internal_service_error() | service_unavailable_exception() | @@ -2722,6 +2755,31 @@ list_executions(Client, Input, Options) when is_map(Client), is_map(Input), is_list(Options) -> request(Client, <<"ListExecutions">>, Input, Options). +%% @doc +%% Returns real-time updates and detailed information on the status of each +%% individual file being transferred in a specific file transfer operation. +%% +%% You specify the file transfer by providing its `ConnectorId' and its +%% `TransferId'. +%% +%% File transfer results are available up to 7 days after an operation has +%% been requested. +-spec list_file_transfer_results(aws_client:aws_client(), list_file_transfer_results_request()) -> + {ok, list_file_transfer_results_response(), tuple()} | + {error, any()} | + {error, list_file_transfer_results_errors(), tuple()}. +list_file_transfer_results(Client, Input) + when is_map(Client), is_map(Input) -> + list_file_transfer_results(Client, Input, []). + +-spec list_file_transfer_results(aws_client:aws_client(), list_file_transfer_results_request(), proplists:proplist()) -> + {ok, list_file_transfer_results_response(), tuple()} | + {error, any()} | + {error, list_file_transfer_results_errors(), tuple()}. 
+list_file_transfer_results(Client, Input, Options) + when is_map(Client), is_map(Input), is_list(Options) -> + request(Client, <<"ListFileTransferResults">>, Input, Options). + %% @doc Returns a list of host keys for the server that's specified by %% the `ServerId' %% parameter.