Skip to content

Commit

Permalink
Update services based on release-2024-10-01 of AWS Go SDK
Browse files Browse the repository at this point in the history
  • Loading branch information
web-flow committed Oct 2, 2024
1 parent 852fd12 commit 351474b
Show file tree
Hide file tree
Showing 4 changed files with 90 additions and 19 deletions.
2 changes: 1 addition & 1 deletion .latest-tag-aws-sdk-go
Original file line number Diff line number Diff line change
@@ -1 +1 @@
release-2024-09-30
release-2024-10-01
92 changes: 78 additions & 14 deletions src/aws_bedrock_agent.erl
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,8 @@
prepare_flow/4,
start_ingestion_job/4,
start_ingestion_job/5,
stop_ingestion_job/5,
stop_ingestion_job/6,
tag_resource/3,
tag_resource/4,
untag_resource/3,
Expand Down Expand Up @@ -562,6 +564,10 @@
%% }
-type delete_flow_request() :: #{binary() => any()}.

%% Example:
%% stop_ingestion_job_request() :: #{}
%%
%% Request body for stop_ingestion_job/5,6. The StopIngestionJob
%% operation takes no body fields, so this map is always empty; all
%% identifying information travels in the URI path.
-type stop_ingestion_job_request() :: #{}.


%% Example:
%% update_knowledge_base_request() :: #{
Expand Down Expand Up @@ -1981,6 +1987,13 @@
-type list_flow_aliases_response() :: #{binary() => any()}.


%% Example:
%% stop_ingestion_job_response() :: #{
%%   <<"ingestionJob">> => ingestion_job()
%% }
%%
%% Response returned by stop_ingestion_job/5,6; carries the updated
%% ingestion-job description under the <<"ingestionJob">> key.
-type stop_ingestion_job_response() :: #{binary() => any()}.


%% Example:
%% get_agent_response() :: #{
%% <<"agent">> => agent()
Expand Down Expand Up @@ -2701,6 +2714,14 @@
resource_not_found_exception() |
conflict_exception().

%% Union of the service-declared error types that stop_ingestion_job/5,6
%% can surface in an {error, Reason, Tuple} result.
-type stop_ingestion_job_errors() ::
    throttling_exception() |
    validation_exception() |
    access_denied_exception() |
    internal_server_exception() |
    resource_not_found_exception() |
    conflict_exception().

-type tag_resource_errors() ::
throttling_exception() |
validation_exception() |
Expand Down Expand Up @@ -3149,13 +3170,13 @@ create_flow_version(Client, FlowIdentifier, Input0, Options0) ->

request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).

%% @doc Creates a knowledge base that contains data sources from which
%% information can be queried and used by LLMs.
%% @doc Creates a knowledge base.
%%
%% To create a knowledge base, you must first set up your data sources and
%% configure a supported vector store. For more information, see Set up your
%% data for ingestion:
%% https://docs.aws.amazon.com/bedrock/latest/userguide/knowledge-base-setup.html.
%% A knowledge base contains your data sources so that Large Language Models
%% (LLMs) can use your data. To create a knowledge base, you must first set
%% up your data sources and configure a supported vector store. For more
%% information, see Set up a knowledge base:
%% https://docs.aws.amazon.com/bedrock/latest/userguide/knowlege-base-prereq.html.
%%
%% If you prefer to let Amazon Bedrock create and manage a vector store for
%% you in Amazon OpenSearch Service, use the console. For more information,
Expand Down Expand Up @@ -4043,8 +4064,10 @@ get_flow_version(Client, FlowIdentifier, FlowVersion, QueryMap, HeadersMap, Opti

request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).

%% @doc Gets information about a ingestion job, in which a data source is
%% added to a knowledge base.
%% @doc Gets information about a data ingestion job.
%%
%% Data sources are ingested into your knowledge base so that Large Language
%% Models (LLMs) can use your data.
-spec get_ingestion_job(aws_client:aws_client(), binary() | list(), binary() | list(), binary() | list()) ->
{ok, get_ingestion_job_response(), tuple()} |
{error, any()} |
Expand Down Expand Up @@ -4509,8 +4532,9 @@ list_flows(Client, QueryMap, HeadersMap, Options0)

request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).

%% @doc Lists the ingestion jobs for a data source and information about each
%% of them.
%% @doc Lists the data ingestion jobs for a data source.
%%
%% The list also includes information about each job.
-spec list_ingestion_jobs(aws_client:aws_client(), binary() | list(), binary() | list(), list_ingestion_jobs_request()) ->
{ok, list_ingestion_jobs_response(), tuple()} |
{error, any()} |
Expand Down Expand Up @@ -4544,8 +4568,9 @@ list_ingestion_jobs(Client, DataSourceId, KnowledgeBaseId, Input0, Options0) ->

request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).

%% @doc Lists the knowledge bases in an account and information about each of
%% them.
%% @doc Lists the knowledge bases in an account.
%%
%% The list also includes information about each knowledge base.
-spec list_knowledge_bases(aws_client:aws_client(), list_knowledge_bases_request()) ->
{ok, list_knowledge_bases_response(), tuple()} |
{error, any()} |
Expand Down Expand Up @@ -4740,8 +4765,10 @@ prepare_flow(Client, FlowIdentifier, Input0, Options0) ->

request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).

%% @doc Begins an ingestion job, in which a data source is added to a
%% knowledge base.
%% @doc Begins a data ingestion job.
%%
%% Data sources are ingested into your knowledge base so that Large Language
%% Models (LLMs) can use your data.
-spec start_ingestion_job(aws_client:aws_client(), binary() | list(), binary() | list(), start_ingestion_job_request()) ->
{ok, start_ingestion_job_response(), tuple()} |
{error, any()} |
Expand Down Expand Up @@ -4775,6 +4802,43 @@ start_ingestion_job(Client, DataSourceId, KnowledgeBaseId, Input0, Options0) ->

request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).

%% @doc Stops a data ingestion job that is currently in progress.
%%
%% The remaining data can be ingested later by sending another
%% `StartIngestionJob' request when you are ready.
-spec stop_ingestion_job(aws_client:aws_client(), binary() | list(), binary() | list(), binary() | list(), stop_ingestion_job_request()) ->
    {ok, stop_ingestion_job_response(), tuple()} |
    {error, any()} |
    {error, stop_ingestion_job_errors(), tuple()}.
stop_ingestion_job(Client, DataSourceId, IngestionJobId, KnowledgeBaseId, Input) ->
    %% Convenience arity: no extra request options.
    stop_ingestion_job(Client, DataSourceId, IngestionJobId, KnowledgeBaseId, Input, []).

-spec stop_ingestion_job(aws_client:aws_client(), binary() | list(), binary() | list(), binary() | list(), stop_ingestion_job_request(), proplists:proplist()) ->
    {ok, stop_ingestion_job_response(), tuple()} |
    {error, any()} |
    {error, stop_ingestion_job_errors(), tuple()}.
stop_ingestion_job(Client, DataSourceId, IngestionJobId, KnowledgeBaseId, Input, Options0) ->
    %% POST /knowledgebases/{KbId}/datasources/{DsId}/ingestionjobs/{JobId}/stop
    Path = ["/knowledgebases/", aws_util:encode_uri(KnowledgeBaseId),
            "/datasources/", aws_util:encode_uri(DataSourceId),
            "/ingestionjobs/", aws_util:encode_uri(IngestionJobId),
            "/stop"],
    %% Extract the body-encoding flags from the caller's options and pass
    %% them through explicitly, as the other request helpers in this
    %% module do; defaults are iolist (non-binary) in both directions.
    {SendBodyAsBinary, Options1} = proplists_take(send_body_as_binary, Options0, false),
    {ReceiveBodyAsBinary, Options2} = proplists_take(receive_body_as_binary, Options1, false),
    Options = [{send_body_as_binary, SendBodyAsBinary},
               {receive_body_as_binary, ReceiveBodyAsBinary},
               {append_sha256_content_hash, false}
               | Options2],
    %% No custom headers or query parameters; success is HTTP 202 (Accepted).
    request(Client, post, Path, [], [], Input, Options, 202).

%% @doc Associate tags with a resource.
%%
%% For more information, see Tagging resources:
Expand Down
2 changes: 2 additions & 0 deletions src/aws_codeartifact.erl
Original file line number Diff line number Diff line change
Expand Up @@ -1278,6 +1278,7 @@
%% get_repository_endpoint_request() :: #{
%% <<"domain">> := string(),
%% <<"domainOwner">> => string(),
%% <<"endpointType">> => list(any()),
%% <<"format">> := list(any()),
%% <<"repository">> := string()
%% }
Expand Down Expand Up @@ -3241,6 +3242,7 @@ get_repository_endpoint(Client, Domain, Format, Repository, QueryMap, HeadersMap
[
{<<"domain">>, Domain},
{<<"domain-owner">>, maps:get(<<"domain-owner">>, QueryMap, undefined)},
{<<"endpointType">>, maps:get(<<"endpointType">>, QueryMap, undefined)},
{<<"format">>, Format},
{<<"repository">>, Repository}
],
Expand Down
13 changes: 9 additions & 4 deletions src/aws_rds.erl
Original file line number Diff line number Diff line change
Expand Up @@ -2350,7 +2350,8 @@
%% <<"DBShardGroupIdentifier">> := string(),
%% <<"MaxACU">> := float(),
%% <<"MinACU">> => float(),
%% <<"PubliclyAccessible">> => boolean()
%% <<"PubliclyAccessible">> => boolean(),
%% <<"Tags">> => list(tag()())
%% }
-type create_db_shard_group_message() :: #{binary() => any()}.

Expand Down Expand Up @@ -3024,7 +3025,8 @@
%% <<"MaxACU">> => float(),
%% <<"MinACU">> => float(),
%% <<"PubliclyAccessible">> => boolean(),
%% <<"Status">> => string()
%% <<"Status">> => string(),
%% <<"TagList">> => list(tag()())
%% }
-type db_shard_group() :: #{binary() => any()}.

Expand Down Expand Up @@ -3408,7 +3410,8 @@
%% <<"StorageEncrypted">> => boolean(),
%% <<"EnableGlobalWriteForwarding">> => boolean(),
%% <<"DBSystemId">> => string(),
%% <<"DeletionProtection">> => boolean()
%% <<"DeletionProtection">> => boolean(),
%% <<"ClusterScalabilityType">> => list(any())
%% }
-type create_db_cluster_message() :: #{binary() => any()}.

Expand Down Expand Up @@ -4057,7 +4060,8 @@
%% <<"ReadReplicaIdentifiers">> => list(string()()),
%% <<"DBSystemId">> => string(),
%% <<"MultiAZ">> => boolean(),
%% <<"DeletionProtection">> => boolean()
%% <<"DeletionProtection">> => boolean(),
%% <<"ClusterScalabilityType">> => list(any())
%% }
-type db_cluster() :: #{binary() => any()}.

Expand Down Expand Up @@ -5761,6 +5765,7 @@
db_cluster_not_found_fault() |
unsupported_db_engine_version_fault() |
invalid_db_cluster_state_fault() |
network_type_not_supported() |
invalid_vpc_network_state_fault() |
db_shard_group_already_exists_fault() |
max_db_shard_group_limit_reached().
Expand Down

0 comments on commit 351474b

Please sign in to comment.