Skip to content

Commit

Permalink
Update services based on v1.44.308 of AWS Go SDK
Browse files Browse the repository at this point in the history
  • Loading branch information
web-flow committed Jul 26, 2023
1 parent 590ebac commit 63af407
Show file tree
Hide file tree
Showing 7 changed files with 194 additions and 38 deletions.
2 changes: 1 addition & 1 deletion .latest-tag-aws-sdk-go
Original file line number Diff line number Diff line change
@@ -1 +1 @@
v1.44.307
v1.44.308
66 changes: 66 additions & 0 deletions lib/aws/generated/customer_profiles.ex
Original file line number Diff line number Diff line change
Expand Up @@ -732,6 +732,39 @@ defmodule AWS.CustomerProfiles do
Request.request_rest(client, meta, :get, url_path, query_params, headers, nil, options, nil)
end

@doc """
Returns a set of profiles that belong to the same matching group using the
`matchId` or `profileId`.
You can also specify the type of matching that you want for finding similar
profiles using either `RULE_BASED_MATCHING` or `ML_BASED_MATCHING`.
"""
def get_similar_profiles(%Client{} = client, domain_name, input, options \\ []) do
url_path = "/domains/#{AWS.Util.encode_uri(domain_name)}/matches"
headers = []

{query_params, input} =
[
{"MaxResults", "max-results"},
{"NextToken", "next-token"}
]
|> Request.build_params(input)

meta = metadata()

Request.request_rest(
client,
meta,
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end

@doc """
Get details of specified workflow.
"""
Expand Down Expand Up @@ -1113,6 +1146,39 @@ defmodule AWS.CustomerProfiles do
)
end

@doc """
Returns a set of `MatchIds` that belong to the given domain.
"""
def list_rule_based_matches(
%Client{} = client,
domain_name,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/domains/#{AWS.Util.encode_uri(domain_name)}/profiles/ruleBasedMatches"
headers = []
query_params = []

query_params =
if !is_nil(next_token) do
[{"next-token", next_token} | query_params]
else
query_params
end

query_params =
if !is_nil(max_results) do
[{"max-results", max_results} | query_params]
else
query_params
end

meta = metadata()

Request.request_rest(client, meta, :get, url_path, query_params, headers, nil, options, nil)
end

@doc """
Displays the tags associated with an Amazon Connect Customer Profiles resource.
Expand Down
49 changes: 43 additions & 6 deletions lib/aws/generated/data_sync.ex
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,22 @@ defmodule AWS.DataSync do
Request.request_post(client, meta, "CreateAgent", input, options)
end

@doc """
Creates an endpoint for a Microsoft Azure Blob Storage container that DataSync
can use as a transfer source or destination.
Before you begin, make sure you know [how DataSync accesses Azure Blob Storage](https://docs.aws.amazon.com/datasync/latest/userguide/creating-azure-blob-location.html#azure-blob-access)
and works with [access tiers](https://docs.aws.amazon.com/datasync/latest/userguide/creating-azure-blob-location.html#azure-blob-access-tiers)
and [blob types](https://docs.aws.amazon.com/datasync/latest/userguide/creating-azure-blob-location.html#blob-types).
You also need a [DataSync agent](https://docs.aws.amazon.com/datasync/latest/userguide/creating-azure-blob-location.html#azure-blob-creating-agent)
that can connect to your container.
"""
def create_location_azure_blob(%Client{} = client, input, options \\ []) do
meta = metadata()

Request.request_post(client, meta, "CreateLocationAzureBlob", input, options)
end

@doc """
Creates an endpoint for an Amazon EFS file system that DataSync can access for a
transfer.
Expand Down Expand Up @@ -150,8 +166,8 @@ defmodule AWS.DataSync do
end

@doc """
Defines a file system on a Network File System (NFS) server that can be read
from or written to.
Creates an endpoint for a Network File System (NFS) file server that DataSync
can use for a data transfer.
"""
def create_location_nfs(%Client{} = client, input, options \\ []) do
meta = metadata()
Expand Down Expand Up @@ -192,9 +208,10 @@ defmodule AWS.DataSync do

@doc """
Creates an endpoint for a Server Message Block (SMB) file server that DataSync
can access for a transfer.
can use for a data transfer.
For more information, see [Creating an SMB location](https://docs.aws.amazon.com/datasync/latest/userguide/create-smb-location.html).
Before you begin, make sure that you understand how DataSync [accesses an SMB file
server](https://docs.aws.amazon.com/datasync/latest/userguide/create-smb-location.html).
"""
def create_location_smb(%Client{} = client, input, options \\ []) do
meta = metadata()
Expand Down Expand Up @@ -271,6 +288,16 @@ defmodule AWS.DataSync do
Request.request_post(client, meta, "DescribeDiscoveryJob", input, options)
end

@doc """
Provides details about how an DataSync transfer location for Microsoft Azure
Blob Storage is configured.
"""
def describe_location_azure_blob(%Client{} = client, input, options \\ []) do
meta = metadata()

Request.request_post(client, meta, "DescribeLocationAzureBlob", input, options)
end

@doc """
Returns metadata about your DataSync location for an Amazon EFS file system.
"""
Expand Down Expand Up @@ -404,7 +431,7 @@ defmodule AWS.DataSync do
end

@doc """
Returns metadata about a task.
Provides information about a DataSync transfer task.
"""
def describe_task(%Client{} = client, input, options \\ []) do
meta = metadata()
Expand All @@ -413,7 +440,7 @@ defmodule AWS.DataSync do
end

@doc """
Returns detailed metadata about a task that is being executed.
Provides information about a DataSync transfer task that's running.
"""
def describe_task_execution(%Client{} = client, input, options \\ []) do
meta = metadata()
Expand Down Expand Up @@ -624,6 +651,16 @@ defmodule AWS.DataSync do
Request.request_post(client, meta, "UpdateDiscoveryJob", input, options)
end

@doc """
Modifies some configurations of the Microsoft Azure Blob Storage transfer
location that you're using with DataSync.
"""
def update_location_azure_blob(%Client{} = client, input, options \\ []) do
meta = metadata()

Request.request_post(client, meta, "UpdateLocationAzureBlob", input, options)
end

@doc """
Updates some parameters of a previously created location for a Hadoop
Distributed File System cluster.
Expand Down
47 changes: 31 additions & 16 deletions lib/aws/generated/dynamodb.ex
Original file line number Diff line number Diff line change
Expand Up @@ -1003,17 +1003,25 @@ defmodule AWS.DynamoDB do
To have DynamoDB return fewer items, you can provide a `FilterExpression`
operation.
If the total number of scanned items exceeds the maximum dataset size limit of 1
MB, the scan stops and results are returned to the user as a `LastEvaluatedKey`
value to continue the scan in a subsequent operation. The results also include
the number of items exceeding the limit. A scan can result in no table data
meeting the filter criteria.
A single `Scan` operation reads up to the maximum number of items set (if using
the `Limit` parameter) or a maximum of 1 MB of data and then apply any filtering
to the results using `FilterExpression`. If `LastEvaluatedKey` is present in the
response, you need to paginate the result set. For more information, see
[Paginating the Results](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Scan.html#Scan.Pagination)
If the total size of scanned items exceeds the maximum dataset size limit of 1
MB, the scan completes and results are returned to the user. The
`LastEvaluatedKey` value is also returned and the requestor can use the
`LastEvaluatedKey` to continue the scan in a subsequent operation. Each scan
response also includes the number of items that were scanned (`ScannedCount`) as part
of the request. If using a `FilterExpression`, a scan result can result in no
items meeting the criteria and the `Count` will result in zero. If you did not
use a `FilterExpression` in the scan request, then `Count` is the same as
`ScannedCount`.
`Count` and `ScannedCount` only return the count of items specific to a single
scan request and, unless the table is less than 1MB, do not represent the total
number of items in the table.
A single `Scan` operation first reads up to the maximum number of items set (if
using the `Limit` parameter) or a maximum of 1 MB of data and then applies any
filtering to the results if a `FilterExpression` is provided. If
`LastEvaluatedKey` is present in the response, pagination is required to
complete the full table scan. For more information, see [Paginating the Results](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Scan.html#Scan.Pagination)
in the *Amazon DynamoDB Developer Guide*.
`Scan` operations proceed sequentially; however, for faster performance on a
Expand All @@ -1022,11 +1030,18 @@ defmodule AWS.DynamoDB do
information, see [Parallel Scan](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Scan.html#Scan.ParallelScan)
in the *Amazon DynamoDB Developer Guide*.
`Scan` uses eventually consistent reads when accessing the data in a table;
therefore, the result set might not include the changes to data in the table
immediately before the operation began. If you need a consistent copy of the
data, as of the time that the `Scan` begins, you can set the `ConsistentRead`
parameter to `true`.
By default, a `Scan` uses eventually consistent reads when accessing the items
in a table. Therefore, the results from an eventually consistent `Scan` may not
include the latest item changes at the time the scan iterates through each item
in the table. If you require a strongly consistent read of each item as the scan
iterates through the items in the table, you can set the `ConsistentRead`
parameter to true. Strong consistency only relates to the consistency of the
read at the item level.
DynamoDB does not provide snapshot isolation for a scan operation when the
`ConsistentRead` parameter is set to true. Thus, a DynamoDB scan operation does
not guarantee that all reads in a scan see a consistent snapshot of the table
when the scan operation was requested.
"""
def scan(%Client{} = client, input, options \\ []) do
meta = metadata()
Expand Down
12 changes: 9 additions & 3 deletions lib/aws/generated/emr_serverless.ex
Original file line number Diff line number Diff line change
Expand Up @@ -131,10 +131,16 @@ defmodule AWS.EMRServerless do
end

@doc """
Returns a URL to access the job run dashboard.
Creates and returns a URL that you can use to access the application UIs for a
job run.
The generated URL is valid for one hour, after which you must invoke the API
again to generate a new URL.
For jobs in a running state, the application UI is a live user interface such as
the Spark or Tez web UI. For completed jobs, the application UI is a persistent
application user interface such as the Spark History Server or persistent Tez
UI.
The URL is valid for one hour after you generate it. To access the application
UI after that hour elapses, you must invoke the API again to generate a new URL.
"""
def get_dashboard_for_job_run(%Client{} = client, application_id, job_run_id, options \\ []) do
url_path =
Expand Down
12 changes: 6 additions & 6 deletions lib/aws/generated/rds.ex
Original file line number Diff line number Diff line change
Expand Up @@ -608,10 +608,10 @@ defmodule AWS.RDS do
primary cluster through high-speed replication performed by the Aurora storage
subsystem.
You can create a global database that is initially empty, and then add a primary
cluster and a secondary cluster to it. Or you can specify an existing Aurora
cluster during the create operation, and this cluster becomes the primary
cluster of the global database.
You can create a global database that is initially empty, and then create the
primary and secondary DB clusters in the global database. Or you can specify an
existing Aurora cluster during the create operation, and this cluster becomes
the primary cluster of the global database.
This operation applies only to Aurora DB clusters.
"""
Expand Down Expand Up @@ -1862,14 +1862,14 @@ defmodule AWS.RDS do
end

@doc """
Modify a setting for an Amazon Aurora global cluster.
Modifies a setting for an Amazon Aurora global cluster.
You can change one or more database configuration parameters by specifying these
parameters and the new values in the request. For more information on Amazon
Aurora, see [ What is Amazon Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide*.
This action only applies to Aurora DB clusters.
This operation only applies to Aurora global database clusters.
"""
def modify_global_cluster(%Client{} = client, input, options \\ []) do
meta = metadata()
Expand Down
44 changes: 38 additions & 6 deletions lib/aws/generated/transfer.ex
Original file line number Diff line number Diff line change
Expand Up @@ -73,10 +73,13 @@ defmodule AWS.Transfer do

@doc """
Creates the connector, which captures the parameters for an outbound connection
for the AS2 protocol.
for the AS2 or SFTP protocol.
The connector is required for sending files to an externally hosted AS2 server.
For more details about connectors, see [Create AS2 connectors](https://docs.aws.amazon.com/transfer/latest/userguide/create-b2b-server.html#configure-as2-connector).
The connector is required for sending files to an externally hosted AS2 or SFTP
server. For more details about AS2 connectors, see [Create AS2 connectors](https://docs.aws.amazon.com/transfer/latest/userguide/create-b2b-server.html#configure-as2-connector).
You must specify exactly one configuration object: either for AS2 (`As2Config`)
or SFTP (`SftpConfig`).
"""
def create_connector(%Client{} = client, input, options \\ []) do
meta = metadata()
Expand Down Expand Up @@ -167,7 +170,7 @@ defmodule AWS.Transfer do
end

@doc """
Deletes the agreement that's specified in the provided `ConnectorId`.
Deletes the connector that's specified in the provided `ConnectorId`.
"""
def delete_connector(%Client{} = client, input, options \\ []) do
meta = metadata()
Expand Down Expand Up @@ -543,9 +546,25 @@ defmodule AWS.Transfer do
end

@doc """
Begins an outbound file transfer to a remote AS2 server.
Begins a file transfer between local Amazon Web Services storage and a remote
AS2 or SFTP server.
* For an AS2 connector, you specify the `ConnectorId` and one or
more `SendFilePaths` to identify the files you want to transfer.
* For an SFTP connector, the file transfer can be either outbound or
inbound. In both cases, you specify the `ConnectorId`. Depending on the
direction of the transfer, you also specify the following items:
You specify the `ConnectorId` and the file paths for where to send the files.
* If you are transferring files from a partner's SFTP
server to a Transfer Family server, you specify one or more `RetrieveFilePaths`
to identify the files you want to transfer, and a `LocalDirectoryPath` to
specify the destination folder.
* If you are transferring files to a partner's SFTP
server from Amazon Web Services storage, you specify one or more `SendFilePaths`
to identify the files you want to transfer, and a `RemoteDirectoryPath` to
specify the destination folder.
"""
def start_file_transfer(%Client{} = client, input, options \\ []) do
meta = metadata()
Expand Down Expand Up @@ -609,6 +628,19 @@ defmodule AWS.Transfer do
Request.request_post(client, meta, "TagResource", input, options)
end

@doc """
Tests whether your SFTP connector is set up successfully.
We highly recommend that you call this operation to test your ability to
transfer files between a Transfer Family server and a trading partner's SFTP
server.
"""
def test_connection(%Client{} = client, input, options \\ []) do
meta = metadata()

Request.request_post(client, meta, "TestConnection", input, options)
end

@doc """
If the `IdentityProviderType` of a file transfer protocol-enabled server is
`AWS_DIRECTORY_SERVICE` or `API_Gateway`, tests whether your identity provider
Expand Down

0 comments on commit 63af407

Please sign in to comment.