From 4a4ed04b932e495d65d8180bcfa8ed074bf29013 Mon Sep 17 00:00:00 2001 From: awstools Date: Mon, 12 Jun 2023 18:16:04 +0000 Subject: [PATCH] docs(client-dynamodb): Documentation updates for DynamoDB --- .../client-dynamodb/src/commands/BatchGetItemCommand.ts | 4 ++-- .../client-dynamodb/src/commands/CreateBackupCommand.ts | 3 +++ .../src/commands/CreateGlobalTableCommand.ts | 3 +++ .../client-dynamodb/src/commands/CreateTableCommand.ts | 3 +++ .../client-dynamodb/src/commands/DeleteBackupCommand.ts | 3 +++ .../client-dynamodb/src/commands/DeleteTableCommand.ts | 3 +++ .../client-dynamodb/src/commands/DescribeExportCommand.ts | 3 +++ .../commands/DisableKinesisStreamingDestinationCommand.ts | 3 +++ .../commands/EnableKinesisStreamingDestinationCommand.ts | 3 +++ .../src/commands/ExportTableToPointInTimeCommand.ts | 3 +++ .../client-dynamodb/src/commands/ImportTableCommand.ts | 3 +++ .../client-dynamodb/src/commands/ListExportsCommand.ts | 3 +++ .../client-dynamodb/src/commands/ListImportsCommand.ts | 3 +++ .../src/commands/RestoreTableFromBackupCommand.ts | 3 +++ .../src/commands/RestoreTableToPointInTimeCommand.ts | 3 +++ .../client-dynamodb/src/commands/TagResourceCommand.ts | 3 +++ .../client-dynamodb/src/commands/UntagResourceCommand.ts | 3 +++ .../src/commands/UpdateGlobalTableSettingsCommand.ts | 3 +++ .../client-dynamodb/src/commands/UpdateTableCommand.ts | 3 +++ .../src/commands/UpdateTableReplicaAutoScalingCommand.ts | 3 +++ .../src/commands/UpdateTimeToLiveCommand.ts | 3 +++ clients/client-dynamodb/src/models/models_0.ts | 7 +++++-- codegen/sdk-codegen/aws-models/dynamodb.json | 8 ++++---- 23 files changed, 71 insertions(+), 8 deletions(-) diff --git a/clients/client-dynamodb/src/commands/BatchGetItemCommand.ts b/clients/client-dynamodb/src/commands/BatchGetItemCommand.ts index f4e32f726bf2..7cae3bbe7449 100644 --- a/clients/client-dynamodb/src/commands/BatchGetItemCommand.ts +++ b/clients/client-dynamodb/src/commands/BatchGetItemCommand.ts @@ -40,8 +40,8 @@ export interface BatchGetItemCommandOutput extends BatchGetItemOutput, __Metadat * from one or more tables. You identify requested items by primary key.

*

A single operation can retrieve up to 16 MB of data, which can contain as many as 100 * items. BatchGetItem returns a partial result if the response size limit is - * exceeded, the table's provisioned throughput is exceeded, or an internal processing - * failure occurs. If a partial result is returned, the operation returns a value for + * exceeded, the table's provisioned throughput is exceeded, more than 1 MB per partition is requested, + * or an internal processing failure occurs. If a partial result is returned, the operation returns a value for * UnprocessedKeys. You can use this value to retry the operation starting * with the next item to get.
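As a sketch of the retry pattern this paragraph recommends (the table and key in the usage comment are hypothetical), a caller can drain `UnprocessedKeys` with exponential backoff:

```ts
import {
  AttributeValue,
  BatchGetItemCommand,
  BatchGetItemCommandInput,
  DynamoDBClient,
} from "@aws-sdk/client-dynamodb";

const client = new DynamoDBClient({});

// Collects all requested items, retrying whatever comes back in
// UnprocessedKeys with exponential backoff, as the docs above advise.
async function batchGetAll(
  requestItems: BatchGetItemCommandInput["RequestItems"]
): Promise<Record<string, Record<string, AttributeValue>[]>> {
  const results: Record<string, Record<string, AttributeValue>[]> = {};
  let unprocessed = requestItems;
  for (let attempt = 0; unprocessed && Object.keys(unprocessed).length > 0; attempt++) {
    if (attempt > 0) {
      // Back off exponentially before retrying the unprocessed keys.
      await new Promise((r) => setTimeout(r, 100 * 2 ** attempt));
    }
    const out = await client.send(new BatchGetItemCommand({ RequestItems: unprocessed }));
    for (const [table, items] of Object.entries(out.Responses ?? {})) {
      (results[table] ??= []).push(...items);
    }
    unprocessed = out.UnprocessedKeys;
  }
  return results;
}

// Usage (hypothetical table and key):
// const items = await batchGetAll({
//   Music: { Keys: [{ Artist: { S: "No One You Know" } }] },
// });
```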

* diff --git a/clients/client-dynamodb/src/commands/CreateBackupCommand.ts b/clients/client-dynamodb/src/commands/CreateBackupCommand.ts index cb0b5f63b0e6..34278744cb2b 100644 --- a/clients/client-dynamodb/src/commands/CreateBackupCommand.ts +++ b/clients/client-dynamodb/src/commands/CreateBackupCommand.ts @@ -122,6 +122,9 @@ export interface CreateBackupCommandOutput extends CreateBackupOutput, __Metadat * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link TableInUseException} (client fault) *

A target table with the specified name is either being created or deleted. diff --git a/clients/client-dynamodb/src/commands/CreateGlobalTableCommand.ts b/clients/client-dynamodb/src/commands/CreateGlobalTableCommand.ts index 60efcd45c663..6713ebb99821 100644 --- a/clients/client-dynamodb/src/commands/CreateGlobalTableCommand.ts +++ b/clients/client-dynamodb/src/commands/CreateGlobalTableCommand.ts @@ -177,6 +177,9 @@ export interface CreateGlobalTableCommandOutput extends CreateGlobalTableOutput, * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link TableNotFoundException} (client fault) *

A source table with the name TableName does not currently exist within diff --git a/clients/client-dynamodb/src/commands/CreateTableCommand.ts b/clients/client-dynamodb/src/commands/CreateTableCommand.ts index 9ea67a9e2205..d719845c4231 100644 --- a/clients/client-dynamodb/src/commands/CreateTableCommand.ts +++ b/clients/client-dynamodb/src/commands/CreateTableCommand.ts @@ -295,6 +295,9 @@ export interface CreateTableCommandOutput extends CreateTableOutput, __MetadataB * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link ResourceInUseException} (client fault) *

The operation conflicts with the resource's availability. For example, you diff --git a/clients/client-dynamodb/src/commands/DeleteBackupCommand.ts b/clients/client-dynamodb/src/commands/DeleteBackupCommand.ts index e67788d41312..2e09c9b7f585 100644 --- a/clients/client-dynamodb/src/commands/DeleteBackupCommand.ts +++ b/clients/client-dynamodb/src/commands/DeleteBackupCommand.ts @@ -169,6 +169,9 @@ export interface DeleteBackupCommandOutput extends DeleteBackupOutput, __Metadat * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link DynamoDBServiceException} *

Base exception class for all service exceptions from DynamoDB service.

diff --git a/clients/client-dynamodb/src/commands/DeleteTableCommand.ts b/clients/client-dynamodb/src/commands/DeleteTableCommand.ts index 7d9cf0be70b9..005824443057 100644 --- a/clients/client-dynamodb/src/commands/DeleteTableCommand.ts +++ b/clients/client-dynamodb/src/commands/DeleteTableCommand.ts @@ -233,6 +233,9 @@ export interface DeleteTableCommandOutput extends DeleteTableOutput, __MetadataB * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link ResourceInUseException} (client fault) *

The operation conflicts with the resource's availability. For example, you diff --git a/clients/client-dynamodb/src/commands/DescribeExportCommand.ts b/clients/client-dynamodb/src/commands/DescribeExportCommand.ts index ca06b7df37f0..4a819d0dd170 100644 --- a/clients/client-dynamodb/src/commands/DescribeExportCommand.ts +++ b/clients/client-dynamodb/src/commands/DescribeExportCommand.ts @@ -98,6 +98,9 @@ export interface DescribeExportCommandOutput extends DescribeExportOutput, __Met * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link DynamoDBServiceException} *

Base exception class for all service exceptions from DynamoDB service.

diff --git a/clients/client-dynamodb/src/commands/DisableKinesisStreamingDestinationCommand.ts b/clients/client-dynamodb/src/commands/DisableKinesisStreamingDestinationCommand.ts index c87351ade46f..29a3ee8897e4 100644 --- a/clients/client-dynamodb/src/commands/DisableKinesisStreamingDestinationCommand.ts +++ b/clients/client-dynamodb/src/commands/DisableKinesisStreamingDestinationCommand.ts @@ -86,6 +86,9 @@ export interface DisableKinesisStreamingDestinationCommandOutput * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link ResourceInUseException} (client fault) *

The operation conflicts with the resource's availability. For example, you diff --git a/clients/client-dynamodb/src/commands/EnableKinesisStreamingDestinationCommand.ts b/clients/client-dynamodb/src/commands/EnableKinesisStreamingDestinationCommand.ts index 2c75871670ed..c6a42e1b1efb 100644 --- a/clients/client-dynamodb/src/commands/EnableKinesisStreamingDestinationCommand.ts +++ b/clients/client-dynamodb/src/commands/EnableKinesisStreamingDestinationCommand.ts @@ -88,6 +88,9 @@ export interface EnableKinesisStreamingDestinationCommandOutput * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link ResourceInUseException} (client fault) *

The operation conflicts with the resource's availability. For example, you diff --git a/clients/client-dynamodb/src/commands/ExportTableToPointInTimeCommand.ts b/clients/client-dynamodb/src/commands/ExportTableToPointInTimeCommand.ts index bf09c08c8cd2..f84ea4b519b3 100644 --- a/clients/client-dynamodb/src/commands/ExportTableToPointInTimeCommand.ts +++ b/clients/client-dynamodb/src/commands/ExportTableToPointInTimeCommand.ts @@ -112,6 +112,9 @@ export interface ExportTableToPointInTimeCommandOutput extends ExportTableToPoin * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link PointInTimeRecoveryUnavailableException} (client fault) *

Point in time recovery has not yet been enabled for this source table.

diff --git a/clients/client-dynamodb/src/commands/ImportTableCommand.ts b/clients/client-dynamodb/src/commands/ImportTableCommand.ts index fc516eef498f..75b8b6159faa 100644 --- a/clients/client-dynamodb/src/commands/ImportTableCommand.ts +++ b/clients/client-dynamodb/src/commands/ImportTableCommand.ts @@ -216,6 +216,9 @@ export interface ImportTableCommandOutput extends ImportTableOutput, __MetadataB * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link ResourceInUseException} (client fault) *

The operation conflicts with the resource's availability. For example, you diff --git a/clients/client-dynamodb/src/commands/ListExportsCommand.ts b/clients/client-dynamodb/src/commands/ListExportsCommand.ts index bc289c1ce99e..78deb852a8e6 100644 --- a/clients/client-dynamodb/src/commands/ListExportsCommand.ts +++ b/clients/client-dynamodb/src/commands/ListExportsCommand.ts @@ -83,6 +83,9 @@ export interface ListExportsCommandOutput extends ListExportsOutput, __MetadataB * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link DynamoDBServiceException} *

Base exception class for all service exceptions from DynamoDB service.

diff --git a/clients/client-dynamodb/src/commands/ListImportsCommand.ts b/clients/client-dynamodb/src/commands/ListImportsCommand.ts index 768b10f78456..2a0bd9ba3ab8 100644 --- a/clients/client-dynamodb/src/commands/ListImportsCommand.ts +++ b/clients/client-dynamodb/src/commands/ListImportsCommand.ts @@ -90,6 +90,9 @@ export interface ListImportsCommandOutput extends ListImportsOutput, __MetadataB * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link DynamoDBServiceException} *

Base exception class for all service exceptions from DynamoDB service.

diff --git a/clients/client-dynamodb/src/commands/RestoreTableFromBackupCommand.ts b/clients/client-dynamodb/src/commands/RestoreTableFromBackupCommand.ts index 3f00d3b1721a..d1d35a270afb 100644 --- a/clients/client-dynamodb/src/commands/RestoreTableFromBackupCommand.ts +++ b/clients/client-dynamodb/src/commands/RestoreTableFromBackupCommand.ts @@ -291,6 +291,9 @@ export interface RestoreTableFromBackupCommandOutput extends RestoreTableFromBac * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link TableAlreadyExistsException} (client fault) *

A target table with the specified name already exists.

diff --git a/clients/client-dynamodb/src/commands/RestoreTableToPointInTimeCommand.ts b/clients/client-dynamodb/src/commands/RestoreTableToPointInTimeCommand.ts index 8d1bf2bf2b4f..d3f5b5e28080 100644 --- a/clients/client-dynamodb/src/commands/RestoreTableToPointInTimeCommand.ts +++ b/clients/client-dynamodb/src/commands/RestoreTableToPointInTimeCommand.ts @@ -316,6 +316,9 @@ export interface RestoreTableToPointInTimeCommandOutput extends RestoreTableToPo * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link PointInTimeRecoveryUnavailableException} (client fault) *

Point in time recovery has not yet been enabled for this source table.

diff --git a/clients/client-dynamodb/src/commands/TagResourceCommand.ts b/clients/client-dynamodb/src/commands/TagResourceCommand.ts index a6adeecde7fe..d0d5f5de0ec1 100644 --- a/clients/client-dynamodb/src/commands/TagResourceCommand.ts +++ b/clients/client-dynamodb/src/commands/TagResourceCommand.ts @@ -86,6 +86,9 @@ export interface TagResourceCommandOutput extends __MetadataBearer {} * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link ResourceInUseException} (client fault) *

The operation conflicts with the resource's availability. For example, you diff --git a/clients/client-dynamodb/src/commands/UntagResourceCommand.ts b/clients/client-dynamodb/src/commands/UntagResourceCommand.ts index e381ac8a11bc..d95315c2597d 100644 --- a/clients/client-dynamodb/src/commands/UntagResourceCommand.ts +++ b/clients/client-dynamodb/src/commands/UntagResourceCommand.ts @@ -81,6 +81,9 @@ export interface UntagResourceCommandOutput extends __MetadataBearer {} * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link ResourceInUseException} (client fault) *

The operation conflicts with the resource's availability. For example, you diff --git a/clients/client-dynamodb/src/commands/UpdateGlobalTableSettingsCommand.ts b/clients/client-dynamodb/src/commands/UpdateGlobalTableSettingsCommand.ts index cd974ee4a8e9..696e04b7f9e4 100644 --- a/clients/client-dynamodb/src/commands/UpdateGlobalTableSettingsCommand.ts +++ b/clients/client-dynamodb/src/commands/UpdateGlobalTableSettingsCommand.ts @@ -254,6 +254,9 @@ export interface UpdateGlobalTableSettingsCommandOutput extends UpdateGlobalTabl * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link ReplicaNotFoundException} (client fault) *

The specified replica is no longer part of the global table.

diff --git a/clients/client-dynamodb/src/commands/UpdateTableCommand.ts b/clients/client-dynamodb/src/commands/UpdateTableCommand.ts index 50a145084373..5b9a6e10dcae 100644 --- a/clients/client-dynamodb/src/commands/UpdateTableCommand.ts +++ b/clients/client-dynamodb/src/commands/UpdateTableCommand.ts @@ -331,6 +331,9 @@ export interface UpdateTableCommandOutput extends UpdateTableOutput, __MetadataB * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link ResourceInUseException} (client fault) *

The operation conflicts with the resource's availability. For example, you diff --git a/clients/client-dynamodb/src/commands/UpdateTableReplicaAutoScalingCommand.ts b/clients/client-dynamodb/src/commands/UpdateTableReplicaAutoScalingCommand.ts index 8e3b14aed558..a2091ff07651 100644 --- a/clients/client-dynamodb/src/commands/UpdateTableReplicaAutoScalingCommand.ts +++ b/clients/client-dynamodb/src/commands/UpdateTableReplicaAutoScalingCommand.ts @@ -215,6 +215,9 @@ export interface UpdateTableReplicaAutoScalingCommandOutput * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link ResourceInUseException} (client fault) *

The operation conflicts with the resource's availability. For example, you diff --git a/clients/client-dynamodb/src/commands/UpdateTimeToLiveCommand.ts b/clients/client-dynamodb/src/commands/UpdateTimeToLiveCommand.ts index 4206560fed80..81e5de552ffd 100644 --- a/clients/client-dynamodb/src/commands/UpdateTimeToLiveCommand.ts +++ b/clients/client-dynamodb/src/commands/UpdateTimeToLiveCommand.ts @@ -108,6 +108,9 @@ export interface UpdateTimeToLiveCommandOutput extends UpdateTimeToLiveOutput, _ * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

* * @throws {@link ResourceInUseException} (client fault) *

The operation conflicts with the resource's availability. For example, you diff --git a/clients/client-dynamodb/src/models/models_0.ts b/clients/client-dynamodb/src/models/models_0.ts index 07aa9bafa28b..bfcf3d1539ff 100644 --- a/clients/client-dynamodb/src/models/models_0.ts +++ b/clients/client-dynamodb/src/models/models_0.ts @@ -452,7 +452,7 @@ export interface KeySchemaElement { export interface ProvisionedThroughput { /** *

The maximum number of strongly consistent reads consumed per second before DynamoDB - * returns a ThrottlingException. For more information, see Specifying Read and Write Requirements in the Amazon DynamoDB + * returns a ThrottlingException. For more information, see Specifying Read and Write Requirements in the Amazon DynamoDB * Developer Guide.

*

If read/write capacity mode is PAY_PER_REQUEST the value is set to * 0.

@@ -461,7 +461,7 @@ export interface ProvisionedThroughput { /** *

The maximum number of writes consumed per second before DynamoDB returns a - * ThrottlingException. For more information, see Specifying Read and Write Requirements in the Amazon DynamoDB + * ThrottlingException. For more information, see Specifying Read and Write Requirements in the Amazon DynamoDB * Developer Guide.

*

If read/write capacity mode is PAY_PER_REQUEST the value is set to * 0.
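A minimal sketch of how these two settings are supplied together, assuming a hypothetical table named `Music` (5 strongly consistent reads and 5 writes per second before DynamoDB begins returning `ThrottlingException`):

```ts
import { CreateTableCommand, DynamoDBClient } from "@aws-sdk/client-dynamodb";

const client = new DynamoDBClient({});

// Hypothetical provisioned-mode table; omit ProvisionedThroughput and set
// BillingMode: "PAY_PER_REQUEST" for on-demand capacity instead.
await client.send(
  new CreateTableCommand({
    TableName: "Music",
    AttributeDefinitions: [{ AttributeName: "Artist", AttributeType: "S" }],
    KeySchema: [{ AttributeName: "Artist", KeyType: "HASH" }],
    ProvisionedThroughput: { ReadCapacityUnits: 5, WriteCapacityUnits: 5 },
  })
);
```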

@@ -1615,6 +1615,9 @@ export interface CreateBackupOutput { * of concurrent operations.

*

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

*

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.
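Because this exception class is exported by the client, callers can branch on it directly. A minimal sketch (the input value and the retry policy are assumptions, not SDK behavior):

```ts
import {
  CreateTableCommand,
  CreateTableCommandInput,
  DynamoDBClient,
  LimitExceededException,
} from "@aws-sdk/client-dynamodb";

const client = new DynamoDBClient({});

// tableParams stands in for a fully formed, hypothetical CreateTable input.
declare const tableParams: CreateTableCommandInput;

try {
  await client.send(new CreateTableCommand(tableParams));
} catch (err) {
  if (err instanceof LimitExceededException) {
    // One of the conditions documented above (too many concurrent table
    // operations, a GetRecords limit above 1000, or more than 2 readers on
    // one streams shard): back off and retry rather than failing hard.
    console.warn("LimitExceededException, retrying later:", err.message);
  } else {
    throw err;
  }
}
```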

*/ export class LimitExceededException extends __BaseException { readonly name: "LimitExceededException" = "LimitExceededException"; diff --git a/codegen/sdk-codegen/aws-models/dynamodb.json b/codegen/sdk-codegen/aws-models/dynamodb.json index c372005d4665..dfe5543d9d69 100644 --- a/codegen/sdk-codegen/aws-models/dynamodb.json +++ b/codegen/sdk-codegen/aws-models/dynamodb.json @@ -833,7 +833,7 @@ "aws.api#clientDiscoveredEndpoint": { "required": false }, - "smithy.api#documentation": "

The BatchGetItem operation returns the attributes of one or more items\n from one or more tables. You identify requested items by primary key.

\n

A single operation can retrieve up to 16 MB of data, which can contain as many as 100\n items. BatchGetItem returns a partial result if the response size limit is\n exceeded, the table's provisioned throughput is exceeded, or an internal processing\n failure occurs. If a partial result is returned, the operation returns a value for\n UnprocessedKeys. You can use this value to retry the operation starting\n with the next item to get.

\n \n

If you request more than 100 items, BatchGetItem returns a\n ValidationException with the message \"Too many items requested for\n the BatchGetItem call.\"

\n
\n

For example, if you ask to retrieve 100 items, but each individual item is 300 KB in\n size, the system returns 52 items (so as not to exceed the 16 MB limit). It also returns\n an appropriate UnprocessedKeys value so you can get the next page of\n results. If desired, your application can include its own logic to assemble the pages of\n results into one dataset.

\n

If none of the items can be processed due to insufficient\n provisioned throughput on all of the tables in the request, then\n BatchGetItem returns a\n ProvisionedThroughputExceededException. If at least\n one of the items is successfully processed, then\n BatchGetItem completes successfully, while returning the keys of the\n unread items in UnprocessedKeys.

\n \n

If DynamoDB returns any unprocessed items, you should retry the batch operation on\n those items. However, we strongly recommend that you use an exponential\n backoff algorithm. If you retry the batch operation immediately, the\n underlying read or write requests can still fail due to throttling on the individual\n tables. If you delay the batch operation using exponential backoff, the individual\n requests in the batch are much more likely to succeed.

\n

For more information, see Batch Operations and Error Handling in the Amazon DynamoDB\n Developer Guide.

\n
\n

By default, BatchGetItem performs eventually consistent reads on every\n table in the request. If you want strongly consistent reads instead, you can set\n ConsistentRead to true for any or all tables.

\n

In order to minimize response latency, BatchGetItem may retrieve items in\n parallel.

\n

When designing your application, keep in mind that DynamoDB does not return items in\n any particular order. To help parse the response by item, include the primary key values\n for the items in your request in the ProjectionExpression parameter.

\n

If a requested item does not exist, it is not returned in the result. Requests for\n nonexistent items consume the minimum read capacity units according to the type of read.\n For more information, see Working with Tables in the Amazon DynamoDB Developer\n Guide.

" + "smithy.api#documentation": "

The BatchGetItem operation returns the attributes of one or more items\n from one or more tables. You identify requested items by primary key.

\n

A single operation can retrieve up to 16 MB of data, which can contain as many as 100\n items. BatchGetItem returns a partial result if the response size limit is\n exceeded, the table's provisioned throughput is exceeded, more than 1 MB per partition is requested,\n or an internal processing failure occurs. If a partial result is returned, the operation returns a value for\n UnprocessedKeys. You can use this value to retry the operation starting\n with the next item to get.

\n \n

If you request more than 100 items, BatchGetItem returns a\n ValidationException with the message \"Too many items requested for\n the BatchGetItem call.\"

\n
\n

For example, if you ask to retrieve 100 items, but each individual item is 300 KB in\n size, the system returns 52 items (so as not to exceed the 16 MB limit). It also returns\n an appropriate UnprocessedKeys value so you can get the next page of\n results. If desired, your application can include its own logic to assemble the pages of\n results into one dataset.

\n

If none of the items can be processed due to insufficient\n provisioned throughput on all of the tables in the request, then\n BatchGetItem returns a\n ProvisionedThroughputExceededException. If at least\n one of the items is successfully processed, then\n BatchGetItem completes successfully, while returning the keys of the\n unread items in UnprocessedKeys.

\n \n

If DynamoDB returns any unprocessed items, you should retry the batch operation on\n those items. However, we strongly recommend that you use an exponential\n backoff algorithm. If you retry the batch operation immediately, the\n underlying read or write requests can still fail due to throttling on the individual\n tables. If you delay the batch operation using exponential backoff, the individual\n requests in the batch are much more likely to succeed.

\n

For more information, see Batch Operations and Error Handling in the Amazon DynamoDB\n Developer Guide.

\n
\n

By default, BatchGetItem performs eventually consistent reads on every\n table in the request. If you want strongly consistent reads instead, you can set\n ConsistentRead to true for any or all tables.

\n

In order to minimize response latency, BatchGetItem may retrieve items in\n parallel.

\n

When designing your application, keep in mind that DynamoDB does not return items in\n any particular order. To help parse the response by item, include the primary key values\n for the items in your request in the ProjectionExpression parameter.

\n

If a requested item does not exist, it is not returned in the result. Requests for\n nonexistent items consume the minimum read capacity units according to the type of read.\n For more information, see Working with Tables in the Amazon DynamoDB Developer\n Guide.

" } }, "com.amazonaws.dynamodb#BatchGetItemInput": { @@ -6732,7 +6732,7 @@ } }, "traits": { - "smithy.api#documentation": "

There is no limit to the number of daily on-demand backups that can be taken.

\n

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations\n include CreateTable, UpdateTable,\n DeleteTable,UpdateTimeToLive,\n RestoreTableFromBackup, and RestoreTableToPointInTime.

\n

When you are creating a table with one or more secondary\n indexes, you can have up to 250 such requests running at a time. However, if the table or\n index specifications are complex, then DynamoDB might temporarily reduce the number\n of concurrent operations.

\n

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

\n

There is a soft account quota of 2,500 tables.

", + "smithy.api#documentation": "

There is no limit to the number of daily on-demand backups that can be taken.

\n

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations\n include CreateTable, UpdateTable,\n DeleteTable, UpdateTimeToLive,\n RestoreTableFromBackup, and RestoreTableToPointInTime.

\n

When you are creating a table with one or more secondary\n indexes, you can have up to 250 such requests running at a time. However, if the table or\n index specifications are complex, then DynamoDB might temporarily reduce the number\n of concurrent operations.

\n

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

\n

There is a soft account quota of 2,500 tables.

\n

GetRecords was called with a value of more than 1000 for the limit request parameter.

\n

More than 2 processes are reading from the same streams shard at the same time. Exceeding\n this limit may result in request throttling.

", "smithy.api#error": "client" } }, @@ -7689,14 +7689,14 @@ "ReadCapacityUnits": { "target": "com.amazonaws.dynamodb#PositiveLongObject", "traits": { - "smithy.api#documentation": "

The maximum number of strongly consistent reads consumed per second before DynamoDB\n returns a ThrottlingException. For more information, see Specifying Read and Write Requirements in the Amazon DynamoDB\n Developer Guide.

\n

If read/write capacity mode is PAY_PER_REQUEST the value is set to\n 0.

", + "smithy.api#documentation": "

The maximum number of strongly consistent reads consumed per second before DynamoDB\n returns a ThrottlingException. For more information, see Specifying Read and Write Requirements in the Amazon DynamoDB\n Developer Guide.

\n

If read/write capacity mode is PAY_PER_REQUEST the value is set to\n 0.

", "smithy.api#required": {} } }, "WriteCapacityUnits": { "target": "com.amazonaws.dynamodb#PositiveLongObject", "traits": { - "smithy.api#documentation": "

The maximum number of writes consumed per second before DynamoDB returns a\n ThrottlingException. For more information, see Specifying Read and Write Requirements in the Amazon DynamoDB\n Developer Guide.

\n

If read/write capacity mode is PAY_PER_REQUEST the value is set to\n 0.

", + "smithy.api#documentation": "

The maximum number of writes consumed per second before DynamoDB returns a\n ThrottlingException. For more information, see Specifying Read and Write Requirements in the Amazon DynamoDB\n Developer Guide.

\n

If read/write capacity mode is PAY_PER_REQUEST the value is set to\n 0.

", "smithy.api#required": {} } }