diff --git a/.changes/1.37.0.json b/.changes/1.37.0.json
new file mode 100644
index 000000000000..e585c5c9293e
--- /dev/null
+++ b/.changes/1.37.0.json
@@ -0,0 +1,62 @@
+[
+ {
+ "category": "``apigateway``",
+ "description": "Documentation updates for Amazon API Gateway",
+ "type": "api-change"
+ },
+ {
+ "category": "``bedrock-agent-runtime``",
+ "description": "Now supports streaming for inline agents.",
+ "type": "api-change"
+ },
+ {
+ "category": "``cognito-identity``",
+ "description": "corrects the dual-stack endpoint configuration",
+ "type": "api-change"
+ },
+ {
+ "category": "``partnercentral-selling``",
+ "description": "Add Tagging support for ResourceSnapshotJob resources",
+ "type": "api-change"
+ },
+ {
+ "category": "``s3``",
+ "description": "This change enhances integrity protections for new SDK requests to S3. S3 SDKs now support the CRC64NVME checksum algorithm, full object checksums for multipart S3 objects, and new default integrity protections for S3 requests.",
+ "type": "api-change"
+ },
+ {
+ "category": "``security-ir``",
+ "description": "Increase minimum length of Threat Actor IP 'userAgent' to 1.",
+ "type": "api-change"
+ },
+ {
+ "category": "``sesv2``",
+ "description": "This release introduces a new recommendation in Virtual Deliverability Manager Advisor, which detects elevated complaint rates for customer sending identities.",
+ "type": "api-change"
+ },
+ {
+ "category": "``workspaces``",
+ "description": "Added GeneralPurpose.4xlarge & GeneralPurpose.8xlarge ComputeTypes.",
+ "type": "api-change"
+ },
+ {
+ "category": "``workspaces-thin-client``",
+ "description": "Mark type in MaintenanceWindow as required.",
+ "type": "api-change"
+ },
+ {
+ "category": "``s3``",
+ "description": "The S3 client attempts to validate response checksums for all S3 API operations that support checksums. However, if the SDK has not implemented the specified checksum algorithm then this validation is skipped. Checksum validation behavior can be configured using the ``when_supported`` and ``when_required`` options - in code using the ``response_checksum_validation`` parameter for ``botocore.config.Config``, in the shared AWS config file using ``response_checksum_validation``, or as an env variable using ``AWS_RESPONSE_CHECKSUM_VALIDATION``.",
+ "type": "feature"
+ },
+ {
+ "category": "``s3``",
+ "description": "Added support for the CRC64NVME checksum algorithm in the S3 client through the optional AWS CRT (``awscrt``) dependency.",
+ "type": "feature"
+ },
+ {
+ "category": "``s3``",
+ "description": "S3 client behavior is updated to always calculate a CRC32 checksum by default for operations that support it (such as PutObject or UploadPart), or require it (such as DeleteObjects). Checksum behavior can be configured using ``when_supported`` and ``when_required`` options - in code using the ``request_checksum_calculation`` parameter for ``botocore.config.Config``, in the shared AWS config file using ``request_checksum_calculation``, or as an env variable using ``AWS_REQUEST_CHECKSUM_CALCULATION``. Note: Botocore will no longer automatically compute and populate the Content-MD5 header.",
+ "type": "feature"
+ }
+]
\ No newline at end of file
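A minimal sketch of the new checksum configuration surface described in the ``s3`` feature entries above. The ``request_checksum_calculation`` and ``response_checksum_validation`` parameters and their ``when_supported``/``when_required`` values are taken from those entries; the bucket, key, and payload are placeholders::

    import botocore.session
    from botocore.config import Config

    # Only calculate request checksums and validate response checksums when an
    # operation requires them, instead of the default "when_supported" behavior.
    config = Config(
        request_checksum_calculation="when_required",
        response_checksum_validation="when_required",
    )

    s3 = botocore.session.get_session().create_client("s3", config=config)
    s3.put_object(Bucket="amzn-s3-demo-bucket", Key="example.txt", Body=b"hello")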
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index c62e85eec647..af3bf08d6e31 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -2,6 +2,23 @@
CHANGELOG
=========
+1.37.0
+======
+
+* api-change:``apigateway``: Documentation updates for Amazon API Gateway
+* api-change:``bedrock-agent-runtime``: Now supports streaming for inline agents.
+* api-change:``cognito-identity``: corrects the dual-stack endpoint configuration
+* api-change:``partnercentral-selling``: Add Tagging support for ResourceSnapshotJob resources
+* api-change:``s3``: This change enhances integrity protections for new SDK requests to S3. S3 SDKs now support the CRC64NVME checksum algorithm, full object checksums for multipart S3 objects, and new default integrity protections for S3 requests.
+* api-change:``security-ir``: Increase minimum length of Threat Actor IP 'userAgent' to 1.
+* api-change:``sesv2``: This release introduces a new recommendation in Virtual Deliverability Manager Advisor, which detects elevated complaint rates for customer sending identities.
+* api-change:``workspaces``: Added GeneralPurpose.4xlarge & GeneralPurpose.8xlarge ComputeTypes.
+* api-change:``workspaces-thin-client``: Mark type in MaintenanceWindow as required.
+* feature:``s3``: The S3 client attempts to validate response checksums for all S3 API operations that support checksums. However, if the SDK has not implemented the specified checksum algorithm then this validation is skipped. Checksum validation behavior can be configured using the ``when_supported`` and ``when_required`` options - in code using the ``response_checksum_validation`` parameter for ``botocore.config.Config``, in the shared AWS config file using ``response_checksum_validation``, or as an env variable using ``AWS_RESPONSE_CHECKSUM_VALIDATION``.
+* feature:``s3``: Added support for the CRC64NVME checksum algorithm in the S3 client through the optional AWS CRT (``awscrt``) dependency.
+* feature:``s3``: S3 client behavior is updated to always calculate a CRC32 checksum by default for operations that support it (such as PutObject or UploadPart), or require it (such as DeleteObjects). Checksum behavior can be configured using ``when_supported`` and ``when_required`` options - in code using the ``request_checksum_calculation`` parameter for ``botocore.config.Config``, in the shared AWS config file using ``request_checksum_calculation``, or as an env variable using ``AWS_REQUEST_CHECKSUM_CALCULATION``. Note: Botocore will no longer automatically compute and populate the Content-MD5 header.
+
+
1.36.40
=======
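The same request and response checksum settings can also be supplied outside of code. A hedged sketch using the environment variables named in the 1.37.0 ``s3`` feature entries above (the variables must be set before the client is created; the values shown are the two documented options)::

    import os

    # "when_supported" is the default; "when_required" limits checksum work to
    # operations that require it. Content-MD5 is no longer auto-populated in
    # either mode.
    os.environ["AWS_REQUEST_CHECKSUM_CALCULATION"] = "when_required"
    os.environ["AWS_RESPONSE_CHECKSUM_VALIDATION"] = "when_required"

    import botocore.session

    s3 = botocore.session.get_session().create_client("s3")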
diff --git a/awscli/__init__.py b/awscli/__init__.py
index d52c12ed6343..9ba1c8ba2695 100644
--- a/awscli/__init__.py
+++ b/awscli/__init__.py
@@ -18,7 +18,7 @@
import os
-__version__ = '1.36.40'
+__version__ = '1.37.0'
#
# Get our data path to be added to botocore's search path
diff --git a/awscli/customizations/s3/subcommands.py b/awscli/customizations/s3/subcommands.py
index e0f79b90bcbe..3f3a2834c6d5 100644
--- a/awscli/customizations/s3/subcommands.py
+++ b/awscli/customizations/s3/subcommands.py
@@ -458,7 +458,7 @@
}
CHECKSUM_ALGORITHM = {
- 'name': 'checksum-algorithm', 'choices': ['CRC32', 'SHA256', 'SHA1', 'CRC32C'],
+ 'name': 'checksum-algorithm', 'choices': ['CRC64NVME', 'CRC32', 'SHA256', 'SHA1', 'CRC32C'],
'help_text': 'Indicates the algorithm used to create the checksum for the object.'
}
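The ``CRC64NVME`` choice added above is only available when the optional AWS CRT dependency (``awscrt``) is installed, per the ``s3`` feature entry in this release and the ``@requires_crt`` guard on the new functional test further down. A rough sketch of guarding for it (bucket and key are placeholders)::

    import botocore.session

    try:
        import awscrt  # noqa: F401  # optional dependency providing CRC64NVME
        algorithm = "CRC64NVME"
    except ImportError:
        algorithm = "CRC32"  # pure-Python default

    s3 = botocore.session.get_session().create_client("s3")
    s3.put_object(
        Bucket="amzn-s3-demo-bucket",
        Key="example.txt",
        Body=b"hello",
        ChecksumAlgorithm=algorithm,
    )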
diff --git a/awscli/topics/s3-faq.rst b/awscli/topics/s3-faq.rst
index 2e8babe13a41..975b8d122a66 100644
--- a/awscli/topics/s3-faq.rst
+++ b/awscli/topics/s3-faq.rst
@@ -13,32 +13,55 @@ Below are common questions regarding the use of Amazon S3 in the AWS CLI.
Q: Does the AWS CLI validate checksums?
---------------------------------------
-The AWS CLI will perform checksum validation for uploading files in
-specific scenarios.
+The AWS CLI will attempt to perform checksum validation for uploading and
+downloading files, as described below.
Upload
~~~~~~
-The AWS CLI will calculate and auto-populate the ``Content-MD5`` header for
-both standard and multipart uploads. If the checksum that S3 calculates does
-not match the ``Content-MD5`` provided, S3 will not store the object and
-instead will return an error message back the AWS CLI. The AWS CLI will retry
-this error up to 5 times before giving up. On the case that any files fail to
-transfer successfully to S3, the AWS CLI will exit with a non zero RC.
-See ``aws help return-codes`` for more information.
-
-If the upload request is signed with Signature Version 4, then the AWS CLI uses the
-``x-amz-content-sha256`` header as a checksum instead of ``Content-MD5``.
-The AWS CLI will use Signature Version 4 for S3 in several cases:
-
-* You're using an AWS region that only supports Signature Version 4. This
- includes ``eu-central-1`` and ``ap-northeast-2``.
-* You explicitly opt in and set ``signature_version = s3v4`` in your
- ``~/.aws/config`` file.
-
-Note that the AWS CLI will add a ``Content-MD5`` header for both
-the high level ``aws s3`` commands that perform uploads
-(``aws s3 cp``, ``aws s3 sync``) as well as the low level ``s3api``
-commands including ``aws s3api put-object`` and ``aws s3api upload-part``.
-
-If you want to verify the integrity of an object during upload, see `How can I check the integrity of an object uploaded to Amazon S3? `_ in the *AWS Knowledge Center*.
+The AWS CLI v1 will calculate and auto-populate an ``x-amz-checksum-<algorithm>`` HTTP header
+by default for each upload, where ``<algorithm>`` is the algorithm used to calculate the checksum.
+The Cyclic Redundancy Check 32 (CRC32) algorithm is used by default, but an alternative algorithm
+can be specified by using the ``--checksum-algorithm`` argument on high-level ``aws s3`` commands.
+The checksum algorithms supported by the AWS CLI v1 are:
+
+- CRC64NVME (Recommended)
+- CRC32
+- CRC32C
+- SHA1
+- SHA256
+
+Amazon S3 will use the algorithm specified in the header to calculate the checksum of the object.
+If it does not match the checksum provided, the object will not be stored and an error message
+will be returned. Otherwise, the checksum is stored in the object's metadata, where it can be used
+later to verify data integrity when downloading (see the Download section below).
+
+.. note::
+    The AWS CLI performs the above checksum calculations for all commands that perform uploads. This
+    includes high-level commands like ``aws s3 cp``, ``aws s3 sync``, and ``aws s3 mv``, as well as
+    low-level commands like ``aws s3api put-object`` and ``aws s3api upload-part``.
+
+    For high-level command invocations that upload multiple files (e.g. ``aws s3 sync``), the same
+    checksum algorithm is used for every file uploaded as part of the command.
+
+For more information about verifying data integrity in Amazon S3, see
+`Checking object integrity in Amazon S3
+<https://docs.aws.amazon.com/AmazonS3/latest/userguide/checking-object-integrity.html>`_
+in the Amazon S3 User Guide.
+
+Download
+~~~~~~~~
+
+The AWS CLI will attempt to verify the checksum of downloads when possible. If a non-MD5 checksum is returned
+with a downloaded object, the CLI will use the same algorithm to recalculate the checksum and verify
+it matches the one stored in Amazon S3. If checksum validation fails, an error is raised and the request will NOT be
+retried.
+
+.. note::
+    The AWS CLI performs the above checksum validation for all commands that perform downloads. This
+    includes high-level commands like ``aws s3 cp``, ``aws s3 sync``, and ``aws s3 mv``, as well as
+    low-level commands like ``aws s3api get-object``.
+
+For more information about verifying data integrity in Amazon S3, see
+`Checking object integrity in Amazon S3
+<https://docs.aws.amazon.com/AmazonS3/latest/userguide/checking-object-integrity.html>`_
+in the Amazon S3 User Guide.
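For reference, a sketch of the low-level requests that the FAQ's upload and download behavior maps onto, mirroring the parameters asserted by the functional tests below (``ChecksumAlgorithm`` on uploads, ``ChecksumMode`` on downloads); bucket, key, and file names are placeholders::

    import botocore.session

    s3 = botocore.session.get_session().create_client("s3")

    # Upload: the CLI sends a CRC32 checksum by default, roughly equivalent to
    # `aws s3api put-object ... --checksum-algorithm CRC32`.
    with open("local-file.txt", "rb") as f:
        s3.put_object(
            Bucket="amzn-s3-demo-bucket",
            Key="local-file.txt",
            Body=f,
            ChecksumAlgorithm="CRC32",
        )

    # Download: `--checksum-mode ENABLED` asks S3 to return stored checksums so
    # the response body can be validated against them.
    response = s3.get_object(
        Bucket="amzn-s3-demo-bucket",
        Key="local-file.txt",
        ChecksumMode="ENABLED",
    )
    data = response["Body"].read()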
diff --git a/doc/source/conf.py b/doc/source/conf.py
index 6eed4fd98d10..746b6d90bea5 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -50,9 +50,9 @@
# built documents.
#
# The short X.Y version.
-version = '1.36.'
+version = '1.37'
# The full version, including alpha/beta/rc tags.
-release = '1.36.40'
+release = '1.37.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/setup.cfg b/setup.cfg
index e45874bdfd23..6b4d6d09c976 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -3,9 +3,9 @@ universal = 0
[metadata]
requires_dist =
- botocore==1.35.99
+ botocore==1.36.0
docutils>=0.10,<0.17
- s3transfer>=0.10.0,<0.11.0
+ s3transfer>=0.11.0,<0.12.0
PyYAML>=3.10,<6.1
colorama>=0.2.5,<0.4.7
rsa>=3.1.2,<4.8
diff --git a/setup.py b/setup.py
index 6e2085e60e2c..de95ca6d0861 100644
--- a/setup.py
+++ b/setup.py
@@ -24,9 +24,9 @@ def find_version(*file_paths):
install_requires = [
- 'botocore==1.35.99',
+ 'botocore==1.36.0',
'docutils>=0.10,<0.17',
- 's3transfer>=0.10.0,<0.11.0',
+ 's3transfer>=0.11.0,<0.12.0',
'PyYAML>=3.10,<6.1',
'colorama>=0.2.5,<0.4.7',
'rsa>=3.1.2,<4.8',
diff --git a/tests/functional/s3/__init__.py b/tests/functional/s3/__init__.py
index 8fe4c6a65006..c43e7d68e472 100644
--- a/tests/functional/s3/__init__.py
+++ b/tests/functional/s3/__init__.py
@@ -105,6 +105,7 @@ def put_object_request(self, bucket, key, **override_kwargs):
params = {
'Bucket': bucket,
'Key': key,
+ 'ChecksumAlgorithm': 'CRC32',
'Body': mock.ANY,
}
params.update(override_kwargs)
diff --git a/tests/functional/s3/test_cp_command.py b/tests/functional/s3/test_cp_command.py
index 14617e64b450..79eea2ccab7e 100644
--- a/tests/functional/s3/test_cp_command.py
+++ b/tests/functional/s3/test_cp_command.py
@@ -82,7 +82,7 @@ def test_upload_grants(self):
{'Key': u'key.txt', 'Bucket': u'bucket', 'GrantRead': u'id=foo',
'GrantFullControl': u'id=bar', 'GrantReadACP': u'id=biz',
'GrantWriteACP': u'id=baz', 'ContentType': u'text/plain',
- 'Body': mock.ANY}
+ 'Body': mock.ANY, 'ChecksumAlgorithm': 'CRC32'}
)
def test_upload_expires(self):
@@ -449,6 +449,7 @@ def test_cp_with_sse_flag(self):
self.assertDictEqual(
self.operations_called[0][1],
{'Key': 'key.txt', 'Bucket': 'bucket',
+ 'ChecksumAlgorithm': 'CRC32',
'ContentType': 'text/plain', 'Body': mock.ANY,
'ServerSideEncryption': 'AES256'}
)
@@ -464,6 +465,7 @@ def test_cp_with_sse_c_flag(self):
self.assertDictEqual(
self.operations_called[0][1],
{'Key': 'key.txt', 'Bucket': 'bucket',
+ 'ChecksumAlgorithm': 'CRC32',
'ContentType': 'text/plain', 'Body': mock.ANY,
'SSECustomerAlgorithm': 'AES256', 'SSECustomerKey': 'foo'}
)
@@ -488,6 +490,7 @@ def test_cp_with_sse_c_fileb(self):
expected_args = {
'Key': 'key.txt', 'Bucket': 'bucket',
+ 'ChecksumAlgorithm': 'CRC32',
'ContentType': 'text/plain',
'Body': mock.ANY,
'SSECustomerAlgorithm': 'AES256',
@@ -563,6 +566,7 @@ def test_cp_upload_with_sse_kms_and_key_id(self):
self.assertDictEqual(
self.operations_called[0][1],
{'Key': 'key.txt', 'Bucket': 'bucket',
+ 'ChecksumAlgorithm': 'CRC32',
'ContentType': 'text/plain', 'Body': mock.ANY,
'SSEKMSKeyId': 'foo', 'ServerSideEncryption': 'aws:kms'}
)
@@ -588,6 +592,7 @@ def test_cp_upload_large_file_with_sse_kms_and_key_id(self):
self.assertDictEqual(
self.operations_called[0][1],
{'Key': 'key.txt', 'Bucket': 'bucket',
+ 'ChecksumAlgorithm': 'CRC32',
'ContentType': 'text/plain',
'SSEKMSKeyId': 'foo', 'ServerSideEncryption': 'aws:kms'}
)
@@ -708,6 +713,14 @@ def test_upload_with_checksum_algorithm_crc32c(self):
self.assertEqual(self.operations_called[0][0].name, 'PutObject')
self.assertEqual(self.operations_called[0][1]['ChecksumAlgorithm'], 'CRC32C')
+ @requires_crt
+ def test_upload_with_checksum_algorithm_crc64nvme(self):
+ full_path = self.files.create_file('foo.txt', 'contents')
+ cmdline = f'{self.prefix} {full_path} s3://bucket/key.txt --checksum-algorithm CRC64NVME'
+ self.run_cmd(cmdline, expected_rc=0)
+ self.assertEqual(self.operations_called[0][0].name, 'PutObject')
+ self.assertEqual(self.operations_called[0][1]['ChecksumAlgorithm'], 'CRC64NVME')
+
def test_multipart_upload_with_checksum_algorithm_crc32(self):
full_path = self.files.create_file('foo.txt', 'a' * 10 * (1024 ** 2))
self.parsed_responses = [
@@ -791,6 +804,7 @@ def test_streaming_upload(self):
expected_args = {
'Bucket': 'bucket',
'Key': 'streaming.txt',
+ 'ChecksumAlgorithm': 'CRC32',
'Body': mock.ANY
}
@@ -812,6 +826,7 @@ def test_streaming_upload_with_expected_size(self):
expected_args = {
'Bucket': 'bucket',
'Key': 'streaming.txt',
+ 'ChecksumAlgorithm': 'CRC32',
'Body': mock.ANY
}
@@ -917,6 +932,7 @@ def test_single_upload(self):
('PutObject', {
'Bucket': 'mybucket',
'Key': 'mykey',
+ 'ChecksumAlgorithm': 'CRC32',
'RequestPayer': 'requester',
'Body': mock.ANY,
})
@@ -941,11 +957,13 @@ def test_multipart_upload(self):
('CreateMultipartUpload', {
'Bucket': 'mybucket',
'Key': 'mykey',
+ 'ChecksumAlgorithm': 'CRC32',
'RequestPayer': 'requester',
}),
('UploadPart', {
'Bucket': 'mybucket',
'Key': 'mykey',
+ 'ChecksumAlgorithm': 'CRC32',
'RequestPayer': 'requester',
'UploadId': 'myid',
'PartNumber': mock.ANY,
@@ -954,6 +972,7 @@ def test_multipart_upload(self):
('UploadPart', {
'Bucket': 'mybucket',
'Key': 'mykey',
+ 'ChecksumAlgorithm': 'CRC32',
'RequestPayer': 'requester',
'UploadId': 'myid',
'PartNumber': mock.ANY,
@@ -986,6 +1005,7 @@ def test_recursive_upload(self):
('PutObject', {
'Bucket': 'mybucket',
'Key': 'myfile',
+ 'ChecksumAlgorithm': 'CRC32',
'RequestPayer': 'requester',
'Body': mock.ANY,
})
diff --git a/tests/functional/s3/test_sync_command.py b/tests/functional/s3/test_sync_command.py
index b3978edcf426..c096862ce01c 100644
--- a/tests/functional/s3/test_sync_command.py
+++ b/tests/functional/s3/test_sync_command.py
@@ -375,6 +375,22 @@ def test_download_with_checksum_mode_sha256(self):
self.assertEqual(self.operations_called[1][0].name, 'GetObject')
self.assertIn(('ChecksumMode', 'ENABLED'), self.operations_called[1][1].items())
+ def test_download_with_checksum_mode_crc64nvme(self):
+ self.parsed_responses = [
+ self.list_objects_response(['bucket']),
+ # Mocked GetObject response with a checksum algorithm specified
+ {
+ 'ETag': 'foo-1',
+ 'ChecksumCRC64NVME': 'checksum',
+ 'Body': BytesIO(b'foo')
+ }
+ ]
+ cmdline = f'{self.prefix} s3://bucket/foo {self.files.rootdir} --checksum-mode ENABLED'
+ self.run_cmd(cmdline, expected_rc=0)
+ self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')
+ self.assertEqual(self.operations_called[1][0].name, 'GetObject')
+ self.assertIn(('ChecksumMode', 'ENABLED'), self.operations_called[1][1].items())
+
class TestSyncCommandWithS3Express(BaseS3TransferCommandTest):
diff --git a/tests/functional/s3api/test_get_object.py b/tests/functional/s3api/test_get_object.py
index ec32015254d9..5b0e2d53983f 100644
--- a/tests/functional/s3api/test_get_object.py
+++ b/tests/functional/s3api/test_get_object.py
@@ -38,6 +38,7 @@ def test_simple(self):
cmdline += ' outfile'
self.addCleanup(self.remove_file_if_exists, 'outfile')
self.assert_params_for_cmd(cmdline, {'Bucket': 'mybucket',
+ 'ChecksumMode': 'ENABLED',
'Key': 'mykey'})
def test_range(self):
@@ -48,6 +49,7 @@ def test_range(self):
cmdline += ' outfile'
self.addCleanup(self.remove_file_if_exists, 'outfile')
self.assert_params_for_cmd(cmdline, {'Bucket': 'mybucket',
+ 'ChecksumMode': 'ENABLED',
'Key': 'mykey',
'Range': 'bytes=0-499'})
@@ -61,7 +63,9 @@ def test_response_headers(self):
self.addCleanup(self.remove_file_if_exists, 'outfile')
self.assert_params_for_cmd(
cmdline, {
- 'Bucket': 'mybucket', 'Key': 'mykey',
+ 'Bucket': 'mybucket',
+ 'ChecksumMode': 'ENABLED',
+ 'Key': 'mykey',
'ResponseCacheControl': 'No-cache',
'ResponseContentEncoding': 'x-gzip'
}
@@ -83,7 +87,7 @@ def test_streaming_output_arg_with_error_response(self):
cmdline += ' outfile'
self.addCleanup(self.remove_file_if_exists, 'outfile')
self.assert_params_for_cmd(
- cmdline, {'Bucket': 'mybucket', 'Key': 'mykey'})
+ cmdline, {'Bucket': 'mybucket', 'ChecksumMode': 'ENABLED', 'Key': 'mykey'})
if __name__ == "__main__":
diff --git a/tests/unit/customizations/s3/test_copy_params.py b/tests/unit/customizations/s3/test_copy_params.py
index 1f735f96d949..9d637e455e64 100644
--- a/tests/unit/customizations/s3/test_copy_params.py
+++ b/tests/unit/customizations/s3/test_copy_params.py
@@ -50,7 +50,7 @@ def test_simple(self):
cmdline = self.prefix
cmdline += self.file_path
cmdline += ' s3://mybucket/mykey'
- result = {'Bucket': u'mybucket', 'Key': u'mykey'}
+ result = {'Bucket': u'mybucket', 'Key': u'mykey', 'ChecksumAlgorithm': 'CRC32'}
self.assert_params(cmdline, result)
def test_sse(self):
@@ -59,7 +59,7 @@ def test_sse(self):
cmdline += ' s3://mybucket/mykey'
cmdline += ' --sse'
result = {'Bucket': u'mybucket', 'Key': u'mykey',
- 'ServerSideEncryption': 'AES256'}
+ 'ServerSideEncryption': 'AES256', 'ChecksumAlgorithm': 'CRC32'}
self.assert_params(cmdline, result)
def test_storage_class(self):
@@ -68,7 +68,7 @@ def test_storage_class(self):
cmdline += ' s3://mybucket/mykey'
cmdline += ' --storage-class REDUCED_REDUNDANCY'
result = {'Bucket': u'mybucket', 'Key': u'mykey',
- 'StorageClass': u'REDUCED_REDUNDANCY'}
+ 'StorageClass': u'REDUCED_REDUNDANCY', 'ChecksumAlgorithm': 'CRC32'}
self.assert_params(cmdline, result)
def test_standard_ia_storage_class(self):
@@ -77,7 +77,7 @@ def test_standard_ia_storage_class(self):
cmdline += ' s3://mybucket/mykey'
cmdline += ' --storage-class STANDARD_IA'
result = {'Bucket': u'mybucket', 'Key': u'mykey',
- 'StorageClass': u'STANDARD_IA'}
+ 'StorageClass': u'STANDARD_IA', 'ChecksumAlgorithm': 'CRC32'}
self.assert_params(cmdline, result)
def test_glacier_ir_storage_class(self):
@@ -86,7 +86,7 @@ def test_glacier_ir_storage_class(self):
cmdline += ' s3://mybucket/mykey'
cmdline += ' --storage-class GLACIER_IR'
result = {'Bucket': u'mybucket', 'Key': u'mykey',
- 'StorageClass': u'GLACIER_IR'}
+ 'ChecksumAlgorithm': 'CRC32', 'StorageClass': u'GLACIER_IR'}
self.assert_params(cmdline, result)
def test_website_redirect(self):
@@ -96,6 +96,7 @@ def test_website_redirect(self):
cmdline += ' --website-redirect /foobar'
result = {'Bucket': u'mybucket',
'Key': u'mykey',
+ 'ChecksumAlgorithm': 'CRC32',
'WebsiteRedirectLocation': u'/foobar'}
self.assert_params(cmdline, result)
@@ -104,7 +105,7 @@ def test_acl(self):
cmdline += self.file_path
cmdline += ' s3://mybucket/mykey'
cmdline += ' --acl public-read'
- result = {'Bucket': 'mybucket', 'Key': 'mykey', 'ACL': 'public-read'}
+ result = {'Bucket': 'mybucket', 'Key': 'mykey', 'ChecksumAlgorithm': 'CRC32', 'ACL': 'public-read'}
self.assert_params(cmdline, result)
def test_content_params(self):
@@ -116,6 +117,7 @@ def test_content_params(self):
cmdline += ' --cache-control max-age=3600,must-revalidate'
cmdline += ' --content-disposition attachment;filename="fname.ext"'
result = {'Bucket': 'mybucket', 'Key': 'mykey',
+ 'ChecksumAlgorithm': 'CRC32',
'ContentEncoding': 'x-gzip',
'ContentLanguage': 'piglatin',
'ContentDisposition': 'attachment;filename="fname.ext"',
@@ -131,7 +133,8 @@ def test_grants(self):
result = {'Bucket': u'mybucket',
'GrantFullControl': u'alice',
'GrantRead': u'bob',
- 'Key': u'mykey'}
+ 'Key': u'mykey',
+ 'ChecksumAlgorithm': 'CRC32'}
self.assert_params(cmdline, result)
def test_grants_bad(self):
@@ -148,7 +151,7 @@ def test_content_type(self):
cmdline += ' s3://mybucket/mykey'
cmdline += ' --content-type text/xml'
result = {'Bucket': u'mybucket', 'ContentType': u'text/xml',
- 'Key': u'mykey'}
+ 'Key': u'mykey', 'ChecksumAlgorithm': 'CRC32'}
self.assert_params(cmdline, result)