Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-s3transfer for 
openSUSE:Factory checked in at 2025-01-22 16:31:48
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-s3transfer (Old)
 and      /work/SRC/openSUSE:Factory/.python-s3transfer.new.5589 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-s3transfer"

Wed Jan 22 16:31:48 2025 rev:34 rq:1239341 version:0.11.1

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-s3transfer/python-s3transfer.changes     2024-12-10 23:46:04.062895439 +0100
+++ /work/SRC/openSUSE:Factory/.python-s3transfer.new.5589/python-s3transfer.changes   2025-01-22 16:32:01.851221336 +0100
@@ -1,0 +2,8 @@
+Mon Jan 20 11:02:04 UTC 2025 - John Paul Adrian Glaubitz <[email protected]>
+
+- Update to version 0.11.1
+  * bugfix:Dependencies: Update the floor version of botocore to 1.36.0 to match imports.
+- from version 0.11.0
+  * feature:manager: Use CRC32 by default and support user provided full-object checksums.
+
+-------------------------------------------------------------------
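
For illustration, the full-object checksum support added in 0.11.0 can be
exercised roughly as follows (a minimal sketch; the bucket, key, file name
and the base64 CRC32 value are placeholders, and the checksum must match
the uploaded bytes):

  import boto3
  from s3transfer.manager import TransferManager

  client = boto3.client('s3')
  manager = TransferManager(client)

  # ChecksumCRC32 is one of the new FULL_OBJECT_CHECKSUM_ARGS accepted in
  # extra_args; the provided value is forwarded instead of the CRC32
  # default the manager would otherwise apply.
  future = manager.upload(
      'localfile.bin',
      'example-bucket',
      'example-key',
      extra_args={'ChecksumCRC32': 'AAAAAA=='},
  )
  future.result()  # blocks until the upload finishes or raises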

Old:
----
  s3transfer-0.10.4.tar.gz

New:
----
  s3transfer-0.11.1.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-s3transfer.spec ++++++
--- /var/tmp/diff_new_pack.fw2UL4/_old  2025-01-22 16:32:02.403244242 +0100
+++ /var/tmp/diff_new_pack.fw2UL4/_new  2025-01-22 16:32:02.403244242 +0100
@@ -1,7 +1,7 @@
 #
 # spec file for package python-s3transfer
 #
-# Copyright (c) 2024 SUSE LLC
+# Copyright (c) 2025 SUSE LLC
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -18,14 +18,14 @@
 
 %{?sle15_python_module_pythons}
 Name:           python-s3transfer
-Version:        0.10.4
+Version:        0.11.1
 Release:        0
 Summary:        Python S3 transfer manager
 License:        Apache-2.0
 Group:          Development/Languages/Python
 URL:            https://github.com/boto/s3transfer
 Source0:        https://files.pythonhosted.org/packages/source/s/s3transfer/s3transfer-%{version}.tar.gz
-BuildRequires:  %{python_module botocore >= 1.33.2}
+BuildRequires:  %{python_module botocore >= 1.36.0}
 BuildRequires:  %{python_module pip}
 BuildRequires:  %{python_module pytest}
 BuildRequires:  %{python_module setuptools}
@@ -33,7 +33,7 @@
 BuildRequires:  fdupes
 BuildRequires:  python-rpm-macros
 Requires:       python-requests
-Requires:       (python-botocore >= 1.33.2 with python-botocore <= 2.0.0)
+Requires:       (python-botocore >= 1.36.0 with python-botocore <= 2.0.0)
 BuildArch:      noarch
 %if 0%{?sle_version} >= 150400
 Obsoletes:      python3-s3transfer < %{version}

++++++ s3transfer-0.10.4.tar.gz -> s3transfer-0.11.1.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/s3transfer-0.10.4/PKG-INFO 
new/s3transfer-0.11.1/PKG-INFO
--- old/s3transfer-0.10.4/PKG-INFO      2024-11-20 20:06:53.222139400 +0100
+++ new/s3transfer-0.11.1/PKG-INFO      2025-01-16 20:14:38.242988600 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: s3transfer
-Version: 0.10.4
+Version: 0.11.1
 Summary: An Amazon S3 Transfer Manager
 Home-page: https://github.com/boto/s3transfer
 Author: Amazon Web Services
@@ -22,9 +22,9 @@
 Requires-Python: >= 3.8
 License-File: LICENSE.txt
 License-File: NOTICE.txt
-Requires-Dist: botocore<2.0a.0,>=1.33.2
+Requires-Dist: botocore<2.0a.0,>=1.36.0
 Provides-Extra: crt
-Requires-Dist: botocore[crt]<2.0a.0,>=1.33.2; extra == "crt"
+Requires-Dist: botocore[crt]<2.0a.0,>=1.36.0; extra == "crt"
 
 =====================================================
 s3transfer - An Amazon S3 Transfer Manager for Python
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/s3transfer-0.10.4/s3transfer/__init__.py 
new/s3transfer-0.11.1/s3transfer/__init__.py
--- old/s3transfer-0.10.4/s3transfer/__init__.py        2024-11-20 
20:06:53.000000000 +0100
+++ new/s3transfer-0.11.1/s3transfer/__init__.py        2025-01-16 
20:14:38.000000000 +0100
@@ -145,7 +145,7 @@
 from s3transfer.exceptions import RetriesExceededError, S3UploadFailedError
 
 __author__ = 'Amazon Web Services'
-__version__ = '0.10.4'
+__version__ = '0.11.1'
 
 
 class NullHandler(logging.Handler):
@@ -717,6 +717,9 @@
 
     def __init__(self, client, config=None, osutil=None):
         self._client = client
+        self._client.meta.events.register(
+            'before-call.s3.*', self._update_checksum_context
+        )
         if config is None:
             config = TransferConfig()
         self._config = config
@@ -724,6 +727,13 @@
             osutil = OSUtils()
         self._osutil = osutil
 
+    def _update_checksum_context(self, params, **kwargs):
+        request_context = params.get("context", {})
+        checksum_context = request_context.get("checksum", {})
+        if "request_algorithm" in checksum_context:
+            # Force request checksum algorithm in the header if specified.
+            checksum_context["request_algorithm"]["in"] = "header"
+
     def upload_file(
         self, filename, bucket, key, callback=None, extra_args=None
     ):
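
The handler registered above only flips where botocore places an
already-negotiated request checksum; a rough standalone illustration of the
mutation (the exact shape of botocore's request context is an internal
detail and is only approximated here):

  def _update_checksum_context(params, **kwargs):
      request_context = params.get("context", {})
      checksum_context = request_context.get("checksum", {})
      if "request_algorithm" in checksum_context:
          # Send the checksum as a header instead of a trailing checksum.
          checksum_context["request_algorithm"]["in"] = "header"

  # Approximate context shape, for illustration only:
  params = {"context": {"checksum": {"request_algorithm": {"in": "trailer"}}}}
  _update_checksum_context(params)
  assert params["context"]["checksum"]["request_algorithm"]["in"] == "header"
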
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/s3transfer-0.10.4/s3transfer/constants.py 
new/s3transfer-0.11.1/s3transfer/constants.py
--- old/s3transfer-0.10.4/s3transfer/constants.py       2024-11-20 
20:03:20.000000000 +0100
+++ new/s3transfer-0.11.1/s3transfer/constants.py       2025-01-16 
20:10:58.000000000 +0100
@@ -26,5 +26,13 @@
     'ExpectedBucketOwner',
 ]
 
+FULL_OBJECT_CHECKSUM_ARGS = [
+    'ChecksumCRC32',
+    'ChecksumCRC32C',
+    'ChecksumCRC64NVME',
+    'ChecksumSHA1',
+    'ChecksumSHA256',
+]
+
 USER_AGENT = f's3transfer/{s3transfer.__version__}'
 PROCESS_USER_AGENT = f'{USER_AGENT} processpool'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/s3transfer-0.10.4/s3transfer/crt.py 
new/s3transfer-0.11.1/s3transfer/crt.py
--- old/s3transfer-0.10.4/s3transfer/crt.py     2024-11-20 20:06:53.000000000 +0100
+++ new/s3transfer-0.11.1/s3transfer/crt.py     2025-01-16 20:10:58.000000000 +0100
@@ -39,7 +39,7 @@
 from botocore.exceptions import NoCredentialsError
 from botocore.utils import ArnParser, InvalidArnException
 
-from s3transfer.constants import MB
+from s3transfer.constants import FULL_OBJECT_CHECKSUM_ARGS, MB
 from s3transfer.exceptions import TransferNotDoneError
 from s3transfer.futures import BaseTransferFuture, BaseTransferMeta
 from s3transfer.manager import TransferManager
@@ -491,6 +491,9 @@
         self._client.meta.events.register(
             'before-send.s3.*', self._make_fake_http_response
         )
+        self._client.meta.events.register(
+            'before-call.s3.*', self._remove_checksum_context
+        )
 
     def _resolve_client_config(self, session, client_kwargs):
         user_provided_config = None
@@ -620,6 +623,11 @@
         error_class = self._client.exceptions.from_code(error_code)
         return error_class(parsed_response, operation_name=operation_name)
 
+    def _remove_checksum_context(self, params, **kwargs):
+        request_context = params.get("context", {})
+        if "checksum" in request_context:
+            del request_context["checksum"]
+
 
 class FakeRawResponse(BytesIO):
     def stream(self, amt=1024, decode_content=None):
@@ -786,13 +794,18 @@
         else:
             call_args.extra_args["Body"] = call_args.fileobj
 
-        checksum_algorithm = call_args.extra_args.pop(
-            'ChecksumAlgorithm', 'CRC32'
-        ).upper()
-        checksum_config = awscrt.s3.S3ChecksumConfig(
-            algorithm=awscrt.s3.S3ChecksumAlgorithm[checksum_algorithm],
-            location=awscrt.s3.S3ChecksumLocation.TRAILER,
-        )
+        checksum_config = None
+        if not any(
+            checksum_arg in call_args.extra_args
+            for checksum_arg in FULL_OBJECT_CHECKSUM_ARGS
+        ):
+            checksum_algorithm = call_args.extra_args.pop(
+                'ChecksumAlgorithm', 'CRC32'
+            ).upper()
+            checksum_config = awscrt.s3.S3ChecksumConfig(
+                algorithm=awscrt.s3.S3ChecksumAlgorithm[checksum_algorithm],
+                location=awscrt.s3.S3ChecksumLocation.TRAILER,
+            )
         # Suppress botocore's automatic MD5 calculation by setting an override
         # value that will get deleted in the BotocoreCRTRequestSerializer.
         # As part of the CRT S3 request, we request the CRT S3 client to
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/s3transfer-0.10.4/s3transfer/manager.py 
new/s3transfer-0.11.1/s3transfer/manager.py
--- old/s3transfer-0.10.4/s3transfer/manager.py 2024-11-20 20:03:20.000000000 +0100
+++ new/s3transfer-0.11.1/s3transfer/manager.py 2025-01-16 20:10:58.000000000 +0100
@@ -16,7 +16,12 @@
 import threading
 
 from s3transfer.bandwidth import BandwidthLimiter, LeakyBucket
-from s3transfer.constants import ALLOWED_DOWNLOAD_ARGS, KB, MB
+from s3transfer.constants import (
+    ALLOWED_DOWNLOAD_ARGS,
+    FULL_OBJECT_CHECKSUM_ARGS,
+    KB,
+    MB,
+)
 from s3transfer.copies import CopySubmissionTask
 from s3transfer.delete import DeleteSubmissionTask
 from s3transfer.download import DownloadSubmissionTask
@@ -35,8 +40,8 @@
     OSUtils,
     SlidingWindowSemaphore,
     TaskSemaphore,
-    add_s3express_defaults,
     get_callbacks,
+    set_default_checksum_algorithm,
     signal_not_transferring,
     signal_transferring,
 )
@@ -157,7 +162,7 @@
 class TransferManager:
     ALLOWED_DOWNLOAD_ARGS = ALLOWED_DOWNLOAD_ARGS
 
-    ALLOWED_UPLOAD_ARGS = [
+    _ALLOWED_SHARED_ARGS = [
         'ACL',
         'CacheControl',
         'ChecksumAlgorithm',
@@ -187,7 +192,16 @@
         'WebsiteRedirectLocation',
     ]
 
-    ALLOWED_COPY_ARGS = ALLOWED_UPLOAD_ARGS + [
+    ALLOWED_UPLOAD_ARGS = (
+        _ALLOWED_SHARED_ARGS
+        + [
+            'ChecksumType',
+            'MpuObjectSize',
+        ]
+        + FULL_OBJECT_CHECKSUM_ARGS
+    )
+
+    ALLOWED_COPY_ARGS = _ALLOWED_SHARED_ARGS + [
         'CopySourceIfMatch',
         'CopySourceIfModifiedSince',
         'CopySourceIfNoneMatch',
@@ -315,13 +329,13 @@
         :rtype: s3transfer.futures.TransferFuture
         :returns: Transfer future representing the upload
         """
-        if extra_args is None:
-            extra_args = {}
+
+        extra_args = extra_args.copy() if extra_args else {}
         if subscribers is None:
             subscribers = []
         self._validate_all_known_args(extra_args, self.ALLOWED_UPLOAD_ARGS)
         self._validate_if_bucket_supported(bucket)
-        self._add_operation_defaults(bucket, extra_args)
+        self._add_operation_defaults(extra_args)
         call_args = CallArgs(
             fileobj=fileobj,
             bucket=bucket,
@@ -504,8 +518,8 @@
                     "must be one of: {}".format(kwarg, ', '.join(allowed))
                 )
 
-    def _add_operation_defaults(self, bucket, extra_args):
-        add_s3express_defaults(bucket, extra_args)
+    def _add_operation_defaults(self, extra_args):
+        set_default_checksum_algorithm(extra_args)
 
     def _submit_transfer(
         self, call_args, submission_task_cls, extra_main_kwargs=None
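
The new default applied here is easiest to see in isolation (a sketch based
on set_default_checksum_algorithm from s3transfer/utils.py further down; per
the updated unit tests the default resolves to 'CRC32' for every bucket
type, not just S3 Express):

  from s3transfer.utils import set_default_checksum_algorithm

  args = {}
  set_default_checksum_algorithm(args)
  # args is now {'ChecksumAlgorithm': 'CRC32'}

  args = {'ChecksumSHA256': 'placeholder=='}
  set_default_checksum_algorithm(args)
  # unchanged: a user-provided full-object checksum suppresses the default
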
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/s3transfer-0.10.4/s3transfer/upload.py 
new/s3transfer-0.11.1/s3transfer/upload.py
--- old/s3transfer-0.10.4/s3transfer/upload.py  2024-11-20 20:03:20.000000000 +0100
+++ new/s3transfer-0.11.1/s3transfer/upload.py  2025-01-16 20:10:58.000000000 +0100
@@ -14,6 +14,7 @@
 from io import BytesIO
 
 from s3transfer.compat import readable, seekable
+from s3transfer.constants import FULL_OBJECT_CHECKSUM_ARGS
 from s3transfer.futures import IN_MEMORY_UPLOAD_TAG
 from s3transfer.tasks import (
     CompleteMultipartUploadTask,
@@ -512,6 +513,10 @@
 class UploadSubmissionTask(SubmissionTask):
     """Task for submitting tasks to execute an upload"""
 
+    PUT_OBJECT_BLOCKLIST = ["ChecksumType", "MpuObjectSize"]
+
+    CREATE_MULTIPART_BLOCKLIST = FULL_OBJECT_CHECKSUM_ARGS + ["MpuObjectSize"]
+
     UPLOAD_PART_ARGS = [
         'ChecksumAlgorithm',
         'SSECustomerKey',
@@ -527,7 +532,9 @@
         'SSECustomerKeyMD5',
         'RequestPayer',
         'ExpectedBucketOwner',
-    ]
+        'ChecksumType',
+        'MpuObjectSize',
+    ] + FULL_OBJECT_CHECKSUM_ARGS
 
     def _get_upload_input_manager_cls(self, transfer_future):
         """Retrieves a class for managing input for an upload based on file 
type
@@ -621,6 +628,10 @@
     ):
         call_args = transfer_future.meta.call_args
 
+        put_object_extra_args = self._extra_put_object_args(
+            call_args.extra_args
+        )
+
         # Get any tags that need to be associated to the put object task
         put_object_tag = self._get_upload_task_tag(
             upload_input_manager, 'put_object'
@@ -638,7 +649,7 @@
                     ),
                     'bucket': call_args.bucket,
                     'key': call_args.key,
-                    'extra_args': call_args.extra_args,
+                    'extra_args': put_object_extra_args,
                 },
                 is_final=True,
             ),
@@ -656,6 +667,19 @@
     ):
         call_args = transfer_future.meta.call_args
 
+        # When a user provided checksum is passed, set "ChecksumType" to "FULL_OBJECT"
+        # and "ChecksumAlgorithm" to the related algorithm.
+        for checksum in FULL_OBJECT_CHECKSUM_ARGS:
+            if checksum in call_args.extra_args:
+                call_args.extra_args["ChecksumType"] = "FULL_OBJECT"
+                call_args.extra_args["ChecksumAlgorithm"] = checksum.replace(
+                    "Checksum", ""
+                )
+
+        create_multipart_extra_args = self._extra_create_multipart_args(
+            call_args.extra_args
+        )
+
         # Submit the request to create a multipart upload.
         create_multipart_future = self._transfer_coordinator.submit(
             request_executor,
@@ -665,7 +689,7 @@
                     'client': client,
                     'bucket': call_args.bucket,
                     'key': call_args.key,
-                    'extra_args': call_args.extra_args,
+                    'extra_args': create_multipart_extra_args,
                 },
             ),
         )
@@ -739,6 +763,16 @@
     def _extra_complete_multipart_args(self, extra_args):
         return get_filtered_dict(extra_args, self.COMPLETE_MULTIPART_ARGS)
 
+    def _extra_create_multipart_args(self, extra_args):
+        return get_filtered_dict(
+            extra_args, blocklisted_keys=self.CREATE_MULTIPART_BLOCKLIST
+        )
+
+    def _extra_put_object_args(self, extra_args):
+        return get_filtered_dict(
+            extra_args, blocklisted_keys=self.PUT_OBJECT_BLOCKLIST
+        )
+
     def _get_upload_task_tag(self, upload_input_manager, operation_name):
         tag = None
         if upload_input_manager.stores_body_in_memory(operation_name):
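
The effect of the two new blocklists is that a single extra_args dict is
split differently per S3 call (a sketch; the argument values below are
placeholders):

  from s3transfer.upload import UploadSubmissionTask
  from s3transfer.utils import get_filtered_dict

  extra_args = {
      'ChecksumCRC32': 'placeholder==',  # user-provided full-object checksum
      'ChecksumType': 'FULL_OBJECT',     # added automatically for multipart
      'MpuObjectSize': 12345,
      'ACL': 'private',
  }

  # PutObject keeps the checksum value but drops the multipart-only keys.
  put_object_args = get_filtered_dict(
      extra_args, blocklisted_keys=UploadSubmissionTask.PUT_OBJECT_BLOCKLIST
  )

  # CreateMultipartUpload keeps ChecksumType and ACL but drops the concrete
  # checksum value and MpuObjectSize.
  create_mpu_args = get_filtered_dict(
      extra_args,
      blocklisted_keys=UploadSubmissionTask.CREATE_MULTIPART_BLOCKLIST,
  )
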
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/s3transfer-0.10.4/s3transfer/utils.py 
new/s3transfer-0.11.1/s3transfer/utils.py
--- old/s3transfer-0.10.4/s3transfer/utils.py   2024-11-20 20:03:20.000000000 +0100
+++ new/s3transfer-0.11.1/s3transfer/utils.py   2025-01-16 20:10:58.000000000 +0100
@@ -26,10 +26,11 @@
     ReadTimeoutError,
     ResponseStreamingError,
 )
-from botocore.httpchecksum import AwsChunkedWrapper
+from botocore.httpchecksum import DEFAULT_CHECKSUM_ALGORITHM, AwsChunkedWrapper
 from botocore.utils import is_s3express_bucket
 
 from s3transfer.compat import SOCKET_ERROR, fallocate, rename_file
+from s3transfer.constants import FULL_OBJECT_CHECKSUM_ARGS
 
 MAX_PARTS = 10000
 # The maximum file size you can upload via S3 per request.
@@ -148,20 +149,27 @@
             callback(bytes_transferred=bytes_transferred)
 
 
-def get_filtered_dict(original_dict, whitelisted_keys):
-    """Gets a dictionary filtered by whitelisted keys
+def get_filtered_dict(
+    original_dict, whitelisted_keys=None, blocklisted_keys=None
+):
+    """Gets a dictionary filtered by whitelisted and blocklisted keys.
 
     :param original_dict: The original dictionary of arguments to source keys
         and values.
     :param whitelisted_key: A list of keys to include in the filtered
         dictionary.
+    :param blocklisted_key: A list of keys to exclude in the filtered
+        dictionary.
 
     :returns: A dictionary containing key/values from the original dictionary
-        whose key was included in the whitelist
+        whose key was included in the whitelist and/or not included in the
+        blocklist.
     """
     filtered_dict = {}
     for key, value in original_dict.items():
-        if key in whitelisted_keys:
+        if (whitelisted_keys and key in whitelisted_keys) or (
+            blocklisted_keys and key not in blocklisted_keys
+        ):
             filtered_dict[key] = value
     return filtered_dict
 
@@ -809,6 +817,17 @@
 
 
 def add_s3express_defaults(bucket, extra_args):
+    """
+    This function has been deprecated, but is kept for backwards compatibility.
+    This function is subject to removal in a future release.
+    """
     if is_s3express_bucket(bucket) and "ChecksumAlgorithm" not in extra_args:
         # Default Transfer Operations to S3Express to use CRC32
         extra_args["ChecksumAlgorithm"] = "crc32"
+
+
+def set_default_checksum_algorithm(extra_args):
+    """Set the default algorithm to CRC32 if not specified by the user."""
+    if any(checksum in extra_args for checksum in FULL_OBJECT_CHECKSUM_ARGS):
+        return
+    extra_args.setdefault("ChecksumAlgorithm", DEFAULT_CHECKSUM_ALGORITHM)
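
The extended helper now filters in either direction; a quick sketch of both
modes with made-up keys:

  from s3transfer.utils import get_filtered_dict

  original = {'A': 1, 'B': 2, 'C': 3}

  get_filtered_dict(original, whitelisted_keys=['A', 'C'])
  # -> {'A': 1, 'C': 3}

  get_filtered_dict(original, blocklisted_keys=['A'])
  # -> {'B': 2, 'C': 3}
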
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/s3transfer-0.10.4/s3transfer.egg-info/PKG-INFO 
new/s3transfer-0.11.1/s3transfer.egg-info/PKG-INFO
--- old/s3transfer-0.10.4/s3transfer.egg-info/PKG-INFO  2024-11-20 
20:06:53.000000000 +0100
+++ new/s3transfer-0.11.1/s3transfer.egg-info/PKG-INFO  2025-01-16 
20:14:38.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: s3transfer
-Version: 0.10.4
+Version: 0.11.1
 Summary: An Amazon S3 Transfer Manager
 Home-page: https://github.com/boto/s3transfer
 Author: Amazon Web Services
@@ -22,9 +22,9 @@
 Requires-Python: >= 3.8
 License-File: LICENSE.txt
 License-File: NOTICE.txt
-Requires-Dist: botocore<2.0a.0,>=1.33.2
+Requires-Dist: botocore<2.0a.0,>=1.36.0
 Provides-Extra: crt
-Requires-Dist: botocore[crt]<2.0a.0,>=1.33.2; extra == "crt"
+Requires-Dist: botocore[crt]<2.0a.0,>=1.36.0; extra == "crt"
 
 =====================================================
 s3transfer - An Amazon S3 Transfer Manager for Python
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/s3transfer-0.10.4/s3transfer.egg-info/requires.txt 
new/s3transfer-0.11.1/s3transfer.egg-info/requires.txt
--- old/s3transfer-0.10.4/s3transfer.egg-info/requires.txt      2024-11-20 
20:06:53.000000000 +0100
+++ new/s3transfer-0.11.1/s3transfer.egg-info/requires.txt      2025-01-16 
20:14:38.000000000 +0100
@@ -1,4 +1,4 @@
-botocore<2.0a.0,>=1.33.2
+botocore<2.0a.0,>=1.36.0
 
 [crt]
-botocore[crt]<2.0a.0,>=1.33.2
+botocore[crt]<2.0a.0,>=1.36.0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/s3transfer-0.10.4/setup.cfg 
new/s3transfer-0.11.1/setup.cfg
--- old/s3transfer-0.10.4/setup.cfg     2024-11-20 20:06:53.226139300 +0100
+++ new/s3transfer-0.11.1/setup.cfg     2025-01-16 20:14:38.242988600 +0100
@@ -3,10 +3,10 @@
 
 [metadata]
 requires_dist = 
-       botocore>=1.33.2,<2.0a.0
+       botocore>=1.36.0,<2.0a.0
 
 [options.extras_require]
-crt = botocore[crt]>=1.33.2,<2.0a0
+crt = botocore[crt]>=1.36.0,<2.0a0
 
 [egg_info]
 tag_build = 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/s3transfer-0.10.4/setup.py 
new/s3transfer-0.11.1/setup.py
--- old/s3transfer-0.10.4/setup.py      2024-11-20 20:03:20.000000000 +0100
+++ new/s3transfer-0.11.1/setup.py      2025-01-16 20:14:38.000000000 +0100
@@ -9,7 +9,7 @@
 
 
 requires = [
-    'botocore>=1.33.2,<2.0a.0',
+    'botocore>=1.36.0,<2.0a.0',
 ]
 
 
@@ -30,7 +30,7 @@
     include_package_data=True,
     install_requires=requires,
     extras_require={
-        'crt': 'botocore[crt]>=1.33.2,<2.0a.0',
+        'crt': 'botocore[crt]>=1.36.0,<2.0a.0',
     },
     license="Apache License 2.0",
     python_requires=">= 3.8",
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/s3transfer-0.10.4/tests/functional/test_crt.py 
new/s3transfer-0.11.1/tests/functional/test_crt.py
--- old/s3transfer-0.10.4/tests/functional/test_crt.py  2024-11-20 
20:06:53.000000000 +0100
+++ new/s3transfer-0.11.1/tests/functional/test_crt.py  2025-01-16 
20:10:58.000000000 +0100
@@ -116,6 +116,7 @@
         expected_body_content=None,
         expected_content_length=None,
         expected_missing_headers=None,
+        expected_extra_headers=None,
     ):
         if expected_host is None:
             expected_host = self.expected_host
@@ -138,12 +139,15 @@
                 crt_http_request.headers.get('Content-Length'),
                 str(expected_content_length),
             )
+        header_names = [
+            header[0].lower() for header in crt_http_request.headers
+        ]
         if expected_missing_headers is not None:
-            header_names = [
-                header[0].lower() for header in crt_http_request.headers
-            ]
             for expected_missing_header in expected_missing_headers:
                 self.assertNotIn(expected_missing_header.lower(), header_names)
+        if expected_extra_headers is not None:
+            for header, value in expected_extra_headers.items():
+                self.assertEqual(crt_http_request.headers.get(header), value)
 
     def _assert_expected_s3express_request(
         self, make_request_kwargs, expected_http_method='GET'
@@ -447,6 +451,37 @@
             expected_http_method='PUT',
         )
 
+    def test_upload_with_full_checksum(self):
+        future = self.transfer_manager.upload(
+            self.filename,
+            self.bucket,
+            self.key,
+            {"ChecksumCRC32": "abc123"},
+            [self.record_subscriber],
+        )
+        future.result()
+
+        callargs_kwargs = self.s3_crt_client.make_request.call_args[1]
+        self.assertEqual(
+            callargs_kwargs,
+            {
+                'request': mock.ANY,
+                'type': awscrt.s3.S3RequestType.PUT_OBJECT,
+                'send_filepath': self.filename,
+                'on_progress': mock.ANY,
+                'on_done': mock.ANY,
+                'checksum_config': None,
+            },
+        )
+        self._assert_expected_crt_http_request(
+            callargs_kwargs["request"],
+            expected_http_method='PUT',
+            expected_content_length=len(self.expected_content),
+            expected_missing_headers=['Content-MD5'],
+            expected_extra_headers={"x-amz-checksum-crc32": "abc123"},
+        )
+        self._assert_subscribers_called(future)
+
     def test_download(self):
         future = self.transfer_manager.download(
             self.bucket, self.key, self.filename, {}, [self.record_subscriber]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/s3transfer-0.10.4/tests/functional/test_upload.py 
new/s3transfer-0.11.1/tests/functional/test_upload.py
--- old/s3transfer-0.10.4/tests/functional/test_upload.py       2024-11-20 
20:03:20.000000000 +0100
+++ new/s3transfer-0.11.1/tests/functional/test_upload.py       2025-01-16 
20:10:58.000000000 +0100
@@ -22,6 +22,7 @@
 from botocore.stub import ANY
 
 from s3transfer.manager import TransferConfig, TransferManager
+from s3transfer.upload import UploadSubmissionTask
 from s3transfer.utils import ChunksizeAdjuster
 from tests import (
     BaseGeneralInterfaceTest,
@@ -147,7 +148,12 @@
         if bucket is None:
             bucket = self.bucket
 
-        expected_params = {'Body': ANY, 'Bucket': bucket, 'Key': self.key}
+        expected_params = {
+            'Body': ANY,
+            'Bucket': bucket,
+            'Key': self.key,
+            'ChecksumAlgorithm': 'CRC32',
+        }
         if extra_expected_params:
             expected_params.update(extra_expected_params)
         upload_response = self.create_stubbed_responses()[0]
@@ -186,7 +192,6 @@
         self.assertFalse("ChecksumAlgorithm" in self.extra_args)
 
         self.add_put_object_response_with_default_expected_params(
-            extra_expected_params={'ChecksumAlgorithm': 'crc32'},
             bucket=s3express_bucket,
         )
         future = self.manager.upload(
@@ -278,7 +283,12 @@
 
     def test_allowed_upload_params_are_valid(self):
         op_model = self.client.meta.service_model.operation_model('PutObject')
-        for allowed_upload_arg in self._manager.ALLOWED_UPLOAD_ARGS:
+        allowed_upload_arg = [
+            arg
+            for arg in self._manager.ALLOWED_UPLOAD_ARGS
+            if arg not in UploadSubmissionTask.PUT_OBJECT_BLOCKLIST
+        ]
+        for allowed_upload_arg in allowed_upload_arg:
             self.assertIn(allowed_upload_arg, op_model.input_shape.members)
 
     def test_upload_with_bandwidth_limiter(self):
@@ -367,7 +377,11 @@
         if bucket is None:
             bucket = self.bucket
 
-        expected_params = {'Bucket': bucket, 'Key': self.key}
+        expected_params = {
+            'Bucket': bucket,
+            'Key': self.key,
+            'ChecksumAlgorithm': 'CRC32',
+        }
         if extra_expected_params:
             expected_params.update(extra_expected_params)
         response = self.create_stubbed_responses()[0]
@@ -392,15 +406,15 @@
                 'UploadId': self.multipart_id,
                 'Body': ANY,
                 'PartNumber': i + 1,
+                'ChecksumAlgorithm': 'CRC32',
             }
             if extra_expected_params:
                 expected_params.update(extra_expected_params)
-                # If ChecksumAlgorithm is present stub the response checksums
-                if 'ChecksumAlgorithm' in extra_expected_params:
-                    name = extra_expected_params['ChecksumAlgorithm']
-                    checksum_member = f'Checksum{name.upper()}'
-                    response = upload_part_response['service_response']
-                    response[checksum_member] = 'sum%s==' % (i + 1)
+
+            name = expected_params['ChecksumAlgorithm']
+            checksum_member = f'Checksum{name.upper()}'
+            response = upload_part_response['service_response']
+            response[checksum_member] = f'sum{i+1}=='
 
             upload_part_response['expected_params'] = expected_params
             self.stubber.add_response(**upload_part_response)
@@ -419,9 +433,21 @@
             'UploadId': self.multipart_id,
             'MultipartUpload': {
                 'Parts': [
-                    {'ETag': 'etag-1', 'PartNumber': 1},
-                    {'ETag': 'etag-2', 'PartNumber': 2},
-                    {'ETag': 'etag-3', 'PartNumber': 3},
+                    {
+                        'ETag': 'etag-1',
+                        'PartNumber': 1,
+                        'ChecksumCRC32': 'sum1==',
+                    },
+                    {
+                        'ETag': 'etag-2',
+                        'PartNumber': 2,
+                        'ChecksumCRC32': 'sum2==',
+                    },
+                    {
+                        'ETag': 'etag-3',
+                        'PartNumber': 3,
+                        'ChecksumCRC32': 'sum3==',
+                    },
                 ]
             },
         }
@@ -539,17 +565,22 @@
         self.stubber.add_response(
             method='create_multipart_upload',
             service_response={'UploadId': self.multipart_id},
-            expected_params={'Bucket': self.bucket, 'Key': self.key},
+            expected_params={
+                'Bucket': self.bucket,
+                'Key': self.key,
+                'ChecksumAlgorithm': 'CRC32',
+            },
         )
         self.stubber.add_response(
             method='upload_part',
-            service_response={'ETag': 'etag-1'},
+            service_response={'ETag': 'etag-1', 'ChecksumCRC32': 'sum1=='},
             expected_params={
                 'Bucket': self.bucket,
                 'Body': ANY,
                 'Key': self.key,
                 'UploadId': self.multipart_id,
                 'PartNumber': 1,
+                'ChecksumAlgorithm': 'CRC32',
             },
         )
         # With the upload part failing this should immediately initiate
@@ -639,13 +670,13 @@
 
         # ChecksumAlgorithm should be passed on the create_multipart call
         self.add_create_multipart_response_with_default_expected_params(
-            extra_expected_params={'ChecksumAlgorithm': 'crc32'},
+            extra_expected_params={'ChecksumAlgorithm': 'CRC32'},
             bucket=s3express_bucket,
         )
 
         # ChecksumAlgorithm should be forwarded and a SHA1 will come back
         self.add_upload_part_responses_with_default_expected_params(
-            extra_expected_params={'ChecksumAlgorithm': 'crc32'},
+            extra_expected_params={'ChecksumAlgorithm': 'CRC32'},
             bucket=s3express_bucket,
         )
 
@@ -700,6 +731,31 @@
         self.add_complete_multipart_response_with_default_expected_params(
             extra_expected_params=params
         )
+        future = self.manager.upload(
+            self.filename, self.bucket, self.key, self.extra_args
+        )
+        future.result()
+        self.assert_expected_client_calls_were_correct()
+
+    def test_multipart_upload_with_full_object_checksum_args(self):
+        checksum_type_param = {
+            'ChecksumType': 'FULL_OBJECT',
+        }
+        params = {
+            'ChecksumCRC32': 'example-checksum-value',
+            'MpuObjectSize': 12345,
+        }
+        params.update(checksum_type_param)
+        self.extra_args.update(params)
+
+        self.add_create_multipart_response_with_default_expected_params(
+            extra_expected_params=checksum_type_param
+        )
+
+        self.add_upload_part_responses_with_default_expected_params()
+        self.add_complete_multipart_response_with_default_expected_params(
+            extra_expected_params=params
+        )
         future = self.manager.upload(
             self.filename, self.bucket, self.key, self.extra_args
         )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/s3transfer-0.10.4/tests/unit/test_utils.py 
new/s3transfer-0.11.1/tests/unit/test_utils.py
--- old/s3transfer-0.10.4/tests/unit/test_utils.py      2024-11-20 
20:03:20.000000000 +0100
+++ new/s3transfer-0.11.1/tests/unit/test_utils.py      2025-01-16 
20:10:58.000000000 +0100
@@ -38,13 +38,13 @@
     SlidingWindowSemaphore,
     StreamReaderProgress,
     TaskSemaphore,
-    add_s3express_defaults,
     calculate_num_parts,
     calculate_range_parameter,
     get_callbacks,
     get_filtered_dict,
     invoke_progress_callbacks,
     random_file_extension,
+    set_default_checksum_algorithm,
 )
 from tests import NonSeekableWriter, RecordingSubscriber, mock, unittest
 
@@ -1192,32 +1192,44 @@
         self.assertEqual(new_size, MAX_SINGLE_UPLOAD_SIZE)
 
 
-class TestS3ExpressDefaults:
+class TestS3Defaults:
     @pytest.mark.parametrize(
         "bucket,extra_args,expected",
         (
             (
                 "mytestbucket--usw2-az2--x-s3",
                 {},
-                {"ChecksumAlgorithm": "crc32"},
+                {"ChecksumAlgorithm": "CRC32"},
             ),
             (
                 "mytestbucket--usw2-az2--x-s3",
                 {"Some": "Setting"},
-                {"ChecksumAlgorithm": "crc32", "Some": "Setting"},
+                {"ChecksumAlgorithm": "CRC32", "Some": "Setting"},
+            ),
+            (
+                "mytestbucket--usw2-az2--x-s3",
+                {"ChecksumAlgorithm": "sha256"},
+                {"ChecksumAlgorithm": "sha256"},
             ),
             (
                 "mytestbucket",
                 {},
-                {},
+                {"ChecksumAlgorithm": "CRC32"},
             ),
             (
-                "mytestbucket--usw2-az2--x-s3",
+                "mytestbucket",
+                {"Some": "Setting"},
+                {"ChecksumAlgorithm": "CRC32", "Some": "Setting"},
+            ),
+            (
+                "mytestbucket",
                 {"ChecksumAlgorithm": "sha256"},
                 {"ChecksumAlgorithm": "sha256"},
             ),
         ),
     )
-    def test_add_s3express_defaults(self, bucket, extra_args, expected):
-        add_s3express_defaults(bucket, extra_args)
+    def test_set_default_checksum_algorithm(
+        self, bucket, extra_args, expected
+    ):
+        set_default_checksum_algorithm(extra_args)
         assert extra_args == expected
