Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-s3transfer for openSUSE:Factory checked in at 2024-10-23 21:08:06

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-s3transfer (Old)
 and      /work/SRC/openSUSE:Factory/.python-s3transfer.new.26871 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-s3transfer"

Wed Oct 23 21:08:06 2024 rev:32 rq:1217350 version:0.10.3

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-s3transfer/python-s3transfer.changes     2024-07-04 16:22:53.322665616 +0200
+++ /work/SRC/openSUSE:Factory/.python-s3transfer.new.26871/python-s3transfer.changes  2024-10-23 21:08:20.717875955 +0200
@@ -1,0 +2,6 @@
+Wed Oct 23 10:14:09 UTC 2024 - John Paul Adrian Glaubitz <[email protected]>
+
+- Update to version 0.10.3
+  * enhancement:Python: Added provisional Python 3.13 support to s3transfer
+
+-------------------------------------------------------------------

Old:
----
  s3transfer-0.10.2.tar.gz

New:
----
  s3transfer-0.10.3.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-s3transfer.spec ++++++
--- /var/tmp/diff_new_pack.ILUwRk/_old  2024-10-23 21:08:21.309900534 +0200
+++ /var/tmp/diff_new_pack.ILUwRk/_new  2024-10-23 21:08:21.313900700 +0200
@@ -18,7 +18,7 @@
 
 %{?sle15_python_module_pythons}
 Name:           python-s3transfer
-Version:        0.10.2
+Version:        0.10.3
 Release:        0
 Summary:        Python S3 transfer manager
 License:        Apache-2.0

++++++ s3transfer-0.10.2.tar.gz -> s3transfer-0.10.3.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/PKG-INFO new/s3transfer-0.10.3/PKG-INFO
--- old/s3transfer-0.10.2/PKG-INFO      2024-06-24 23:03:01.450911300 +0200
+++ new/s3transfer-0.10.3/PKG-INFO      2024-10-08 20:06:31.916945200 +0200
@@ -1,12 +1,11 @@
 Metadata-Version: 2.1
 Name: s3transfer
-Version: 0.10.2
+Version: 0.10.3
 Summary: An Amazon S3 Transfer Manager
 Home-page: https://github.com/boto/s3transfer
 Author: Amazon Web Services
 Author-email: [email protected]
 License: Apache License 2.0
-Platform: UNKNOWN
 Classifier: Development Status :: 3 - Alpha
 Classifier: Intended Audience :: Developers
 Classifier: Natural Language :: English
@@ -19,10 +18,13 @@
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Requires-Python: >= 3.8
-Provides-Extra: crt
 License-File: LICENSE.txt
 License-File: NOTICE.txt
+Requires-Dist: botocore<2.0a.0,>=1.33.2
+Provides-Extra: crt
+Requires-Dist: botocore[crt]<2.0a.0,>=1.33.2; extra == "crt"
 
 =====================================================
 s3transfer - An Amazon S3 Transfer Manager for Python
 =====================================================
@@ -37,5 +39,3 @@
 production, make sure to lock to a minor version as interfaces may break
 from minor version to minor version.
 For a basic, stable interface of s3transfer, try the interfaces exposed in `boto3 <https://boto3.readthedocs.io/en/latest/guide/s3.html#using-the-transfer-manager>`__
-
-
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/pyproject.toml new/s3transfer-0.10.3/pyproject.toml
--- old/s3transfer-0.10.2/pyproject.toml        2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/pyproject.toml        2024-10-08 20:06:31.000000000 +0200
@@ -3,12 +3,63 @@
     "slow: marks tests as slow",
 ]
 
-[tool.isort]
-profile = "black"
-line_length = 79
-honor_noqa = true
-src_paths = ["s3transfer", "tests"]
+[tool.ruff]
+exclude = [
+    ".bzr",
+    ".direnv",
+    ".eggs",
+    ".git",
+    ".git-rewrite",
+    ".hg",
+    ".ipynb_checkpoints",
+    ".mypy_cache",
+    ".nox",
+    ".pants.d",
+    ".pyenv",
+    ".pytest_cache",
+    ".pytype",
+    ".ruff_cache",
+    ".svn",
+    ".tox",
+    ".venv",
+    ".vscode",
+    "__pypackages__",
+    "_build",
+    "buck-out",
+    "build",
+    "dist",
+    "node_modules",
+    "site-packages",
+    "venv",
+]
 
-[tool.black]
+# Format same as Black.
 line-length = 79
-skip_string_normalization = true
+indent-width = 4
+
+target-version = "py38"
+
+[tool.ruff.lint]
+# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
+# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
+# McCabe complexity (`C901`) by default.
+select = ["E4", "E7", "E9", "F", "I", "UP"]
+ignore = []
+
+# Allow fix for all enabled rules (when `--fix`) is provided.
+fixable = ["ALL"]
+unfixable = []
+
+# Allow unused variables when underscore-prefixed.
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+
+[tool.ruff.format]
+# Like Black, use double quotes for strings, spaces for indents
+# and trailing commas.
+quote-style = "preserve"
+indent-style = "space"
+skip-magic-trailing-comma = false
+line-ending = "auto"
+
+docstring-code-format = false
+docstring-code-line-length = "dynamic"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/s3transfer/__init__.py new/s3transfer-0.10.3/s3transfer/__init__.py
--- old/s3transfer-0.10.2/s3transfer/__init__.py        2024-06-24 23:03:01.000000000 +0200
+++ new/s3transfer-0.10.3/s3transfer/__init__.py        2024-10-08 20:06:31.000000000 +0200
@@ -123,6 +123,7 @@
 
 """
+
 import concurrent.futures
 import functools
 import logging
@@ -144,7 +145,7 @@
 from s3transfer.exceptions import RetriesExceededError, S3UploadFailedError
 
 __author__ = 'Amazon Web Services'
-__version__ = '0.10.2'
+__version__ = '0.10.3'
 
 
 class NullHandler(logging.Handler):
@@ -813,8 +814,8 @@
         for kwarg in actual:
             if kwarg not in allowed:
                 raise ValueError(
-                    "Invalid extra_args key '%s', "
-                    "must be one of: %s" % (kwarg, ', '.join(allowed))
+                    f"Invalid extra_args key '{kwarg}', "
+                    f"must be one of: {', '.join(allowed)}"
                 )
 
     def _ranged_download(
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/s3transfer/bandwidth.py new/s3transfer-0.10.3/s3transfer/bandwidth.py
--- old/s3transfer-0.10.2/s3transfer/bandwidth.py       2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/s3transfer/bandwidth.py       2024-10-08 20:06:31.000000000 +0200
@@ -30,9 +30,7 @@
         """
         self.requested_amt = requested_amt
         self.retry_time = retry_time
-        msg = 'Request amount {} exceeded the amount available. Retry in {}'.format(
-            requested_amt, retry_time
-        )
+        msg = f'Request amount {requested_amt} exceeded the amount available. Retry in {retry_time}'
         super().__init__(msg)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/s3transfer/constants.py new/s3transfer-0.10.3/s3transfer/constants.py
--- old/s3transfer-0.10.2/s3transfer/constants.py       2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/s3transfer/constants.py       2024-10-08 20:06:31.000000000 +0200
@@ -26,5 +26,5 @@
     'ExpectedBucketOwner',
 ]
 
-USER_AGENT = 's3transfer/%s' % s3transfer.__version__
-PROCESS_USER_AGENT = '%s processpool' % USER_AGENT
+USER_AGENT = f's3transfer/{s3transfer.__version__}'
+PROCESS_USER_AGENT = f'{USER_AGENT} processpool'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/s3transfer/copies.py new/s3transfer-0.10.3/s3transfer/copies.py
--- old/s3transfer-0.10.2/s3transfer/copies.py  2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/s3transfer/copies.py  2024-10-08 20:06:31.000000000 +0200
@@ -280,7 +280,7 @@
             raise TypeError(
                 'Expecting dictionary formatted: '
                 '{"Bucket": bucket_name, "Key": key} '
-                'but got %s or type %s.' % (copy_source, type(copy_source))
+                f'but got {copy_source} or type {type(copy_source)}.'
             )
 
     def _extra_upload_part_args(self, extra_args):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/s3transfer/download.py new/s3transfer-0.10.3/s3transfer/download.py
--- old/s3transfer-0.10.2/s3transfer/download.py        2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/s3transfer/download.py        2024-10-08 20:06:31.000000000 +0200
@@ -307,9 +307,7 @@
             if download_manager_cls.is_compatible(fileobj, osutil):
                 return download_manager_cls
         raise RuntimeError(
-            'Output {} of type: {} is not supported.'.format(
-                fileobj, type(fileobj)
-            )
+            f'Output {fileobj} of type: {type(fileobj)} is not supported.'
         )
 
     def _submit(
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/s3transfer/futures.py new/s3transfer-0.10.3/s3transfer/futures.py
--- old/s3transfer-0.10.2/s3transfer/futures.py 2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/s3transfer/futures.py 2024-10-08 20:06:31.000000000 +0200
@@ -175,9 +175,7 @@
         self._failure_cleanups_lock = threading.Lock()
 
     def __repr__(self):
-        return '{}(transfer_id={})'.format(
-            self.__class__.__name__, self.transfer_id
-        )
+        return f'{self.__class__.__name__}(transfer_id={self.transfer_id})'
 
     @property
     def exception(self):
@@ -295,8 +293,8 @@
         with self._lock:
             if self.done():
                 raise RuntimeError(
-                    'Unable to transition from done state %s to non-done '
-                    'state %s.' % (self.status, desired_state)
+                    f'Unable to transition from done state {self.status} to non-done '
+                    f'state {desired_state}.'
                 )
             self._status = desired_state
 
@@ -316,9 +314,7 @@
         :returns: A future representing the submitted task
         """
         logger.debug(
-            "Submitting task {} to executor {} for transfer request: {}.".format(
-                task, executor, self.transfer_id
-            )
+            f"Submitting task {task} to executor {executor} for transfer request: {self.transfer_id}."
         )
         future = executor.submit(task, tag=tag)
         # Add this created future to the list of associated future just
@@ -400,7 +396,7 @@
             # We do not want a callback interrupting the process, especially
             # in the failure cleanups. So log and catch, the exception.
             except Exception:
-                logger.debug("Exception raised in %s." % callback, exc_info=True)
+                logger.debug(f"Exception raised in {callback}.", exc_info=True)
 
 
 class BoundedExecutor:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/s3transfer/manager.py new/s3transfer-0.10.3/s3transfer/manager.py
--- old/s3transfer-0.10.2/s3transfer/manager.py 2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/s3transfer/manager.py 2024-10-08 20:06:31.000000000 +0200
@@ -149,8 +149,8 @@
         for attr, attr_val in self.__dict__.items():
             if attr_val is not None and attr_val <= 0:
                 raise ValueError(
-                    'Provided parameter %s of value %s must be greater than '
-                    '0.' % (attr, attr_val)
+                    f'Provided parameter {attr} of value {attr_val} must '
+                    'be greater than 0.'
                 )
 
@@ -492,16 +492,16 @@
         match = pattern.match(bucket)
         if match:
             raise ValueError(
-                'TransferManager methods do not support %s '
-                'resource. Use direct client calls instead.' % resource
+                f'TransferManager methods do not support {resource} '
+                'resource. Use direct client calls instead.'
             )
 
     def _validate_all_known_args(self, actual, allowed):
         for kwarg in actual:
             if kwarg not in allowed:
                 raise ValueError(
-                    "Invalid extra_args key '%s', "
-                    "must be one of: %s" % (kwarg, ', '.join(allowed))
+                    "Invalid extra_args key '{}', "
+                    "must be one of: {}".format(kwarg, ', '.join(allowed))
                 )
 
     def _add_operation_defaults(self, bucket, extra_args):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/s3transfer/processpool.py new/s3transfer-0.10.3/s3transfer/processpool.py
--- old/s3transfer-0.10.2/s3transfer/processpool.py     2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/s3transfer/processpool.py     2024-10-08 20:06:31.000000000 +0200
@@ -192,6 +192,7 @@
 are using ``us-west-2`` as their region.
 
 """
+
 import collections
 import contextlib
 import logging
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/s3transfer/subscribers.py new/s3transfer-0.10.3/s3transfer/subscribers.py
--- old/s3transfer-0.10.2/s3transfer/subscribers.py     2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/s3transfer/subscribers.py     2024-10-08 20:06:31.000000000 +0200
@@ -30,20 +30,19 @@
         return super().__new__(cls)
 
     @classmethod
-    @lru_cache()
+    @lru_cache
     def _validate_subscriber_methods(cls):
         for subscriber_type in cls.VALID_SUBSCRIBER_TYPES:
             subscriber_method = getattr(cls, 'on_' + subscriber_type)
             if not callable(subscriber_method):
                 raise InvalidSubscriberMethodError(
-                    'Subscriber method %s must be callable.'
-                    % subscriber_method
+                    f'Subscriber method {subscriber_method} must be callable.'
                 )
             if not accepts_kwargs(subscriber_method):
                 raise InvalidSubscriberMethodError(
-                    'Subscriber method %s must accept keyword '
-                    'arguments (**kwargs)' % subscriber_method
+                    f'Subscriber method {subscriber_method} must accept keyword '
+                    'arguments (**kwargs)'
                 )
 
     def on_queued(self, future, **kwargs):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/s3transfer/tasks.py new/s3transfer-0.10.3/s3transfer/tasks.py
--- old/s3transfer-0.10.2/s3transfer/tasks.py   2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/s3transfer/tasks.py   2024-10-08 20:06:31.000000000 +0200
@@ -96,11 +96,7 @@
         main_kwargs_to_display = self._get_kwargs_with_params_to_include(
             self._main_kwargs, params_to_display
        )
-        return '{}(transfer_id={}, {})'.format(
-            self.__class__.__name__,
-            self._transfer_coordinator.transfer_id,
-            main_kwargs_to_display,
-        )
+        return f'{self.__class__.__name__}(transfer_id={self._transfer_coordinator.transfer_id}, {main_kwargs_to_display})'
 
     @property
     def transfer_id(self):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/s3transfer/upload.py new/s3transfer-0.10.3/s3transfer/upload.py
--- old/s3transfer-0.10.2/s3transfer/upload.py  2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/s3transfer/upload.py  2024-10-08 20:06:31.000000000 +0200
@@ -550,9 +550,7 @@
             if upload_manager_cls.is_compatible(fileobj):
                 return upload_manager_cls
         raise RuntimeError(
-            'Input {} of type: {} is not supported.'.format(
-                fileobj, type(fileobj)
-            )
+            f'Input {fileobj} of type: {type(fileobj)} is not supported.'
         )
 
     def _submit(
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/s3transfer/utils.py new/s3transfer-0.10.3/s3transfer/utils.py
--- old/s3transfer-0.10.2/s3transfer/utils.py   2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/s3transfer/utils.py   2024-10-08 20:06:31.000000000 +0200
@@ -191,9 +191,7 @@
         self._kwargs = kwargs
 
     def __repr__(self):
-        return 'Function: {} with args {} and kwargs {}'.format(
-            self._func, self._args, self._kwargs
-        )
+        return f'Function: {self._func} with args {self._args} and kwargs {self._kwargs}'
 
     def __call__(self):
         return self._func(*self._args, **self._kwargs)
@@ -636,7 +634,7 @@
         """
         logger.debug("Acquiring %s", tag)
         if not self._semaphore.acquire(blocking):
-            raise NoResourcesAvailable("Cannot acquire tag '%s'" % tag)
+            raise NoResourcesAvailable(f"Cannot acquire tag '{tag}'")
 
     def release(self, tag, acquire_token):
         """Release the semaphore
@@ -694,7 +692,7 @@
         try:
             if self._count == 0:
                 if not blocking:
-                    raise NoResourcesAvailable("Cannot acquire tag '%s'" % tag)
+                    raise NoResourcesAvailable(f"Cannot acquire tag '{tag}'")
                 else:
                     while self._count == 0:
                         self._condition.wait()
@@ -716,7 +714,7 @@
         self._condition.acquire()
         try:
             if tag not in self._tag_sequences:
-                raise ValueError("Attempted to release unknown tag: %s" % tag)
+                raise ValueError(f"Attempted to release unknown tag: {tag}")
             max_sequence = self._tag_sequences[tag]
             if self._lowest_sequence[tag] == sequence_number:
                 # We can immediately process this request and free up
@@ -743,7 +741,7 @@
             else:
                 raise ValueError(
                     "Attempted to release unknown sequence number "
-                    "%s for tag: %s" % (sequence_number, tag)
+                    f"{sequence_number} for tag: {tag}"
                 )
         finally:
             self._condition.release()
@@ -781,13 +779,13 @@
         if current_chunksize > self.max_size:
             logger.debug(
                 "Chunksize greater than maximum chunksize. "
-                "Setting to %s from %s." % (self.max_size, current_chunksize)
+                f"Setting to {self.max_size} from {current_chunksize}."
             )
             return self.max_size
         elif current_chunksize < self.min_size:
             logger.debug(
                 "Chunksize less than minimum chunksize. "
-                "Setting to %s from %s." % (self.min_size, current_chunksize)
+                f"Setting to {self.min_size} from {current_chunksize}."
             )
             return self.min_size
         else:
@@ -804,8 +802,7 @@
         if chunksize != current_chunksize:
             logger.debug(
                 "Chunksize would result in the number of parts exceeding the "
-                "maximum. Setting to %s from %s."
-                % (chunksize, current_chunksize)
+                f"maximum. Setting to {chunksize} from {current_chunksize}."
             )
             return chunksize
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/s3transfer.egg-info/PKG-INFO new/s3transfer-0.10.3/s3transfer.egg-info/PKG-INFO
--- old/s3transfer-0.10.2/s3transfer.egg-info/PKG-INFO  2024-06-24 23:03:01.000000000 +0200
+++ new/s3transfer-0.10.3/s3transfer.egg-info/PKG-INFO  2024-10-08 20:06:31.000000000 +0200
@@ -1,12 +1,11 @@
 Metadata-Version: 2.1
 Name: s3transfer
-Version: 0.10.2
+Version: 0.10.3
 Summary: An Amazon S3 Transfer Manager
 Home-page: https://github.com/boto/s3transfer
 Author: Amazon Web Services
 Author-email: [email protected]
 License: Apache License 2.0
-Platform: UNKNOWN
 Classifier: Development Status :: 3 - Alpha
 Classifier: Intended Audience :: Developers
 Classifier: Natural Language :: English
@@ -19,10 +18,13 @@
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Requires-Python: >= 3.8
-Provides-Extra: crt
 License-File: LICENSE.txt
 License-File: NOTICE.txt
+Requires-Dist: botocore<2.0a.0,>=1.33.2
+Provides-Extra: crt
+Requires-Dist: botocore[crt]<2.0a.0,>=1.33.2; extra == "crt"
 
 =====================================================
 s3transfer - An Amazon S3 Transfer Manager for Python
 =====================================================
@@ -37,5 +39,3 @@
 production, make sure to lock to a minor version as interfaces may break
 from minor version to minor version.
 For a basic, stable interface of s3transfer, try the interfaces exposed in `boto3 <https://boto3.readthedocs.io/en/latest/guide/s3.html#using-the-transfer-manager>`__
-
-
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/setup.cfg new/s3transfer-0.10.3/setup.cfg
--- old/s3transfer-0.10.2/setup.cfg     2024-06-24 23:03:01.450911300 +0200
+++ new/s3transfer-0.10.3/setup.cfg     2024-10-08 20:06:31.916945200 +0200
@@ -8,9 +8,6 @@
 [options.extras_require]
 crt = botocore[crt]>=1.33.2,<2.0a0
 
-[flake8]
-ignore = E203,E226,E501,W503,W504
-
 [egg_info]
 tag_build =
 tag_date = 0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/setup.py new/s3transfer-0.10.3/setup.py
--- old/s3transfer-0.10.2/setup.py      2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/setup.py      2024-10-08 20:06:31.000000000 +0200
@@ -47,5 +47,6 @@
         'Programming Language :: Python :: 3.10',
         'Programming Language :: Python :: 3.11',
         'Programming Language :: Python :: 3.12',
+        'Programming Language :: Python :: 3.13',
     ],
 )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/__init__.py new/s3transfer-0.10.3/tests/__init__.py
--- old/s3transfer-0.10.2/tests/__init__.py     2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/tests/__init__.py     2024-10-08 20:06:31.000000000 +0200
@@ -72,9 +72,7 @@
     second_md5 = md5_checksum(second)
     if first_md5 != second_md5:
         raise AssertionError(
-            "Files are not equal: {}(md5={}) != {}(md5={})".format(
-                first, first_md5, second, second_md5
-            )
+            f"Files are not equal: {first}(md5={first_md5}) != {second}(md5={second_md5})"
         )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/functional/test_copy.py new/s3transfer-0.10.3/tests/functional/test_copy.py
--- old/s3transfer-0.10.2/tests/functional/test_copy.py 2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/tests/functional/test_copy.py 2024-10-08 20:06:31.000000000 +0200
@@ -96,9 +96,9 @@
 
         # Add the expected create multipart upload params.
         if expected_create_mpu_params:
-            stubbed_responses[0][
-                'expected_params'
-            ] = expected_create_mpu_params
+            stubbed_responses[0]['expected_params'] = (
+                expected_create_mpu_params
+            )
 
         # Add any expected copy parameters.
         if expected_copy_params:
@@ -110,9 +110,9 @@
 
         # Add the expected complete multipart upload params.
         if expected_complete_mpu_params:
-            stubbed_responses[-1][
-                'expected_params'
-            ] = expected_complete_mpu_params
+            stubbed_responses[-1]['expected_params'] = (
+                expected_complete_mpu_params
+            )
 
         # Add the responses to the stubber.
         for stubbed_response in stubbed_responses:
@@ -396,7 +396,7 @@
         if extra_expected_params:
             if 'ChecksumAlgorithm' in extra_expected_params:
                 name = extra_expected_params['ChecksumAlgorithm']
-                checksum_member = 'Checksum%s' % name.upper()
+                checksum_member = f'Checksum{name.upper()}'
                 response = upload_part_response['service_response']
                 response['CopyPartResult'][checksum_member] = 'sum%s==' % (
                     i + 1
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/functional/test_crt.py new/s3transfer-0.10.3/tests/functional/test_crt.py
--- old/s3transfer-0.10.2/tests/functional/test_crt.py  2024-06-24 23:03:01.000000000 +0200
+++ new/s3transfer-0.10.3/tests/functional/test_crt.py  2024-10-08 20:06:31.000000000 +0200
@@ -77,7 +77,7 @@
             'myfile', self.expected_content, mode='wb'
         )
         self.expected_path = "/" + self.bucket + "/" + self.key
-        self.expected_host = "s3.%s.amazonaws.com" % (self.region)
+        self.expected_host = f"s3.{self.region}.amazonaws.com"
         self.expected_s3express_host = f'{self.s3express_bucket}.s3express-usw2-az5.us-west-2.amazonaws.com'
         self.expected_s3express_path = f'/{self.key}'
         self.s3_request = mock.Mock(awscrt.s3.S3Request)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/functional/test_download.py new/s3transfer-0.10.3/tests/functional/test_download.py
--- old/s3transfer-0.10.2/tests/functional/test_download.py    2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/tests/functional/test_download.py    2024-10-08 20:06:31.000000000 +0200
@@ -116,9 +116,9 @@
                     expected_params
                 )
                 if expected_ranges:
-                    stubbed_response['expected_params'][
-                        'Range'
-                    ] = expected_ranges[i]
+                    stubbed_response['expected_params']['Range'] = (
+                        expected_ranges[i]
+                    )
             self.stubber.add_response(**stubbed_response)
 
     def add_n_retryable_get_object_responses(self, n, num_reads=0):
@@ -141,7 +141,7 @@
         # Make sure the file exists
         self.assertTrue(os.path.exists(self.filename))
         # Make sure the random temporary file does not exist
-        possible_matches = glob.glob('%s*' % self.filename + os.extsep)
+        possible_matches = glob.glob(f'{self.filename}*' + os.extsep)
         self.assertEqual(possible_matches, [])
 
     def test_download_for_fileobj(self):
@@ -201,7 +201,7 @@
             future.result()
         # Make sure the actual file and the temporary do not exist
         # by globbing for the file and any of its extensions
-        possible_matches = glob.glob('%s*' % self.filename)
+        possible_matches = glob.glob(f'{self.filename}*')
         self.assertEqual(possible_matches, [])
 
     def test_download_with_nonexistent_directory(self):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/functional/test_processpool.py new/s3transfer-0.10.3/tests/functional/test_processpool.py
--- old/s3transfer-0.10.2/tests/functional/test_processpool.py 2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/tests/functional/test_processpool.py 2024-10-08 20:06:31.000000000 +0200
@@ -212,7 +212,7 @@
         )
         self.assertFalse(os.path.exists(self.filename))
         # Any tempfile should have been erased as well
-        possible_matches = glob.glob('%s*' % self.filename + os.extsep)
+        possible_matches = glob.glob(f'{self.filename}*' + os.extsep)
         self.assertEqual(possible_matches, [])
 
     def test_validates_extra_args(self):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/functional/test_upload.py new/s3transfer-0.10.3/tests/functional/test_upload.py
--- old/s3transfer-0.10.2/tests/functional/test_upload.py      2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/tests/functional/test_upload.py      2024-10-08 20:06:31.000000000 +0200
@@ -92,7 +92,7 @@
             data=params['Body'],
         )
         self.client.meta.events.emit(
-            'request-created.s3.%s' % model.name,
+            f'request-created.s3.{model.name}',
             request=request,
             operation_name=model.name,
         )
@@ -398,7 +398,7 @@
             # If ChecksumAlgorithm is present stub the response checksums
             if 'ChecksumAlgorithm' in extra_expected_params:
                 name = extra_expected_params['ChecksumAlgorithm']
-                checksum_member = 'Checksum%s' % name.upper()
+                checksum_member = f'Checksum{name.upper()}'
                 response = upload_part_response['service_response']
                 response[checksum_member] = 'sum%s==' % (i + 1)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/integration/test_crt.py new/s3transfer-0.10.3/tests/integration/test_crt.py
--- old/s3transfer-0.10.2/tests/integration/test_crt.py        2024-06-24 23:03:01.000000000 +0200
+++ new/s3transfer-0.10.3/tests/integration/test_crt.py        2024-10-08 20:06:31.000000000 +0200
@@ -522,6 +522,6 @@
             future.result()
         self.assertEqual(err.name, 'AWS_ERROR_S3_CANCELED')
 
-        possible_matches = glob.glob('%s*' % download_path)
+        possible_matches = glob.glob(f'{download_path}*')
         self.assertEqual(possible_matches, [])
         self._assert_subscribers_called()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/integration/test_download.py new/s3transfer-0.10.3/tests/integration/test_download.py
--- old/s3transfer-0.10.2/tests/integration/test_download.py   2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/tests/integration/test_download.py   2024-10-08 20:06:31.000000000 +0200
@@ -98,7 +98,7 @@
                     future.cancel()
                     raise RuntimeError(
                         "Download transfer did not start after waiting for "
-                        "%s seconds." % timeout
+                        f"{timeout} seconds."
                     )
             # Raise an exception which should cause the preceding
             # download to cancel and exit quickly
@@ -115,9 +115,7 @@
         self.assertLess(
             actual_time_to_exit,
             max_allowed_exit_time,
-            "Failed to exit under {}. Instead exited in {}.".format(
-                max_allowed_exit_time, actual_time_to_exit
-            ),
+            f"Failed to exit under {max_allowed_exit_time}. Instead exited in {actual_time_to_exit}.",
         )
 
         # Make sure the future was cancelled because of the KeyboardInterrupt
@@ -126,7 +124,7 @@
 
         # Make sure the actual file and the temporary do not exist
         # by globbing for the file and any of its extensions
-        possible_matches = glob.glob('%s*' % download_path)
+        possible_matches = glob.glob(f'{download_path}*')
         self.assertEqual(possible_matches, [])
 
     @skip_if_using_serial_implementation(
@@ -174,9 +172,7 @@
         self.assertLess(
             end_time - start_time,
             max_allowed_exit_time,
-            "Failed to exit under {}. Instead exited in {}.".format(
-                max_allowed_exit_time, end_time - start_time
-            ),
+            f"Failed to exit under {max_allowed_exit_time}. Instead exited in {end_time - start_time}.",
         )
 
         # Make sure at least one of the futures got cancelled
@@ -186,7 +182,7 @@
 
         # For the transfer that did get cancelled, make sure the temporary
         # file got removed.
-        possible_matches = glob.glob('%s*' % future.meta.call_args.fileobj)
+        possible_matches = glob.glob(f'{future.meta.call_args.fileobj}*')
         self.assertEqual(possible_matches, [])
 
     def test_progress_subscribers_on_download(self):
@@ -284,5 +280,5 @@
         except Exception as e:
             self.fail(
                 'Should have been able to download to /dev/null but received '
-                'following exception %s' % e
+                f'following exception {e}'
             )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/integration/test_processpool.py new/s3transfer-0.10.3/tests/integration/test_processpool.py
--- old/s3transfer-0.10.2/tests/integration/test_processpool.py        2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/tests/integration/test_processpool.py        2024-10-08 20:06:31.000000000 +0200
@@ -96,14 +96,12 @@
         self.assertLess(
             end_time - start_time,
             max_allowed_exit_time,
-            "Failed to exit under {}. Instead exited in {}.".format(
-                max_allowed_exit_time, end_time - start_time
-            ),
+            f"Failed to exit under {max_allowed_exit_time}. Instead exited in {end_time - start_time}.",
         )
 
         # Make sure the actual file and the temporary do not exist
         # by globbing for the file and any of its extensions
-        possible_matches = glob.glob('%s*' % download_path)
+        possible_matches = glob.glob(f'{download_path}*')
         self.assertEqual(possible_matches, [])
 
     def test_many_files_exits_quickly_on_exception(self):
@@ -138,12 +136,10 @@
         self.assertLess(
             end_time - start_time,
             max_allowed_exit_time,
-            "Failed to exit under {}. Instead exited in {}.".format(
-                max_allowed_exit_time, end_time - start_time
-            ),
+            f"Failed to exit under {max_allowed_exit_time}. Instead exited in {end_time - start_time}.",
         )
 
         # For the transfer that did get cancelled, make sure the temporary
         # file got removed.
-        possible_matches = glob.glob('%s*' % base_filename)
+        possible_matches = glob.glob(f'{base_filename}*')
         self.assertEqual(possible_matches, [])
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/integration/test_s3transfer.py new/s3transfer-0.10.3/tests/integration/test_s3transfer.py
--- old/s3transfer-0.10.2/tests/integration/test_s3transfer.py 2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/tests/integration/test_s3transfer.py 2024-10-08 20:06:31.000000000 +0200
@@ -31,9 +31,7 @@
     second_md5 = md5_checksum(second)
     if first_md5 != second_md5:
         raise AssertionError(
-            "Files are not equal: {}(md5={}) != {}(md5={})".format(
-                first, first_md5, second, second_md5
-            )
+            f"Files are not equal: {first}(md5={first_md5}) != {second}(md5={second_md5})"
         )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/integration/test_upload.py new/s3transfer-0.10.3/tests/integration/test_upload.py
--- old/s3transfer-0.10.2/tests/integration/test_upload.py     2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/tests/integration/test_upload.py     2024-10-08 20:06:31.000000000 +0200
@@ -84,7 +84,7 @@
                     future.cancel()
                     raise RuntimeError(
                         "Download transfer did not start after waiting for "
-                        "%s seconds." % timeout
+                        f"{timeout} seconds."
                     )
             # Raise an exception which should cause the preceding
             # download to cancel and exit quickly
@@ -101,9 +101,7 @@
         self.assertLess(
             actual_time_to_exit,
             max_allowed_exit_time,
-            "Failed to exit under {}. Instead exited in {}.".format(
-                max_allowed_exit_time, actual_time_to_exit
-            ),
+            f"Failed to exit under {max_allowed_exit_time}. Instead exited in {actual_time_to_exit}.",
         )
 
         try:
@@ -163,9 +161,7 @@
         self.assertLess(
             end_time - start_time,
             max_allowed_exit_time,
-            "Failed to exit under {}. Instead exited in {}.".format(
-                max_allowed_exit_time, end_time - start_time
-            ),
+            f"Failed to exit under {max_allowed_exit_time}. Instead exited in {end_time - start_time}.",
         )
 
         # Make sure at least one of the futures got cancelled
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/unit/test_compat.py new/s3transfer-0.10.3/tests/unit/test_compat.py
--- old/s3transfer-0.10.2/tests/unit/test_compat.py     2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/tests/unit/test_compat.py     2024-10-08 20:06:31.000000000 +0200
@@ -57,7 +57,7 @@
     def test_non_seekable_ioerror(self):
         # Should return False if IOError is thrown.
         with open(self.filename, 'w') as f:
-            self.assertFalse(seekable(ErrorRaisingSeekWrapper(f, IOError())))
+            self.assertFalse(seekable(ErrorRaisingSeekWrapper(f, OSError())))
 
     def test_non_seekable_oserror(self):
         # Should return False if OSError is thrown.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/unit/test_crt.py new/s3transfer-0.10.3/tests/unit/test_crt.py
--- old/s3transfer-0.10.2/tests/unit/test_crt.py        2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/tests/unit/test_crt.py        2024-10-08 20:06:31.000000000 +0200
@@ -103,7 +103,7 @@
         self.files = FileCreator()
         self.filename = self.files.create_file('myfile', 'my content')
         self.expected_path = "/" + self.bucket + "/" + self.key
-        self.expected_host = "s3.%s.amazonaws.com" % (self.region)
+        self.expected_host = f"s3.{self.region}.amazonaws.com"
 
     def tearDown(self):
         self.files.remove_all()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/unit/test_futures.py new/s3transfer-0.10.3/tests/unit/test_futures.py
--- old/s3transfer-0.10.2/tests/unit/test_futures.py    2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/tests/unit/test_futures.py    2024-10-08 20:06:31.000000000 +0200
@@ -489,9 +489,7 @@
             self.executor.submit(task, tag=tag, block=False)
         except NoResourcesAvailable:
             self.fail(
-                'Task {} should not have been blocked. Caused by:\n{}'.format(
-                    task, traceback.format_exc()
-                )
+                f'Task {task} should not have been blocked. Caused by:\n{traceback.format_exc()}'
             )
 
     def add_done_callback_to_future(self, future, fn, *args, **kwargs):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/unit/test_manager.py new/s3transfer-0.10.3/tests/unit/test_manager.py
--- old/s3transfer-0.10.2/tests/unit/test_manager.py    2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/tests/unit/test_manager.py    2024-10-08 20:06:31.000000000 +0200
@@ -132,7 +132,7 @@
         try:
             self.coordinator_controller.wait()
         except FutureResultException as e:
-            self.fail('%s should not have been raised.' % e)
+            self.fail(f'{e} should not have been raised.')
 
     def test_wait_can_be_interrupted(self):
         inject_interrupt_coordinator = TransferCoordinatorWithInterrupt()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/unit/test_s3transfer.py new/s3transfer-0.10.3/tests/unit/test_s3transfer.py
--- old/s3transfer-0.10.2/tests/unit/test_s3transfer.py 2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/tests/unit/test_s3transfer.py 2024-10-08 20:06:31.000000000 +0200
@@ -12,7 +12,6 @@
 # language governing permissions and limitations under the License.
 import os
 import shutil
-import socket
 import tempfile
 from concurrent import futures
 from contextlib import closing
@@ -434,7 +433,7 @@
         response_body = b'foobarbaz'
         stream_with_errors = mock.Mock()
         stream_with_errors.read.side_effect = [
-            socket.error("fake error"),
+            OSError("fake error"),
             response_body,
         ]
         client.get_object.return_value = {'Body': stream_with_errors}
@@ -469,7 +468,7 @@
         client = mock.Mock()
         response_body = b'foobarbaz'
         stream_with_errors = mock.Mock()
-        stream_with_errors.read.side_effect = socket.error("fake error")
+        stream_with_errors.read.side_effect = OSError("fake error")
         client.get_object.return_value = {'Body': stream_with_errors}
         config = TransferConfig(multipart_threshold=4, multipart_chunksize=4)
@@ -678,7 +677,7 @@
         }
         self.client.get_object.side_effect = [
             # First request fails.
-            socket.error("fake error"),
+            OSError("fake error"),
             # Second succeeds.
             {'Body': BytesIO(b'foobar')},
         ]
@@ -696,7 +695,7 @@
         # Here we're raising an exception every single time, which
         # will exhaust our retry count and propagate a
         # RetriesExceededError.
-        self.client.get_object.side_effect = socket.error("fake error")
+        self.client.get_object.side_effect = OSError("fake error")
         with self.assertRaises(RetriesExceededError):
             transfer.download_file('bucket', 'key', 'smallfile')
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/unit/test_subscribers.py new/s3transfer-0.10.3/tests/unit/test_subscribers.py
--- old/s3transfer-0.10.2/tests/unit/test_subscribers.py        2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/tests/unit/test_subscribers.py        2024-10-08 20:06:31.000000000 +0200
@@ -54,7 +54,7 @@
         except Exception as e:
             self.fail(
                 'Should be able to call base class subscriber method. '
-                'instead got: %s' % e
+                f'instead got: {e}'
             )
 
     def test_subclass_can_have_and_call_additional_methods(self):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/unit/test_upload.py new/s3transfer-0.10.3/tests/unit/test_upload.py
--- old/s3transfer-0.10.2/tests/unit/test_upload.py     2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/tests/unit/test_upload.py     2024-10-08 20:06:31.000000000 +0200
@@ -50,7 +50,7 @@
 class OSUtilsExceptionOnFileSize(OSUtils):
     def get_file_size(self, filename):
         raise AssertionError(
-            "The file %s should not have been stated" % filename
+            f"The file {filename} should not have been stated"
         )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.10.2/tests/unit/test_utils.py new/s3transfer-0.10.3/tests/unit/test_utils.py
--- old/s3transfer-0.10.2/tests/unit/test_utils.py      2024-06-24 23:00:46.000000000 +0200
+++ new/s3transfer-0.10.3/tests/unit/test_utils.py      2024-10-08 20:06:31.000000000 +0200
@@ -282,7 +282,7 @@
         try:
             OSUtils().remove_file(non_existent_file)
         except OSError as e:
-            self.fail('OSError should have been caught: %s' % e)
+            self.fail(f'OSError should have been caught: {e}')
 
     def test_remove_file_proxies_remove_file(self):
         OSUtils().remove_file(self.filename)
@@ -306,7 +306,7 @@
         filename = 'myfile'
         self.assertIsNotNone(
             re.match(
-                r'%s\.[0-9A-Fa-f]{8}$' % filename,
+                rf'{filename}\.[0-9A-Fa-f]{{8}}$',
                 OSUtils().get_temp_filename(filename),
             )
         )
@@ -329,7 +329,7 @@
 
     @mock.patch('s3transfer.utils.fallocate')
     def test_allocate_with_io_error(self, mock_fallocate):
-        mock_fallocate.side_effect = IOError()
+        mock_fallocate.side_effect = OSError()
         with self.assertRaises(IOError):
             OSUtils().allocate(self.filename, 1)
         self.assertFalse(os.path.exists(self.filename))
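
The bulk of the source changes above are mechanical string-formatting modernization: `%`-interpolation and `str.format()` calls are rewritten as f-strings, which matches the pyupgrade (`UP`) rules enabled in the new `[tool.ruff.lint]` select list. A minimal sketch of the equivalence (the variable name is illustrative, not from the diff):

    callback = 'my_callback'

    old_percent = "Exception raised in %s." % callback        # style removed in 0.10.3
    old_format = "Exception raised in {}.".format(callback)   # style removed in 0.10.3
    new_fstring = f"Exception raised in {callback}."          # style used in 0.10.3

    # All three produce the same string on Python 3.6+.
    assert old_percent == old_format == new_fstring

With the pyproject.toml configuration above, `ruff check --fix` can apply such rewrites automatically and `ruff format` takes over Black's role; the deleted `[flake8]` ignore list (E203, E226, E501, W503, W504) becomes unnecessary because the selected rule set (`E4`, `E7`, `E9`, `F`, `I`, `UP`) never enables those codes.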
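The test-suite changes swapping `socket.error` and `IOError` for `OSError` are behavior-preserving: since Python 3.3 both names are aliases of `OSError`, so raising and catching `OSError` directly is equivalent and lets the now-unused `import socket` be dropped. A quick demonstration:

    import socket

    # Both legacy names refer to the very same class on Python 3.3+,
    # so OSError("fake error") is interchangeable with either spelling.
    assert IOError is OSError
    assert socket.error is OSError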
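As the PKG-INFO text above notes, the stable entry point for s3transfer remains boto3's transfer manager, which drives s3transfer under the hood. A hedged sketch of that recommended usage (bucket, key, and file paths are placeholders, not anything from this changelog):

    import boto3
    from boto3.s3.transfer import TransferConfig

    # Tune multipart behavior; these values are illustrative defaults.
    config = TransferConfig(
        multipart_threshold=8 * 1024 * 1024,  # switch to multipart above 8 MiB
        max_concurrency=10,
    )
    s3 = boto3.client('s3')

    # Placeholder bucket/key/paths for illustration.
    s3.upload_file('/tmp/report.csv', 'example-bucket', 'reports/report.csv', Config=config)
    s3.download_file('example-bucket', 'reports/report.csv', '/tmp/report-copy.csv', Config=config)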
