Hello community,

here is the log from the commit of package python-s3transfer for openSUSE:Leap:15.2 checked in at 2020-05-07 19:28:05
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Leap:15.2/python-s3transfer (Old)
 and      /work/SRC/openSUSE:Leap:15.2/.python-s3transfer.new.2738 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-s3transfer"

Thu May  7 19:28:05 2020 rev:14 rq:801231 version:0.3.3

Changes:
--------
--- /work/SRC/openSUSE:Leap:15.2/python-s3transfer/python-s3transfer.changes   2020-02-29 17:18:32.085300988 +0100
+++ /work/SRC/openSUSE:Leap:15.2/.python-s3transfer.new.2738/python-s3transfer.changes  2020-05-07 19:33:42.938255399 +0200
@@ -1,0 +2,47 @@
+Fri Apr 10 16:42:43 UTC 2020 - John Paul Adrian Glaubitz <adrian.glaub...@suse.com>
+
+- Update in SLE-15 (bsc#1168943)
+
+-------------------------------------------------------------------
+Tue Apr  7 07:46:02 UTC 2020 - John Paul Adrian Glaubitz <adrian.glaub...@suse.com>
+
+- Fix build on SLE-12
+  + Add python to BuildRequires for suse_version < 1500
+  + Add python-scandir to BuildRequires for suse_version < 1500
+
+-------------------------------------------------------------------
+Fri Mar 13 12:52:07 UTC 2020 - Tomáš Chvátal <tchva...@suse.com>
+
+- Fix build without python2
+
+-------------------------------------------------------------------
+Tue Mar 10 09:45:34 UTC 2020 - Tomáš Chvátal <tchva...@suse.com>
+
+- Skip a test failing on Python 3.8; it is a race condition that
+  needs to be fixed in the tests
+
+-------------------------------------------------------------------
+Thu Feb 27 08:21:19 UTC 2020 - Tomáš Chvátal <tchva...@suse.com>
+
+- Update to 0.3.3:
+  * bugfix:dependency: Updated botocore version range
+- Drop patch hide_py_pckgmgmt.patch; it should no longer be needed:
+  * we updated setuptools on SLE 12 to handle complex dependencies
+- Use pytest for test execution:
+  * nose is used by upstream but is not really needed, and it
+    will break with Python 3.10
+
+-------------------------------------------------------------------
+Mon Jan 27 16:10:36 UTC 2020 - John Paul Adrian Glaubitz <adrian.glaub...@suse.com>
+
+- Update to version 0.3.2
+  * bugfix:s3: Fixes boto/botocore`#1916 <https://github.com/boto/botocore/issues/1916>`__
+- from version 0.3.1
+  * enhancement:TransferManager: Expose client and config properties
+  * enhancement:Tags: Add support for Tagging and TaggingDirective
+- from version 0.3.0
+  * feature:Python: Dropped support for Python 2.6 and 3.3.
+- Refresh patches for new version
+  + hide_py_pckgmgmt.patch
+
+-------------------------------------------------------------------

Old:
----
  hide_py_pckgmgmt.patch
  s3transfer-0.2.1.tar.gz

New:
----
  s3transfer-0.3.3.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-s3transfer.spec ++++++
--- /var/tmp/diff_new_pack.3Jo6JA/_old  2020-05-07 19:33:44.146257816 +0200
+++ /var/tmp/diff_new_pack.3Jo6JA/_new  2020-05-07 19:33:44.146257816 +0200
@@ -1,7 +1,7 @@
 #
 # spec file for package python-s3transfer
 #
-# Copyright (c) 2019 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2020 SUSE LLC
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -17,24 +17,30 @@
 
 
 %{?!python_module:%define python_module() python-%{**} python3-%{**}}
+%bcond_without python2
 Name:           python-s3transfer
-Version:        0.2.1
+Version:        0.3.3
 Release:        0
 Summary:        Python S3 transfer manager
 License:        Apache-2.0
 Group:          Development/Languages/Python
 URL:            https://github.com/boto/s3transfer
Source0:        https://files.pythonhosted.org/packages/source/s/s3transfer/s3transfer-%{version}.tar.gz
-Patch0:         hide_py_pckgmgmt.patch
 Patch1:         no-bundled-packages.patch
 BuildRequires:  %{python_module botocore >= 1.12.36}
 BuildRequires:  %{python_module mock}
-BuildRequires:  %{python_module nose}
+BuildRequires:  %{python_module pytest}
 BuildRequires:  %{python_module setuptools}
 BuildRequires:  %{python_module urllib3}
 BuildRequires:  fdupes
 BuildRequires:  python-rpm-macros
+%if 0%{?suse_version} < 1500
+BuildRequires:  %{python_module scandir}
+BuildRequires:  python
+%endif
+%if %{with python2}
 BuildRequires:  python2-futures >= 2.2.0
+%endif
 Requires:       python-botocore <= 2.0.0
 Requires:       python-botocore >= 1.12.36
 Requires:       python-requests
@@ -51,9 +57,8 @@
 
 %prep
 %setup -q -n s3transfer-%{version}
-%patch0 -p1
 %patch1 -p1
-# remove integration tests that need running s3 :)
+# remove integration tests that need running s3
 rm -rf tests/integration
 
 %build
@@ -64,10 +69,8 @@
 %python_expand %fdupes %{buildroot}%{$python_sitelib}
 
 %check
-# on 32bit the tests fail on OOM and various other funny things
-%ifarch x86_64
-%python_expand nosetests-%{$python_bin_suffix}
-%endif
+# test_download_futures_fail_triggers_shutdown - https://github.com/boto/s3transfer/pull/162
+%pytest -k 'not test_download_futures_fail_triggers_shutdown'
 
 %files %{python_files}
 %license LICENSE.txt
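
Note: the %check change above moves from running the whole nose suite (on
x86_64 only) to pytest with a keyword deselection of the one flaky test. As
an illustrative sketch (not part of the package), the equivalent invocation
from Python, assuming pytest is available, would be:

    import pytest

    # Run the suite but deselect the test that races on Python 3.8;
    # see https://github.com/boto/s3transfer/pull/162
    pytest.main(['-k', 'not test_download_futures_fail_triggers_shutdown'])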

++++++ s3transfer-0.2.1.tar.gz -> s3transfer-0.3.3.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/PKG-INFO new/s3transfer-0.3.3/PKG-INFO
--- old/s3transfer-0.2.1/PKG-INFO       2019-06-04 20:24:40.000000000 +0200
+++ new/s3transfer-0.3.3/PKG-INFO       2020-02-06 20:09:33.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: s3transfer
-Version: 0.2.1
+Version: 0.3.3
 Summary: An Amazon S3 Transfer Manager
 Home-page: https://github.com/boto/s3transfer
 Author: Amazon Web Services
@@ -25,10 +25,8 @@
 Classifier: Natural Language :: English
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2.6
 Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.3
 Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/requirements-test.txt new/s3transfer-0.3.3/requirements-test.txt
--- old/s3transfer-0.2.1/requirements-test.txt  2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/requirements-test.txt  2020-02-06 20:07:20.000000000 +0100
@@ -3,6 +3,3 @@
 mock==1.3.0
 coverage==4.0.1
 wheel==0.24.0
-# Note you need at least pip --version of 6.0 or
-# higher to be able to pick on these version specifiers.
-unittest2==0.5.1; python_version == '2.6'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/s3transfer/__init__.py new/s3transfer-0.3.3/s3transfer/__init__.py
--- old/s3transfer-0.2.1/s3transfer/__init__.py 2019-06-04 20:24:40.000000000 +0200
+++ new/s3transfer-0.3.3/s3transfer/__init__.py 2020-02-06 20:09:33.000000000 +0100
@@ -143,7 +143,7 @@
 
 
 __author__ = 'Amazon Web Services'
-__version__ = '0.2.1'
+__version__ = '0.3.3'
 
 
 class NullHandler(logging.Handler):
@@ -609,6 +609,7 @@
         'SSECustomerKey',
         'SSECustomerKeyMD5',
         'SSEKMSKeyId',
+        'Tagging',
     ]
 
     def __init__(self, client, config=None, osutil=None):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/s3transfer/bandwidth.py new/s3transfer-0.3.3/s3transfer/bandwidth.py
--- old/s3transfer-0.2.1/s3transfer/bandwidth.py        2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/s3transfer/bandwidth.py        2020-02-06 20:07:20.000000000 +0100
@@ -403,7 +403,7 @@
     def _calculate_rate(self, amt, time_at_consumption):
         time_delta = time_at_consumption - self._last_time
         if time_delta <= 0:
-            # While it is really unlikley to see this in an actual transfer,
+            # While it is really unlikely to see this in an actual transfer,
             # we do not want to be returning back a negative rate or try to
             # divide the amount by zero. So instead return back an infinite
             # rate as the time delta is infinitesimally small.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/s3transfer/compat.py new/s3transfer-0.3.3/s3transfer/compat.py
--- old/s3transfer-0.2.1/s3transfer/compat.py   2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/s3transfer/compat.py   2020-02-06 20:07:20.000000000 +0100
@@ -98,76 +98,4 @@
         fileobj.truncate(size)
 
 
-if sys.version_info[:2] == (2, 6):
-    # For Python 2.6, the start() method does not accept initializers.
-    # So we backport the functionality. This is strictly a copy from the
-    # Python 2.7 version.
-    import multiprocessing
-    import multiprocessing.managers
-    import multiprocessing.connection
-    import multiprocessing.util
-
-
-    class BaseManager(multiprocessing.managers.BaseManager):
-        def start(self, initializer=None, initargs=()):
-            '''
-            Spawn a server process for this manager object
-            '''
-            assert self._state.value == multiprocessing.managers.State.INITIAL
-
-            if initializer is not None and not hasattr(initializer,
-                                                       '__call__'):
-                raise TypeError('initializer must be a callable')
-
-            # pipe over which we will retrieve address of server
-            reader, writer = multiprocessing.Pipe(duplex=False)
-
-            # spawn process which runs a server
-            self._process = multiprocessing.Process(
-                target=type(self)._run_server,
-                args=(self._registry, self._address, self._authkey,
-                      self._serializer, writer, initializer, initargs),
-            )
-            ident = ':'.join(str(i) for i in self._process._identity)
-            self._process.name = type(self).__name__ + '-' + ident
-            self._process.start()
-
-            # get address of server
-            writer.close()
-            self._address = reader.recv()
-            reader.close()
-
-            # register a finalizer
-            self._state.value = multiprocessing.managers.State.STARTED
-            self.shutdown = multiprocessing.util.Finalize(
-                self, type(self)._finalize_manager,
-                args=(self._process, self._address, self._authkey,
-                      self._state, self._Client),
-                exitpriority=0
-            )
-
-        @classmethod
-        def _run_server(cls, registry, address, authkey, serializer,
-                        writer,
-                        initializer=None, initargs=()):
-            '''
-            Create a server, report its address and run it
-            '''
-            if initializer is not None:
-                initializer(*initargs)
-
-            # create server
-            server = cls._Server(registry, address, authkey, serializer)
-
-            # inform parent process of the server's address
-            writer.send(server.address)
-            writer.close()
-
-            # run the manager
-            multiprocessing.util.info('manager serving at %r', server.address)
-
-            server.serve_forever()
-
-
-else:
-    from multiprocessing.managers import BaseManager
+from multiprocessing.managers import BaseManager
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/s3transfer/copies.py new/s3transfer-0.3.3/s3transfer/copies.py
--- old/s3transfer-0.2.1/s3transfer/copies.py   2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/s3transfer/copies.py   2020-02-06 20:07:20.000000000 +0100
@@ -59,7 +59,8 @@
         'CopySourceSSECustomerKey',
         'CopySourceSSECustomerAlgorithm',
         'CopySourceSSECustomerKeyMD5',
-        'MetadataDirective'
+        'MetadataDirective',
+        'TaggingDirective',
     ]
 
     COMPLETE_MULTIPART_ARGS = [
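
Note: these whitelist entries are what let the new Tagging/TaggingDirective
extra_args flow through copy requests (the 0.3.1 enhancement from the
changelog above). A minimal usage sketch with placeholder bucket and key
names:

    import botocore.session
    from s3transfer.manager import TransferManager

    session = botocore.session.get_session()
    client = session.create_client('s3', 'us-west-2')

    # Placeholders: substitute real bucket and key names.
    copy_source = {'Bucket': 'src-bucket', 'Key': 'src-key'}
    extra_args = {'Tagging': 'tag1=val1', 'TaggingDirective': 'REPLACE'}

    with TransferManager(client) as manager:
        future = manager.copy(copy_source, 'dst-bucket', 'dst-key', extra_args)
        future.result()  # blocks until the tagged copy completes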
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/s3transfer/download.py new/s3transfer-0.3.3/s3transfer/download.py
--- old/s3transfer-0.2.1/s3transfer/download.py 2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/s3transfer/download.py 2020-02-06 20:07:20.000000000 +0100
@@ -11,8 +11,6 @@
 # ANY KIND, either express or implied. See the License for the specific
 # language governing permissions and limitations under the License.
 import logging
-import os
-import socket
 import threading
 import heapq
 
@@ -100,7 +98,7 @@
         self._transfer_coordinator.submit(
             self._io_executor,
             self.get_io_write_task(fileobj, data, offset)
-         )
+        )
 
     def get_io_write_task(self, fileobj, data, offset):
         """Get an IO write task for the requested set of data
@@ -509,6 +507,7 @@
         last_exception = None
         for i in range(max_attempts):
             try:
+                current_index = start_index
                 response = client.get_object(
                     Bucket=bucket, Key=key, **extra_args)
                 streaming_body = StreamReaderProgress(
@@ -518,7 +517,6 @@
                         bandwidth_limiter.get_bandwith_limited_stream(
                             streaming_body, self._transfer_coordinator)
 
-                current_index = start_index
                 chunks = DownloadChunkIterator(streaming_body, io_chunksize)
                 for chunk in chunks:
                     # If the transfer is done because of a cancellation
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/s3transfer/manager.py new/s3transfer-0.3.3/s3transfer/manager.py
--- old/s3transfer-0.2.1/s3transfer/manager.py  2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/s3transfer/manager.py  2020-02-06 20:07:20.000000000 +0100
@@ -177,6 +177,7 @@
         'SSECustomerKey',
         'SSECustomerKeyMD5',
         'SSEKMSKeyId',
+        'Tagging',
         'WebsiteRedirectLocation'
     ]
 
@@ -188,7 +189,8 @@
         'CopySourceSSECustomerAlgorithm',
         'CopySourceSSECustomerKey',
         'CopySourceSSECustomerKeyMD5',
-        'MetadataDirective'
+        'MetadataDirective',
+        'TaggingDirective',
     ]
 
     ALLOWED_DELETE_ARGS = [
@@ -261,6 +263,14 @@
 
         self._register_handlers()
 
+    @property
+    def client(self):
+        return self._client
+
+    @property
+    def config(self):
+        return self._config
+
     def upload(self, fileobj, bucket, key, extra_args=None, subscribers=None):
         """Uploads a file to S3
 
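
Note: the new read-only client and config properties expose the objects the
TransferManager was constructed with, as the functional tests further below
verify with assertIs. A short sketch, assuming botocore credentials are
configured:

    import botocore.session
    from s3transfer.manager import TransferManager, TransferConfig

    session = botocore.session.get_session()
    client = session.create_client('s3', 'us-west-2')
    config = TransferConfig()

    manager = TransferManager(client, config)
    assert manager.client is client
    assert manager.config is config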
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/s3transfer/utils.py new/s3transfer-0.3.3/s3transfer/utils.py
--- old/s3transfer-0.2.1/s3transfer/utils.py    2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/s3transfer/utils.py    2020-02-06 20:07:20.000000000 +0100
@@ -239,6 +239,8 @@
 
 
 class OSUtils(object):
+    _MAX_FILENAME_LEN = 255
+
     def get_file_size(self, filename):
         return os.path.getsize(filename)
 
@@ -300,7 +302,11 @@
         return False
 
     def get_temp_filename(self, filename):
-        return filename + os.extsep + random_file_extension()
+        suffix = os.extsep + random_file_extension()
+        path = os.path.dirname(filename)
+        name = os.path.basename(filename)
+        temp_filename = name[:self._MAX_FILENAME_LEN - len(suffix)] + suffix 
+        return os.path.join(path, temp_filename)
 
     def allocate(self, filename, size):
         try:
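
Note: the reworked get_temp_filename() keeps temporary download names within
the common 255-character per-component filename limit by truncating only the
basename before appending the random suffix; directory components are left
untouched. A standalone sketch of the same arithmetic, with a stand-in for
the library's random_file_extension() helper:

    import os
    import random
    import string

    _MAX_FILENAME_LEN = 255

    def random_file_extension(num_digits=8):
        # Stand-in for s3transfer.utils.random_file_extension
        return ''.join(random.choice(string.hexdigits)
                       for _ in range(num_digits))

    def get_temp_filename(filename):
        suffix = os.extsep + random_file_extension()
        path = os.path.dirname(filename)
        name = os.path.basename(filename)
        # Truncate so basename + suffix never exceeds the limit
        return os.path.join(
            path, name[:_MAX_FILENAME_LEN - len(suffix)] + suffix)

    # A 280-character basename still yields a valid temp name:
    assert len(os.path.basename(get_temp_filename('a' * 280))) <= 255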
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/s3transfer.egg-info/PKG-INFO new/s3transfer-0.3.3/s3transfer.egg-info/PKG-INFO
--- old/s3transfer-0.2.1/s3transfer.egg-info/PKG-INFO   2019-06-04 20:24:40.000000000 +0200
+++ new/s3transfer-0.3.3/s3transfer.egg-info/PKG-INFO   2020-02-06 20:09:33.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: s3transfer
-Version: 0.2.1
+Version: 0.3.3
 Summary: An Amazon S3 Transfer Manager
 Home-page: https://github.com/boto/s3transfer
 Author: Amazon Web Services
@@ -25,10 +25,8 @@
 Classifier: Natural Language :: English
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2.6
 Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.3
 Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/s3transfer.egg-info/requires.txt new/s3transfer-0.3.3/s3transfer.egg-info/requires.txt
--- old/s3transfer-0.2.1/s3transfer.egg-info/requires.txt       2019-06-04 20:24:40.000000000 +0200
+++ new/s3transfer-0.3.3/s3transfer.egg-info/requires.txt       2020-02-06 20:09:33.000000000 +0100
@@ -1,5 +1,5 @@
-botocore<2.0.0,>=1.12.36
+botocore<2.0a.0,>=1.12.36
 futures<4.0.0,>=2.2.0
 
-[:python_version=="2.6" or python_version=="2.7"]
+[:python_version=="2.7"]
 futures<4.0.0,>=2.2.0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/setup.cfg new/s3transfer-0.3.3/setup.cfg
--- old/s3transfer-0.2.1/setup.cfg      2019-06-04 20:24:40.000000000 +0200
+++ new/s3transfer-0.3.3/setup.cfg      2020-02-06 20:09:33.000000000 +0100
@@ -3,8 +3,8 @@
 
 [metadata]
 requires-dist = 
-       botocore>=1.12.36,<2.0.0
-       futures>=2.2.0,<4.0.0; python_version=="2.6" or python_version=="2.7"
+       botocore>=1.12.36,<2.0a.0
+       futures>=2.2.0,<4.0.0; python_version=="2.7"
 
 [egg_info]
 tag_build = 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/setup.py new/s3transfer-0.3.3/setup.py
--- old/s3transfer-0.2.1/setup.py       2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/setup.py       2020-02-06 20:09:33.000000000 +0100
@@ -11,7 +11,7 @@
 
 
 requires = [
-    'botocore>=1.12.36,<2.0.0',
+    'botocore>=1.12.36,<2.0a.0',
 ]
 
 
@@ -38,7 +38,7 @@
     include_package_data=True,
     install_requires=requires,
     extras_require={
-        ':python_version=="2.6" or python_version=="2.7"': [
+        ':python_version=="2.7"': [
             'futures>=2.2.0,<4.0.0']
     },
     license="Apache License 2.0",
@@ -48,10 +48,8 @@
         'Natural Language :: English',
         'License :: OSI Approved :: Apache Software License',
         'Programming Language :: Python',
-        'Programming Language :: Python :: 2.6',
         'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.3',
         'Programming Language :: Python :: 3.4',
         'Programming Language :: Python :: 3.5',
         'Programming Language :: Python :: 3.6',
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/tests/functional/test_copy.py new/s3transfer-0.3.3/tests/functional/test_copy.py
--- old/s3transfer-0.2.1/tests/functional/test_copy.py  2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/tests/functional/test_copy.py  2020-02-06 20:07:20.000000000 +0100
@@ -257,6 +257,25 @@
         for allowed_upload_arg in self._manager.ALLOWED_COPY_ARGS:
             self.assertIn(allowed_upload_arg, op_model.input_shape.members)
 
+    def test_copy_with_tagging(self):
+        extra_args = {
+            'Tagging': 'tag1=val1', 'TaggingDirective': 'REPLACE'
+        }
+        self.add_head_object_response()
+        self.add_successful_copy_responses(
+            expected_copy_params={
+                'Bucket': self.bucket,
+                'Key': self.key,
+                'CopySource': self.copy_source,
+                'Tagging': 'tag1=val1',
+                'TaggingDirective': 'REPLACE'
+            }
+        )
+        future = self.manager.copy(
+            self.copy_source, self.bucket, self.key, extra_args)
+        future.result()
+        self.stubber.assert_no_pending_responses()
+
 
 class TestMultipartCopy(BaseCopyTest):
     __test__ = True
@@ -510,3 +529,20 @@
         with self.assertRaisesRegexp(ClientError, 'ArbitraryFailure'):
             future.result()
         self.stubber.assert_no_pending_responses()
+
+    def test_mp_copy_with_tagging_directive(self):
+        extra_args = {
+            'Tagging': 'tag1=val1', 'TaggingDirective': 'REPLACE'
+        }
+        self.add_head_object_response()
+        self.add_successful_copy_responses(
+            expected_create_mpu_params={
+                'Bucket': self.bucket,
+                'Key': self.key,
+                'Tagging': 'tag1=val1',
+            }
+        )
+        future = self.manager.copy(
+            self.copy_source, self.bucket, self.key, extra_args)
+        future.result()
+        self.stubber.assert_no_pending_responses()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/tests/functional/test_manager.py new/s3transfer-0.3.3/tests/functional/test_manager.py
--- old/s3transfer-0.2.1/tests/functional/test_manager.py       2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/tests/functional/test_manager.py       2020-02-06 20:07:20.000000000 +0100
@@ -163,3 +163,12 @@
         with self.assertRaises(ArbitraryException):
             with TransferManager(self.client):
                 raise ArbitraryException(u'\u2713')
+
+    def test_client_property(self):
+        manager = TransferManager(self.client)
+        self.assertIs(manager.client, self.client)
+
+    def test_config_property(self):
+        config = TransferConfig()
+        manager = TransferManager(self.client, config)
+        self.assertIs(manager.config, config)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/tests/integration/__init__.py new/s3transfer-0.3.3/tests/integration/__init__.py
--- old/s3transfer-0.2.1/tests/integration/__init__.py  2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/tests/integration/__init__.py  2020-02-06 20:07:20.000000000 +0100
@@ -10,23 +10,43 @@
 # distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
 # ANY KIND, either express or implied. See the License for the specific
 # language governing permissions and limitations under the License.
+import botocore
 import botocore.session
-from botocore.exceptions import ClientError
+from botocore.exceptions import WaiterError
 
 from tests import unittest
 from tests import FileCreator
 from tests import random_bucket_name
 from s3transfer.manager import TransferManager
+from s3transfer.subscribers import BaseSubscriber
 
 
 def recursive_delete(client, bucket_name):
-    # Recursively deletes a bucket and all of its contents.
-    objects = client.get_paginator('list_objects').paginate(
-        Bucket=bucket_name)
-    for key in objects.search('Contents[].Key || `[]`'):
-        if key:
-            client.delete_object(Bucket=bucket_name, Key=key)
-    client.delete_bucket(Bucket=bucket_name)
+    # Ensure the bucket exists before attempting to wipe it out
+    exists_waiter = client.get_waiter('bucket_exists')
+    exists_waiter.wait(Bucket=bucket_name)
+    page = client.get_paginator('list_objects')
+    # Use pages paired with batch delete_objects().
+    for page in page.paginate(Bucket=bucket_name):
+        keys = [{'Key': obj['Key']} for obj in page.get('Contents', [])]
+        if keys:
+            client.delete_objects(Bucket=bucket_name, Delete={'Objects': keys})
+    for _ in range(5):
+        try:
+            client.delete_bucket(Bucket=bucket_name)
+            break
+        except client.exceptions.NoSuchBucket:
+            exists_waiter.wait(Bucket=bucket_name)
+        except Exception as e:
+            # We can sometimes get exceptions when trying to
+            # delete a bucket.  We'll let the waiter make
+            # the final call as to whether the bucket was able
+            # to be deleted.
+            not_exists_waiter = client.get_waiter('bucket_not_exists')
+            try:
+                not_exists_waiter.wait(Bucket=bucket_name)
+            except botocore.exceptions.WaiterError:
+                continue
 
 
 class BaseTransferManagerIntegTest(unittest.TestCase):
@@ -57,19 +77,51 @@
             Bucket=self.bucket_name,
             Key=key)
 
-    def object_exists(self, key):
+    def object_exists(self, key, extra_args=None):
+        try:
+            self.wait_object_exists(key, extra_args)
+            return True
+        except WaiterError:
+            return False
+
+    def object_not_exists(self, key, extra_args=None):
+        if extra_args is None:
+            extra_args = {}
         try:
-            self.client.head_object(Bucket=self.bucket_name, Key=key)
+            self.client.get_waiter('object_not_exists').wait(
+                Bucket=self.bucket_name,
+                Key=key,
+                **extra_args
+            )
             return True
-        except ClientError:
+        except WaiterError:
             return False
 
+    def wait_object_exists(self, key, extra_args=None):
+        if extra_args is None:
+            extra_args = {}
+        for _ in range(5):
+            self.client.get_waiter('object_exists').wait(
+                Bucket=self.bucket_name,
+                Key=key,
+                **extra_args
+            )
+
     def create_transfer_manager(self, config=None):
         return TransferManager(self.client, config=config)
 
-    def upload_file(self, filename, key):
+    def upload_file(self, filename, key, extra_args=None):
+        transfer = self.create_transfer_manager()
         with open(filename, 'rb') as f:
-            self.client.put_object(Bucket=self.bucket_name,
-                                   Key=key,
-                                   Body=f)
+            transfer.upload(f, self.bucket_name, key, extra_args)
+            self.wait_object_exists(key, extra_args)
             self.addCleanup(self.delete_object, key)
+
+
+class WaitForTransferStart(BaseSubscriber):
+    def __init__(self, bytes_transfer_started_event):
+        self._bytes_transfer_started_event = bytes_transfer_started_event
+
+    def on_progress(self, **kwargs):
+        if not self._bytes_transfer_started_event.is_set():
+            self._bytes_transfer_started_event.set()
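
Note: WaitForTransferStart replaces the fixed time.sleep() calls in the
cancellation tests; it sets a threading.Event on the first progress callback,
so a test knows bytes are actually moving before it interrupts the transfer.
It is wired up as in the test diffs below (sketch; transfer_manager,
bucket_name and download_path are placeholders):

    import threading

    bytes_transferring = threading.Event()
    subscriber = WaitForTransferStart(bytes_transferring)
    future = transfer_manager.download(
        bucket_name, '60mb.txt', download_path, subscribers=[subscriber])
    if not bytes_transferring.wait(10):  # timeout in seconds
        future.cancel()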
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/tests/integration/test_delete.py new/s3transfer-0.3.3/tests/integration/test_delete.py
--- old/s3transfer-0.2.1/tests/integration/test_delete.py       2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/tests/integration/test_delete.py       2020-02-06 20:07:20.000000000 +0100
@@ -28,4 +28,4 @@
                                          key=key_name)
         future.result()
 
-        self.assertFalse(self.object_exists(key_name))
+        self.assertTrue(self.object_not_exists(key_name))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/tests/integration/test_download.py new/s3transfer-0.3.3/tests/integration/test_download.py
--- old/s3transfer-0.2.1/tests/integration/test_download.py     2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/tests/integration/test_download.py     2020-02-06 20:07:20.000000000 +0100
@@ -13,6 +13,7 @@
 import glob
 import os
 import time
+import threading
 
 from concurrent.futures import CancelledError
 
@@ -22,7 +23,9 @@
 from tests import RecordingSubscriber
 from tests import NonSeekableWriter
 from tests.integration import BaseTransferManagerIntegTest
+from tests.integration import WaitForTransferStart
 from s3transfer.manager import TransferConfig
+from s3transfer.subscribers import BaseSubscriber
 
 
 class TestDownload(BaseTransferManagerIntegTest):
@@ -73,16 +76,23 @@
         self.upload_file(filename, '60mb.txt')
 
         download_path = os.path.join(self.files.rootdir, '60mb.txt')
-        sleep_time = 0.5
+        timeout = 10
+        bytes_transferring = threading.Event()
+        subscriber = WaitForTransferStart(bytes_transferring)
         try:
             with transfer_manager:
-                start_time = time.time()
                 future = transfer_manager.download(
-                    self.bucket_name, '60mb.txt', download_path)
-                # Sleep for a little to get the transfer process going
-                time.sleep(sleep_time)
+                    self.bucket_name, '60mb.txt', download_path,
+                    subscribers=[subscriber]
+                )
+                if not bytes_transferring.wait(timeout):
+                    future.cancel()
+                    raise RuntimeError(
+                        "Download transfer did not start after waiting for "
+                        "%s seconds." % timeout)
                 # Raise an exception which should cause the preceeding
                 # download to cancel and exit quickly
+                start_time = time.time()
                 raise KeyboardInterrupt()
         except KeyboardInterrupt:
             pass
@@ -90,11 +100,12 @@
         # The maximum time allowed for the transfer manager to exit.
         # This means that it should take less than a couple second after
         # sleeping to exit.
-        max_allowed_exit_time = sleep_time + 4
+        max_allowed_exit_time = 5
+        actual_time_to_exit = end_time - start_time
         self.assertLess(
-            end_time - start_time, max_allowed_exit_time,
+            actual_time_to_exit, max_allowed_exit_time,
             "Failed to exit under %s. Instead exited in %s." % (
-                max_allowed_exit_time, end_time - start_time)
+                max_allowed_exit_time, actual_time_to_exit)
         )
 
         # Make sure the future was cancelled because of the KeyboardInterrupt
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/tests/integration/test_processpool.py new/s3transfer-0.3.3/tests/integration/test_processpool.py
--- old/s3transfer-0.2.1/tests/integration/test_processpool.py  2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/tests/integration/test_processpool.py  2020-02-06 20:07:20.000000000 +0100
@@ -72,13 +72,13 @@
         sleep_time = 0.5
         try:
             with downloader:
-                start_time = time.time()
                 downloader.download_file(
                     self.bucket_name, '60mb.txt', download_path)
                 # Sleep for a little to get the transfer process going
                 time.sleep(sleep_time)
                 # Raise an exception which should cause the preceding
                 # download to cancel and exit quickly
+                start_time = time.time()
                 raise KeyboardInterrupt()
         except KeyboardInterrupt:
             pass
@@ -86,7 +86,7 @@
         # The maximum time allowed for the transfer manager to exit.
         # This means that it should take less than a couple second after
         # sleeping to exit.
-        max_allowed_exit_time = sleep_time + 4
+        max_allowed_exit_time = 5
         self.assertLess(
             end_time - start_time, max_allowed_exit_time,
             "Failed to exit under %s. Instead exited in %s." % (
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/tests/integration/test_s3transfer.py new/s3transfer-0.3.3/tests/integration/test_s3transfer.py
--- old/s3transfer-0.2.1/tests/integration/test_s3transfer.py   2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/tests/integration/test_s3transfer.py   2020-02-06 20:07:20.000000000 +0100
@@ -19,6 +19,7 @@
 import string
 
 from tests import unittest
+from tests.integration import BaseTransferManagerIntegTest
 import botocore.session
 from botocore.compat import six
 from botocore.client import Config
@@ -104,42 +105,13 @@
         return os.path.join(self.rootdir, filename)
 
 
-class TestS3Transfers(unittest.TestCase):
+class TestS3Transfers(BaseTransferManagerIntegTest):
     """Tests for the high level s3transfer module."""
 
-    @classmethod
-    def setUpClass(cls):
-        cls.region = 'us-west-2'
-        cls.session = botocore.session.get_session()
-        cls.client = cls.session.create_client('s3', cls.region)
-        cls.bucket_name = random_bucket_name()
-        cls.client.create_bucket(
-            Bucket=cls.bucket_name,
-            CreateBucketConfiguration={'LocationConstraint': cls.region})
-
-    def setUp(self):
-        self.files = FileCreator()
-
-    def tearDown(self):
-        self.files.remove_all()
-
-    @classmethod
-    def tearDownClass(cls):
-        cls.client.delete_bucket(Bucket=cls.bucket_name)
-
-    def delete_object(self, key):
-        self.client.delete_object(
-            Bucket=self.bucket_name,
-            Key=key)
-
-    def object_exists(self, key):
-        self.client.head_object(Bucket=self.bucket_name,
-                                Key=key)
-        return True
-
     def create_s3_transfer(self, config=None):
-        return s3transfer.S3Transfer(self.client,
-                                            config=config)
+        return s3transfer.S3Transfer(
+            self.client, config=config
+        )
 
     def assert_has_public_read_acl(self, response):
         grants = response['Grants']
@@ -200,6 +172,7 @@
         transfer.upload_file(filename, self.bucket_name,
                              '6mb.txt', extra_args=extra_args)
         self.addCleanup(self.delete_object, '6mb.txt')
+        self.wait_object_exists('6mb.txt', extra_args)
         # A head object will fail if it has a customer key
         # associated with it and it's not provided in the HeadObject
         # request so we can use this to verify our functionality.
@@ -258,6 +231,7 @@
                              'foo.txt', extra_args={'ACL': 'public-read'})
         self.addCleanup(self.delete_object, 'foo.txt')
 
+        self.wait_object_exists('foo.txt')
         response = self.client.get_object_acl(
             Bucket=self.bucket_name, Key='foo.txt')
         self.assert_has_public_read_acl(response)
@@ -284,11 +258,8 @@
             'SSECustomerKey': key_bytes,
             'SSECustomerAlgorithm': 'AES256',
         }
-        self.client.put_object(Bucket=self.bucket_name,
-                               Key='foo.txt',
-                               Body=b'hello world',
-                               **extra_args)
-        self.addCleanup(self.delete_object, 'foo.txt')
+        filename = self.files.create_file('foo.txt', 'hello world')
+        self.upload_file(filename, 'foo.txt', extra_args)
         transfer = self.create_s3_transfer()
 
         download_path = os.path.join(self.files.rootdir, 'downloaded.txt')
@@ -308,10 +279,7 @@
         transfer = self.create_s3_transfer()
         filename = self.files.create_file_with_size(
             '20mb.txt', filesize=20 * 1024 * 1024)
-        with open(filename, 'rb') as f:
-            self.client.put_object(Bucket=self.bucket_name,
-                                   Key='20mb.txt', Body=f)
-        self.addCleanup(self.delete_object, '20mb.txt')
+        self.upload_file(filename, '20mb.txt')
 
         download_path = os.path.join(self.files.rootdir, 'downloaded.txt')
         transfer.download_file(self.bucket_name, '20mb.txt',
@@ -321,14 +289,9 @@
 
     def test_download_below_threshold(self):
         transfer = self.create_s3_transfer()
-
         filename = self.files.create_file_with_size(
             'foo.txt', filesize=1024 * 1024)
-        with open(filename, 'rb') as f:
-            self.client.put_object(Bucket=self.bucket_name,
-                                   Key='foo.txt',
-                                   Body=f)
-            self.addCleanup(self.delete_object, 'foo.txt')
+        self.upload_file(filename, 'foo.txt')
 
         download_path = os.path.join(self.files.rootdir, 'downloaded.txt')
         transfer.download_file(self.bucket_name, 'foo.txt',
@@ -337,14 +300,9 @@
 
     def test_download_above_threshold(self):
         transfer = self.create_s3_transfer()
-
         filename = self.files.create_file_with_size(
             'foo.txt', filesize=20 * 1024 * 1024)
-        with open(filename, 'rb') as f:
-            self.client.put_object(Bucket=self.bucket_name,
-                                   Key='foo.txt',
-                                   Body=f)
-            self.addCleanup(self.delete_object, 'foo.txt')
+        self.upload_file(filename, 'foo.txt')
 
         download_path = os.path.join(self.files.rootdir, 'downloaded.txt')
         transfer.download_file(self.bucket_name, 'foo.txt',
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/tests/integration/test_upload.py new/s3transfer-0.3.3/tests/integration/test_upload.py
--- old/s3transfer-0.2.1/tests/integration/test_upload.py       2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/tests/integration/test_upload.py       2020-02-06 20:07:20.000000000 +0100
@@ -11,6 +11,7 @@
 # ANY KIND, either express or implied. See the License for the specific
 # language governing permissions and limitations under the License.
 import time
+import threading
 
 from concurrent.futures import CancelledError
 
@@ -18,6 +19,7 @@
 from tests import skip_if_using_serial_implementation
 from tests import RecordingSubscriber, NonSeekableReader
 from tests.integration import BaseTransferManagerIntegTest
+from tests.integration import WaitForTransferStart
 from s3transfer.manager import TransferConfig
 
 
@@ -62,16 +64,23 @@
         filename = self.get_input_fileobj(
             name='foo.txt', size=20 * 1024 * 1024)
 
-        sleep_time = 0.25
+        timeout = 10
+        bytes_transferring = threading.Event()
+        subscriber = WaitForTransferStart(bytes_transferring)
         try:
             with transfer_manager:
-                start_time = time.time()
                 future = transfer_manager.upload(
-                    filename, self.bucket_name, '20mb.txt')
-                # Sleep for a little to get the transfer process going
-                time.sleep(sleep_time)
+                    filename, self.bucket_name, '20mb.txt',
+                    subscribers=[subscriber]
+                )
+                if not bytes_transferring.wait(timeout):
+                    future.cancel()
+                    raise RuntimeError(
+                        "Download transfer did not start after waiting for "
+                        "%s seconds." % timeout)
                 # Raise an exception which should cause the preceeding
                 # download to cancel and exit quickly
+                start_time = time.time()
                 raise KeyboardInterrupt()
         except KeyboardInterrupt:
             pass
@@ -79,11 +88,12 @@
         # The maximum time allowed for the transfer manager to exit.
         # This means that it should take less than a couple second after
         # sleeping to exit.
-        max_allowed_exit_time = sleep_time + 5
+        max_allowed_exit_time = 5
+        actual_time_to_exit = end_time - start_time
         self.assertLess(
-            end_time - start_time, max_allowed_exit_time,
+            actual_time_to_exit, max_allowed_exit_time,
             "Failed to exit under %s. Instead exited in %s." % (
-                max_allowed_exit_time, end_time - start_time)
+                max_allowed_exit_time, actual_time_to_exit)
         )
 
         try:
@@ -95,8 +105,7 @@
             self.assertEqual(str(e), 'KeyboardInterrupt()')
             # If the transfer did get cancelled,
             # make sure the object does not exist.
-            self.assertFalse(self.object_exists('20mb.txt'))
-
+            self.assertTrue(self.object_not_exists('20mb.txt'))
 
     @skip_if_using_serial_implementation(
         'Exception is thrown once the transfers are submitted. '
@@ -123,12 +132,12 @@
 
         try:
             with transfer_manager:
-                start_time = time.time()
                 for i, fileobj in enumerate(fileobjs):
                     futures.append(transfer_manager.upload(
                         fileobj, self.bucket_name, keynames[i]))
                 # Raise an exception which should cause the preceeding
                 # transfer to cancel and exit quickly
+                start_time = time.time()
                 raise KeyboardInterrupt()
         except KeyboardInterrupt:
             pass
@@ -148,7 +157,7 @@
                 future.result()
         # For the transfer that did get cancelled, make sure the object
         # does not exist.
-        self.assertFalse(self.object_exists(future.meta.call_args.key))
+        self.assertTrue(self.object_not_exists(future.meta.call_args.key))
 
     def test_progress_subscribers_on_upload(self):
         subscriber = RecordingSubscriber()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/tests/unit/test_download.py new/s3transfer-0.3.3/tests/unit/test_download.py
--- old/s3transfer-0.2.1/tests/unit/test_download.py    2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/tests/unit/test_download.py    2020-02-06 20:07:20.000000000 +0100
@@ -14,6 +14,7 @@
 import os
 import shutil
 import tempfile
+import socket
 import mock
 
 from tests import BaseTaskTest
@@ -713,6 +714,19 @@
         # io queue.
         self.assert_io_writes([])
 
+    def test_handles_callback_on_initial_error(self):
+        # We can't use the stubber for this because we need to raise
+        # a S3_RETRYABLE_DOWNLOAD_ERRORS, and the stubber only allows
+        # you to raise a ClientError.
+        self.client.get_object = mock.Mock(side_effect=SOCKET_ERROR())
+        task = self.get_download_task()
+        task()
+        self.transfer_coordinator.announce_done()
+        # Should have failed out on a RetriesExceededError because
+        # get_object keeps raising a socket error.
+        with self.assertRaises(RetriesExceededError):
+            self.transfer_coordinator.result()
+
 
 class TestImmediatelyWriteIOGetObjectTask(TestGetObjectTask):
     def setUp(self):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/tests/unit/test_futures.py new/s3transfer-0.3.3/tests/unit/test_futures.py
--- old/s3transfer-0.2.1/tests/unit/test_futures.py     2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/tests/unit/test_futures.py     2020-02-06 20:07:20.000000000 +0100
@@ -470,7 +470,11 @@
         try:
             self.executor.submit(task, tag=tag, block=False)
         except NoResourcesAvailable:
-            self.fail('Task %s should not have been blocked' % task)
+            self.fail(
+                'Task %s should not have been blocked. Caused by:\n%s' % (
+                    task, traceback.format_exc()
+                )
+            )
 
     def add_done_callback_to_future(self, future, fn, *args, **kwargs):
         callback_for_future = FunctionContainer(fn, *args, **kwargs)
@@ -497,7 +501,7 @@
         self.assert_submit_would_block(second_task)
 
     def test_executor_clears_capacity_on_done_tasks(self):
-        first_task = self.get_task(ReturnFooTask)
+        first_task = self.get_sleep_task()
         second_task = self.get_task(ReturnFooTask)
 
         # Submit a task.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/s3transfer-0.2.1/tests/unit/test_utils.py new/s3transfer-0.3.3/tests/unit/test_utils.py
--- old/s3transfer-0.2.1/tests/unit/test_utils.py       2019-06-04 20:22:26.000000000 +0200
+++ new/s3transfer-0.3.3/tests/unit/test_utils.py       2020-02-06 20:07:20.000000000 +0100
@@ -305,6 +305,16 @@
             )
         )
 
+    def test_get_temp_filename_len_255(self):
+        filename = 'a'*255
+        temp_filename = OSUtils().get_temp_filename(filename)
+        self.assertLessEqual(len(temp_filename), 255)
+    
+    def test_get_temp_filename_len_gt_255(self):
+        filename = 'a'*280
+        temp_filename = OSUtils().get_temp_filename(filename)
+        self.assertLessEqual(len(temp_filename), 255)
+
     def test_allocate(self):
         truncate_size = 1
         OSUtils().allocate(self.filename, truncate_size)

