Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-google-resumable-media for openSUSE:Factory checked in at 2022-04-08 22:45:56
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-google-resumable-media (Old)
 and      /work/SRC/openSUSE:Factory/.python-google-resumable-media.new.1900 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-google-resumable-media"

Fri Apr  8 22:45:56 2022 rev:13 rq:967754 version:2.3.2

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-google-resumable-media/python-google-resumable-media.changes      2022-01-23 18:38:44.353926393 +0100
+++ /work/SRC/openSUSE:Factory/.python-google-resumable-media.new.1900/python-google-resumable-media.changes    2022-04-08 22:46:06.538879267 +0200
@@ -1,0 +2,22 @@
+Fri Apr  8 11:03:03 UTC 2022 - John Paul Adrian Glaubitz <[email protected]>
+
+- Update to 2.3.2
+  Bug Fixes
+  * append existing headers in prepare_initiate_request (#314)
+- from version 2.3.1
+  Bug Fixes
+  * include existing headers in prepare request (#309)
+- from version 2.3.0
+  Features
+  * safely resume interrupted downloads (#294)
+- from version 2.2.1
+  Bug Fixes
+  * don't overwrite user-agent on requests (42b380e)
+- from version 2.2.0
+  Features
+  * add 'py.typed' declaration file (#287)
+  * add support for signed resumable upload URLs (#290)
+  Bug Fixes
+  * add user-agent on requests (#295)
+
+-------------------------------------------------------------------
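
For context, here is a rough usage sketch of the new download-resume behaviour
from 2.3.0 ("safely resume interrupted downloads"). The bucket and object names
in media_url are placeholders, and the transport setup mirrors the package's own
system tests, so treat this as an illustration rather than authoritative API
documentation:

    import io

    import google.auth
    import google.auth.transport.requests as tr_requests
    from google.resumable_media.requests import Download

    credentials, _ = google.auth.default()
    transport = tr_requests.AuthorizedSession(credentials)

    # Placeholder media URL for an object in Google Cloud Storage.
    media_url = (
        "https://storage.googleapis.com/download/storage/v1/b"
        "/my-bucket/o/my-object?alt=media"
    )

    stream = io.BytesIO()
    download = Download(media_url, stream=stream, checksum="md5")
    # consume() now records the object generation and the bytes already
    # written to the stream, so a retried request only fetches the missing
    # byte range of the same object generation.
    download.consume(transport)

No caller-side changes are needed; the resume logic lives inside consume().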

Old:
----
  google-resumable-media-2.1.0.tar.gz

New:
----
  google-resumable-media-2.3.2.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-google-resumable-media.spec ++++++
--- /var/tmp/diff_new_pack.U8vMiJ/_old  2022-04-08 22:46:07.174872208 +0200
+++ /var/tmp/diff_new_pack.U8vMiJ/_new  2022-04-08 22:46:07.182872118 +0200
@@ -19,7 +19,7 @@
 %{?!python_module:%define python_module() python-%{**} python3-%{**}}
 %define skip_python2 1
 Name:           python-google-resumable-media
-Version:        2.1.0
+Version:        2.3.2
 Release:        0
 Summary:        Utilities for Google Media Downloads and Resumable Uploads
 License:        Apache-2.0

++++++ google-resumable-media-2.1.0.tar.gz -> google-resumable-media-2.3.2.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/google-resumable-media-2.1.0/PKG-INFO 
new/google-resumable-media-2.3.2/PKG-INFO
--- old/google-resumable-media-2.1.0/PKG-INFO   2021-10-25 19:36:02.955088000 
+0200
+++ new/google-resumable-media-2.3.2/PKG-INFO   2022-03-08 21:54:01.657518000 
+0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: google-resumable-media
-Version: 2.1.0
+Version: 2.3.2
 Summary: Utilities for Google Media Downloads and Resumable Uploads
 Home-page: https://github.com/googleapis/google-resumable-media-python
 Author: Google Cloud Platform
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/google-resumable-media-2.1.0/google/__init__.py 
new/google-resumable-media-2.3.2/google/__init__.py
--- old/google-resumable-media-2.1.0/google/__init__.py 2021-10-25 
19:33:28.000000000 +0200
+++ new/google-resumable-media-2.3.2/google/__init__.py 2022-03-08 
21:51:16.000000000 +0100
@@ -19,4 +19,4 @@
 except ImportError:
     import pkgutil
 
-    __path__ = pkgutil.extend_path(__path__, __name__)
+    __path__ = pkgutil.extend_path(__path__, __name__)  # type: ignore
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/google/_async_resumable_media/_upload.py 
new/google-resumable-media-2.3.2/google/_async_resumable_media/_upload.py
--- old/google-resumable-media-2.1.0/google/_async_resumable_media/_upload.py   
2021-10-25 19:33:28.000000000 +0200
+++ new/google-resumable-media-2.3.2/google/_async_resumable_media/_upload.py   
2022-03-08 21:51:16.000000000 +0100
@@ -612,7 +612,7 @@
         """
         self._invalid = True
 
-    async def _process_response(self, response, bytes_sent):
+    async def _process_resumable_response(self, response, bytes_sent):
         """Process the response from an HTTP request.
 
         This is everything that must be done after a request that doesn't
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/google/_async_resumable_media/requests/_request_helpers.py
 
new/google-resumable-media-2.3.2/google/_async_resumable_media/requests/_request_helpers.py
--- 
old/google-resumable-media-2.1.0/google/_async_resumable_media/requests/_request_helpers.py
 2021-10-25 19:33:28.000000000 +0200
+++ 
new/google-resumable-media-2.3.2/google/_async_resumable_media/requests/_request_helpers.py
 2022-03-08 21:51:16.000000000 +0100
@@ -23,8 +23,8 @@
 from google._async_resumable_media import _helpers
 from google.resumable_media import common
 
-import google.auth.transport._aiohttp_requests as aiohttp_requests
-import aiohttp
+from google.auth.transport import _aiohttp_requests as aiohttp_requests  # type: ignore
+import aiohttp  # type: ignore
 
 _DEFAULT_RETRY_STRATEGY = common.RetryStrategy()
 _SINGLE_GET_CHUNK_SIZE = 8192
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/google/_async_resumable_media/requests/download.py
 
new/google-resumable-media-2.3.2/google/_async_resumable_media/requests/download.py
--- 
old/google-resumable-media-2.1.0/google/_async_resumable_media/requests/download.py
 2021-10-25 19:33:28.000000000 +0200
+++ 
new/google-resumable-media-2.3.2/google/_async_resumable_media/requests/download.py
 2022-03-08 21:51:16.000000000 +0100
@@ -14,7 +14,7 @@
 
 """Support for downloading media from Google APIs."""
 
-import urllib3.response
+import urllib3.response  # type: ignore
 
 from google._async_resumable_media import _download
 from google._async_resumable_media import _helpers
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/google/_async_resumable_media/requests/upload.py
 
new/google-resumable-media-2.3.2/google/_async_resumable_media/requests/upload.py
--- 
old/google-resumable-media-2.1.0/google/_async_resumable_media/requests/upload.py
   2021-10-25 19:33:28.000000000 +0200
+++ 
new/google-resumable-media-2.3.2/google/_async_resumable_media/requests/upload.py
   2022-03-08 21:51:16.000000000 +0100
@@ -481,7 +481,7 @@
             retry_strategy=self._retry_strategy,
             timeout=timeout,
         )
-        await self._process_response(response, len(payload))
+        await self._process_resumable_response(response, len(payload))
         return response
 
     async def recover(self, transport):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/google/resumable_media/_download.py 
new/google-resumable-media-2.3.2/google/resumable_media/_download.py
--- old/google-resumable-media-2.1.0/google/resumable_media/_download.py        
2021-10-25 19:33:28.000000000 +0200
+++ new/google-resumable-media-2.3.2/google/resumable_media/_download.py        
2022-03-08 21:51:16.000000000 +0100
@@ -139,6 +139,10 @@
             media_url, stream=stream, start=start, end=end, headers=headers
         )
         self.checksum = checksum
+        self._bytes_downloaded = 0
+        self._expected_checksum = None
+        self._checksum_object = None
+        self._object_generation = None
 
     def _prepare_request(self):
         """Prepare the contents of an HTTP request.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/google/resumable_media/_helpers.py 
new/google-resumable-media-2.3.2/google/resumable_media/_helpers.py
--- old/google-resumable-media-2.1.0/google/resumable_media/_helpers.py 
2021-10-25 19:33:28.000000000 +0200
+++ new/google-resumable-media-2.3.2/google/resumable_media/_helpers.py 
2022-03-08 21:51:16.000000000 +0100
@@ -22,6 +22,11 @@
 import random
 import warnings
 
+from urllib.parse import parse_qs
+from urllib.parse import urlencode
+from urllib.parse import urlsplit
+from urllib.parse import urlunsplit
+
 from google.resumable_media import common
 
 
@@ -33,6 +38,7 @@
     "implementation. Python 3 has a faster implementation, `google-crc32c`, "
     "which will be used if it is installed."
 )
+_GENERATION_HEADER = "x-goog-generation"
 _HASH_HEADER = "x-goog-hash"
 _MISSING_CHECKSUM = """\
 No {checksum_type} checksum was returned from the service while downloading {}
@@ -140,12 +146,12 @@
     to use CRCMod. CRCMod might be using a 'slow' varietal. If so, warn...
     """
     try:
-        import google_crc32c
+        import google_crc32c  # type: ignore
 
         crc_obj = google_crc32c.Checksum()
     except ImportError:
         try:
-            import crcmod
+            import crcmod  # type: ignore
 
             crc_obj = crcmod.predefined.Crc("crc-32c")
             _is_fast_crcmod()
@@ -302,6 +308,67 @@
         raise ValueError("checksum must be ``'md5'``, ``'crc32c'`` or ``None``")
 
 
+def _parse_generation_header(response, get_headers):
+    """Parses the generation header from an ``X-Goog-Generation`` value.
+
+    Args:
+        response (~requests.Response): The HTTP response object.
+        get_headers (callable: response->dict): returns response headers.
+
+    Returns:
+        Optional[long]: The object generation from the response, if it
+        can be detected from the ``X-Goog-Generation`` header; otherwise, None.
+    """
+    headers = get_headers(response)
+    object_generation = headers.get(_GENERATION_HEADER, None)
+
+    if object_generation is None:
+        return None
+    else:
+        return int(object_generation)
+
+
+def _get_generation_from_url(media_url):
+    """Retrieve the object generation query param specified in the media url.
+
+    Args:
+        media_url (str): The URL containing the media to be downloaded.
+
+    Returns:
+        long: The object generation from the media url if exists; otherwise, None.
+    """
+
+    _, _, _, query, _ = urlsplit(media_url)
+    query_params = parse_qs(query)
+    object_generation = query_params.get("generation", None)
+
+    if object_generation is None:
+        return None
+    else:
+        return int(object_generation[0])
+
+
+def add_query_parameters(media_url, query_params):
+    """Add query parameters to a base url.
+
+    Args:
+        media_url (str): The URL containing the media to be downloaded.
+        query_params (dict): Names and values of the query parameters to add.
+
+    Returns:
+        str: URL with additional query strings appended.
+    """
+
+    if len(query_params) == 0:
+        return media_url
+
+    scheme, netloc, path, query, frag = urlsplit(media_url)
+    params = parse_qs(query)
+    new_params = {**params, **query_params}
+    query = urlencode(new_params, doseq=True)
+    return urlunsplit((scheme, netloc, path, query, frag))
+
+
 class _DoNothingHash(object):
     """Do-nothing hash object.
 
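
As a quick illustration of the add_query_parameters() helper added above (the URL
and generation value are placeholders, taken from the unit tests further down in
this diff):

    _helpers.add_query_parameters(
        "https://storage.googleapis.com/storage/v1/b/my-bucket/o/my-object?alt=media",
        {"generation": 1641590104888641},
    )
    # -> "https://storage.googleapis.com/storage/v1/b/my-bucket/o/my-object"
    #    "?alt=media&generation=1641590104888641"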
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/google/resumable_media/_upload.py 
new/google-resumable-media-2.3.2/google/resumable_media/_upload.py
--- old/google-resumable-media-2.1.0/google/resumable_media/_upload.py  
2021-10-25 19:33:28.000000000 +0200
+++ new/google-resumable-media-2.3.2/google/resumable_media/_upload.py  
2022-03-08 21:51:16.000000000 +0100
@@ -27,6 +27,7 @@
 import random
 import re
 import sys
+import urllib.parse
 
 from google import resumable_media
 from google.resumable_media import _helpers
@@ -462,10 +463,20 @@
 
         self._stream = stream
         self._content_type = content_type
-        headers = {
-            _CONTENT_TYPE_HEADER: "application/json; charset=UTF-8",
-            "x-upload-content-type": content_type,
-        }
+
+        # Signed URL requires content type set directly - not through x-upload-content-type
+        parse_result = urllib.parse.urlparse(self.upload_url)
+        parsed_query = urllib.parse.parse_qs(parse_result.query)
+        if "x-goog-signature" in parsed_query or "X-Goog-Signature" in parsed_query:
+            # Deconstruct **self._headers first so that content type defined here takes priority
+            headers = {**self._headers, _CONTENT_TYPE_HEADER: content_type}
+        else:
+            # Deconstruct **self._headers first so that content type defined here takes priority
+            headers = {
+                **self._headers,
+                _CONTENT_TYPE_HEADER: "application/json; charset=UTF-8",
+                "x-upload-content-type": content_type,
+            }
         # Set the total bytes if possible.
         if total_bytes is not None:
             self._total_bytes = total_bytes
@@ -476,7 +487,6 @@
             content_length = "{:d}".format(self._total_bytes)
             headers["x-upload-content-length"] = content_length
 
-        headers.update(self._headers)
         payload = json.dumps(metadata).encode("utf-8")
         return _POST, self.upload_url, payload, headers
 
@@ -611,6 +621,7 @@
         self._update_checksum(start_byte, payload)
 
         headers = {
+            **self._headers,
             _CONTENT_TYPE_HEADER: self._content_type,
             _helpers.CONTENT_RANGE_HEADER: content_range,
         }
@@ -647,7 +658,7 @@
         """
         self._invalid = True
 
-    def _process_response(self, response, bytes_sent):
+    def _process_resumable_response(self, response, bytes_sent):
         """Process the response from an HTTP request.
 
         This is everything that must be done after a request that doesn't
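
To make the signed-URL branch in the _prepare_initiate_request hunk above more
concrete, a minimal standalone sketch of the check it performs (the function name
and URLs are illustrative only, not part of the library):

    from urllib.parse import parse_qs, urlparse

    def uses_signed_url(upload_url):
        # A signed resumable upload URL carries an (X-)Goog-Signature query
        # parameter; for such URLs the real content type must be sent directly
        # rather than via the x-upload-content-type header.
        query = parse_qs(urlparse(upload_url).query)
        return "x-goog-signature" in query or "X-Goog-Signature" in query

    uses_signed_url("https://storage.googleapis.com/b/o?X-Goog-Signature=abc123")  # True
    uses_signed_url("https://storage.googleapis.com/b/o")                          # False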
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/google/resumable_media/common.py 
new/google-resumable-media-2.3.2/google/resumable_media/common.py
--- old/google-resumable-media-2.1.0/google/resumable_media/common.py   
2021-10-25 19:33:28.000000000 +0200
+++ new/google-resumable-media-2.3.2/google/resumable_media/common.py   
2022-03-08 21:51:16.000000000 +0100
@@ -26,7 +26,7 @@
 UPLOAD_CHUNK_SIZE = 262144  # 256 * 1024
 """int: Chunks in a resumable upload must come in multiples of 256 KB."""
 
-PERMANENT_REDIRECT = http.client.PERMANENT_REDIRECT
+PERMANENT_REDIRECT = http.client.PERMANENT_REDIRECT  # type: ignore
 """int: Permanent redirect status code.
 
 .. note::
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/google/resumable_media/requests/_request_helpers.py
 
new/google-resumable-media-2.3.2/google/resumable_media/requests/_request_helpers.py
--- 
old/google-resumable-media-2.1.0/google/resumable_media/requests/_request_helpers.py
        2021-10-25 19:33:28.000000000 +0200
+++ 
new/google-resumable-media-2.3.2/google/resumable_media/requests/_request_helpers.py
        2022-03-08 21:51:16.000000000 +0100
@@ -18,7 +18,7 @@
 """
 
 import requests.exceptions
-import urllib3.exceptions
+import urllib3.exceptions  # type: ignore
 
 import time
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/google/resumable_media/requests/download.py 
new/google-resumable-media-2.3.2/google/resumable_media/requests/download.py
--- 
old/google-resumable-media-2.1.0/google/resumable_media/requests/download.py    
    2021-10-25 19:33:28.000000000 +0200
+++ 
new/google-resumable-media-2.3.2/google/resumable_media/requests/download.py    
    2022-03-08 21:51:16.000000000 +0100
@@ -14,7 +14,7 @@
 
 """Support for downloading media from Google APIs."""
 
-import urllib3.response
+import urllib3.response  # type: ignore
 
 from google.resumable_media import _download
 from google.resumable_media import common
@@ -86,12 +86,22 @@
                 checksum doesn't agree with server-computed checksum.
         """
 
-        # `_get_expected_checksum()` may return None even if a checksum was
-        # requested, in which case it will emit an info log _MISSING_CHECKSUM.
-        # If an invalid checksum type is specified, this will raise ValueError.
-        expected_checksum, checksum_object = _helpers._get_expected_checksum(
-            response, self._get_headers, self.media_url, checksum_type=self.checksum
-        )
+        # Retrieve the expected checksum only once for the download request,
+        # then compute and validate the checksum when the full download completes.
+        # Retried requests are range requests, and there's no way to detect
+        # data corruption for that byte range alone.
+        if self._expected_checksum is None and self._checksum_object is None:
+            # `_get_expected_checksum()` may return None even if a checksum was
+            # requested, in which case it will emit an info log _MISSING_CHECKSUM.
+            # If an invalid checksum type is specified, this will raise ValueError.
+            expected_checksum, checksum_object = _helpers._get_expected_checksum(
+                response, self._get_headers, self.media_url, checksum_type=self.checksum
+            )
+            self._expected_checksum = expected_checksum
+            self._checksum_object = checksum_object
+        else:
+            expected_checksum = self._expected_checksum
+            checksum_object = self._checksum_object
 
         with response:
             # NOTE: In order to handle compressed streams gracefully, we try
@@ -104,6 +114,7 @@
             )
             for chunk in body_iter:
                 self._stream.write(chunk)
+                self._bytes_downloaded += len(chunk)
                 local_checksum_object.update(chunk)
 
         if expected_checksum is not None:
@@ -150,7 +161,7 @@
             ValueError: If the current :class:`Download` has already
                 finished.
         """
-        method, url, payload, headers = self._prepare_request()
+        method, _, payload, headers = self._prepare_request()
         # NOTE: We assume "payload is None" but pass it along anyway.
         request_kwargs = {
             "data": payload,
@@ -160,10 +171,39 @@
         if self._stream is not None:
             request_kwargs["stream"] = True
 
+        # Assign object generation if generation is specified in the media url.
+        if self._object_generation is None:
+            self._object_generation = _helpers._get_generation_from_url(self.media_url)
+
         # Wrap the request business logic in a function to be retried.
         def retriable_request():
+            url = self.media_url
+
+            # To restart an interrupted download, read from the offset of last byte
+            # received using a range request, and set object generation query param.
+            if self._bytes_downloaded > 0:
+                _download.add_bytes_range(
+                    self._bytes_downloaded, self.end, self._headers
+                )
+                request_kwargs["headers"] = self._headers
+
+                # Set object generation query param to ensure the same object content is requested.
+                if (
+                    self._object_generation is not None
+                    and _helpers._get_generation_from_url(self.media_url) is None
+                ):
+                    query_param = {"generation": self._object_generation}
+                    url = _helpers.add_query_parameters(self.media_url, query_param)
+
             result = transport.request(method, url, **request_kwargs)
 
+            # If a generation hasn't been specified, and this is the first response we get, let's record the
+            # generation. In future requests we'll specify the generation query param to avoid data races.
+            if self._object_generation is None:
+                self._object_generation = _helpers._parse_generation_header(
+                    result, self._get_headers
+                )
+
             self._process_response(result)
 
             if self._stream is not None:
@@ -223,13 +263,22 @@
             ~google.resumable_media.common.DataCorruption: If the download's
                 checksum doesn't agree with server-computed checksum.
         """
-
-        # `_get_expected_checksum()` may return None even if a checksum was
-        # requested, in which case it will emit an info log _MISSING_CHECKSUM.
-        # If an invalid checksum type is specified, this will raise ValueError.
-        expected_checksum, checksum_object = _helpers._get_expected_checksum(
-            response, self._get_headers, self.media_url, checksum_type=self.checksum
-        )
+        # Retrieve the expected checksum only once for the download request,
+        # then compute and validate the checksum when the full download completes.
+        # Retried requests are range requests, and there's no way to detect
+        # data corruption for that byte range alone.
+        if self._expected_checksum is None and self._checksum_object is None:
+            # `_get_expected_checksum()` may return None even if a checksum was
+            # requested, in which case it will emit an info log _MISSING_CHECKSUM.
+            # If an invalid checksum type is specified, this will raise ValueError.
+            expected_checksum, checksum_object = _helpers._get_expected_checksum(
+                response, self._get_headers, self.media_url, checksum_type=self.checksum
+            )
+            self._expected_checksum = expected_checksum
+            self._checksum_object = checksum_object
+        else:
+            expected_checksum = self._expected_checksum
+            checksum_object = self._checksum_object
 
         with response:
             body_iter = response.raw.stream(
@@ -237,6 +286,7 @@
             )
             for chunk in body_iter:
                 self._stream.write(chunk)
+                self._bytes_downloaded += len(chunk)
                 checksum_object.update(chunk)
             response._content_consumed = True
 
@@ -285,19 +335,47 @@
             ValueError: If the current :class:`Download` has already
                 finished.
         """
-        method, url, payload, headers = self._prepare_request()
+        method, _, payload, headers = self._prepare_request()
+        # NOTE: We assume "payload is None" but pass it along anyway.
+        request_kwargs = {
+            "data": payload,
+            "headers": headers,
+            "timeout": timeout,
+            "stream": True,
+        }
+
+        # Assign object generation if generation is specified in the media url.
+        if self._object_generation is None:
+            self._object_generation = _helpers._get_generation_from_url(self.media_url)
 
         # Wrap the request business logic in a function to be retried.
         def retriable_request():
-            # NOTE: We assume "payload is None" but pass it along anyway.
-            result = transport.request(
-                method,
-                url,
-                data=payload,
-                headers=headers,
-                stream=True,
-                timeout=timeout,
-            )
+            url = self.media_url
+
+            # To restart an interrupted download, read from the offset of last byte
+            # received using a range request, and set object generation query param.
+            if self._bytes_downloaded > 0:
+                _download.add_bytes_range(
+                    self._bytes_downloaded, self.end, self._headers
+                )
+                request_kwargs["headers"] = self._headers
+
+                # Set object generation query param to ensure the same object content is requested.
+                if (
+                    self._object_generation is not None
+                    and _helpers._get_generation_from_url(self.media_url) is None
+                ):
+                    query_param = {"generation": self._object_generation}
+                    url = _helpers.add_query_parameters(self.media_url, query_param)
+
+            result = transport.request(method, url, **request_kwargs)
+
+            # If a generation hasn't been specified, and this is the first response we get, let's record the
+            # generation. In future requests we'll specify the generation query param to avoid data races.
+            if self._object_generation is None:
+                self._object_generation = _helpers._parse_generation_header(
+                    result, self._get_headers
+                )
 
             self._process_response(result)
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/google/resumable_media/requests/upload.py 
new/google-resumable-media-2.3.2/google/resumable_media/requests/upload.py
--- old/google-resumable-media-2.1.0/google/resumable_media/requests/upload.py  
2021-10-25 19:33:28.000000000 +0200
+++ new/google-resumable-media-2.3.2/google/resumable_media/requests/upload.py  
2022-03-08 21:51:16.000000000 +0100
@@ -508,7 +508,7 @@
                 method, url, data=payload, headers=headers, timeout=timeout
             )
 
-            self._process_response(result, len(payload))
+            self._process_resumable_response(result, len(payload))
 
             return result
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/google_resumable_media.egg-info/PKG-INFO 
new/google-resumable-media-2.3.2/google_resumable_media.egg-info/PKG-INFO
--- old/google-resumable-media-2.1.0/google_resumable_media.egg-info/PKG-INFO   
2021-10-25 19:36:02.000000000 +0200
+++ new/google-resumable-media-2.3.2/google_resumable_media.egg-info/PKG-INFO   
2022-03-08 21:54:01.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: google-resumable-media
-Version: 2.1.0
+Version: 2.3.2
 Summary: Utilities for Google Media Downloads and Resumable Uploads
 Home-page: https://github.com/googleapis/google-resumable-media-python
 Author: Google Cloud Platform
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/google-resumable-media-2.1.0/setup.py 
new/google-resumable-media-2.3.2/setup.py
--- old/google-resumable-media-2.1.0/setup.py   2021-10-25 19:33:28.000000000 
+0200
+++ new/google-resumable-media-2.3.2/setup.py   2022-03-08 21:51:16.000000000 
+0100
@@ -35,7 +35,7 @@
 
 setuptools.setup(
     name='google-resumable-media',
-    version = "2.1.0",
+    version = "2.3.2",
     description='Utilities for Google Media Downloads and Resumable Uploads',
     author='Google Cloud Platform',
     author_email='[email protected]',
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/tests/system/requests/conftest.py 
new/google-resumable-media-2.3.2/tests/system/requests/conftest.py
--- old/google-resumable-media-2.1.0/tests/system/requests/conftest.py  
2021-10-25 19:33:28.000000000 +0200
+++ new/google-resumable-media-2.3.2/tests/system/requests/conftest.py  
2022-03-08 21:51:16.000000000 +0100
@@ -13,9 +13,9 @@
 # limitations under the License.
 """py.test fixtures to be shared across multiple system test modules."""
 
-import google.auth
-import google.auth.transport.requests as tr_requests
-import pytest
+import google.auth  # type: ignore
+import google.auth.transport.requests as tr_requests  # type: ignore
+import pytest  # type: ignore
 
 from tests.system import utils
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/tests/system/requests/test_download.py 
new/google-resumable-media-2.3.2/tests/system/requests/test_download.py
--- old/google-resumable-media-2.1.0/tests/system/requests/test_download.py     
2021-10-25 19:33:28.000000000 +0200
+++ new/google-resumable-media-2.3.2/tests/system/requests/test_download.py     
2022-03-08 21:51:16.000000000 +0100
@@ -19,9 +19,9 @@
 import io
 import os
 
-import google.auth
-import google.auth.transport.requests as tr_requests
-import pytest
+import google.auth  # type: ignore
+import google.auth.transport.requests as tr_requests  # type: ignore
+import pytest  # type: ignore
 
 from google.resumable_media import common
 import google.resumable_media.requests as resumable_requests
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/tests/system/requests/test_upload.py 
new/google-resumable-media-2.3.2/tests/system/requests/test_upload.py
--- old/google-resumable-media-2.1.0/tests/system/requests/test_upload.py       
2021-10-25 19:33:28.000000000 +0200
+++ new/google-resumable-media-2.3.2/tests/system/requests/test_upload.py       
2022-03-08 21:51:16.000000000 +0100
@@ -19,7 +19,7 @@
 import os
 import urllib.parse
 
-import pytest
+import pytest  # type: ignore
 import mock
 
 from google.resumable_media import common
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/google-resumable-media-2.1.0/tests/system/utils.py 
new/google-resumable-media-2.3.2/tests/system/utils.py
--- old/google-resumable-media-2.1.0/tests/system/utils.py      2021-10-25 
19:33:28.000000000 +0200
+++ new/google-resumable-media-2.3.2/tests/system/utils.py      2022-03-08 
21:51:16.000000000 +0100
@@ -16,7 +16,7 @@
 import hashlib
 import time
 
-from test_utils.retry import RetryResult
+from test_utils.retry import RetryResult  # type: ignore
 
 
 BUCKET_NAME = "grpm-systest-{}".format(int(1000 * time.time()))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/tests/unit/requests/test__helpers.py 
new/google-resumable-media-2.3.2/tests/unit/requests/test__helpers.py
--- old/google-resumable-media-2.1.0/tests/unit/requests/test__helpers.py       
2021-10-25 19:33:28.000000000 +0200
+++ new/google-resumable-media-2.3.2/tests/unit/requests/test__helpers.py       
2022-03-08 21:51:16.000000000 +0100
@@ -15,10 +15,10 @@
 import http.client
 
 import mock
-import pytest
+import pytest  # type: ignore
 
 import requests.exceptions
-import urllib3.exceptions
+import urllib3.exceptions  # type: ignore
 
 from google.resumable_media import common
 from google.resumable_media.requests import _request_helpers
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/tests/unit/requests/test_download.py 
new/google-resumable-media-2.3.2/tests/unit/requests/test_download.py
--- old/google-resumable-media-2.1.0/tests/unit/requests/test_download.py       
2021-10-25 19:33:28.000000000 +0200
+++ new/google-resumable-media-2.3.2/tests/unit/requests/test_download.py       
2022-03-08 21:51:16.000000000 +0100
@@ -16,7 +16,7 @@
 import io
 
 import mock
-import pytest
+import pytest  # type: ignore
 
 from google.resumable_media import common
 from google.resumable_media import _helpers
@@ -42,6 +42,7 @@
         assert ret_val is None
 
         assert stream.getvalue() == chunk1 + chunk2
+        assert download._bytes_downloaded == len(chunk1 + chunk2)
 
         # Check mocks.
         response.__enter__.assert_called_once_with()
@@ -66,6 +67,8 @@
         assert ret_val is None
 
         assert stream.getvalue() == chunk1 + chunk2 + chunk3
+        assert download._bytes_downloaded == len(chunk1 + chunk2 + chunk3)
+        assert download._checksum_object is not None
 
         # Check mocks.
         response.__enter__.assert_called_once_with()
@@ -273,6 +276,125 @@
         # Make sure the headers have been modified.
         assert headers == {"range": range_bytes}
 
+    def test_consume_gets_generation_from_url(self):
+        GENERATION_VALUE = 1641590104888641
+        url = EXAMPLE_URL + f"&generation={GENERATION_VALUE}"
+        stream = io.BytesIO()
+        chunks = (b"up down ", b"charlie ", b"brown")
+
+        download = download_mod.Download(
+            url, stream=stream, end=65536, headers=None, checksum="md5"
+        )
+        transport = mock.Mock(spec=["request"])
+        transport.request.return_value = _mock_response(chunks=chunks, headers=None)
+
+        assert not download.finished
+        assert download._object_generation is None
+
+        ret_val = download.consume(transport)
+
+        assert download._object_generation == GENERATION_VALUE
+        assert ret_val is transport.request.return_value
+        assert stream.getvalue() == b"".join(chunks)
+
+        called_kwargs = {
+            "data": None,
+            "headers": download._headers,
+            "timeout": EXPECTED_TIMEOUT,
+            "stream": True,
+        }
+        transport.request.assert_called_once_with("GET", url, **called_kwargs)
+
+    def test_consume_gets_generation_from_headers(self):
+        GENERATION_VALUE = 1641590104888641
+        stream = io.BytesIO()
+        chunks = (b"up down ", b"charlie ", b"brown")
+
+        download = download_mod.Download(
+            EXAMPLE_URL, stream=stream, end=65536, headers=None, checksum="md5"
+        )
+        transport = mock.Mock(spec=["request"])
+        headers = {_helpers._GENERATION_HEADER: GENERATION_VALUE}
+        transport.request.return_value = _mock_response(chunks=chunks, headers=headers)
+
+        assert not download.finished
+        assert download._object_generation is None
+
+        ret_val = download.consume(transport)
+
+        assert download._object_generation == GENERATION_VALUE
+        assert ret_val is transport.request.return_value
+        assert stream.getvalue() == b"".join(chunks)
+
+        called_kwargs = {
+            "data": None,
+            "headers": download._headers,
+            "timeout": EXPECTED_TIMEOUT,
+            "stream": True,
+        }
+        transport.request.assert_called_once_with("GET", EXAMPLE_URL, **called_kwargs)
+
+    def test_consume_w_object_generation(self):
+        GENERATION_VALUE = 1641590104888641
+        stream = io.BytesIO()
+        chunks = (b"up down ", b"charlie ", b"brown")
+        end = 65536
+
+        download = download_mod.Download(
+            EXAMPLE_URL, stream=stream, end=end, headers=None, checksum="md5"
+        )
+        transport = mock.Mock(spec=["request"])
+        transport.request.return_value = _mock_response(chunks=chunks, headers=None)
+
+        assert download._object_generation is None
+
+        # Mock a retry operation with object generation retrieved and bytes already downloaded in the stream
+        download._object_generation = GENERATION_VALUE
+        offset = 256
+        download._bytes_downloaded = offset
+        download.consume(transport)
+
+        expected_url = EXAMPLE_URL + f"&generation={GENERATION_VALUE}"
+        called_kwargs = {
+            "data": None,
+            "headers": download._headers,
+            "timeout": EXPECTED_TIMEOUT,
+            "stream": True,
+        }
+        transport.request.assert_called_once_with("GET", expected_url, **called_kwargs)
+        range_bytes = "bytes={:d}-{:d}".format(offset, end)
+        assert download._headers["range"] == range_bytes
+
+    def test_consume_w_bytes_downloaded(self):
+        stream = io.BytesIO()
+        chunks = (b"up down ", b"charlie ", b"brown")
+        end = 65536
+
+        download = download_mod.Download(
+            EXAMPLE_URL, stream=stream, end=end, headers=None, checksum="md5"
+        )
+        transport = mock.Mock(spec=["request"])
+        transport.request.return_value = _mock_response(chunks=chunks, headers=None)
+
+        assert download._bytes_downloaded == 0
+
+        # Mock a retry operation with bytes already downloaded in the stream and checksum stored
+        offset = 256
+        download._bytes_downloaded = offset
+        download._expected_checksum = None
+        download._checksum_object = _helpers._DoNothingHash()
+        download.consume(transport)
+
+        called_kwargs = {
+            "data": None,
+            "headers": download._headers,
+            "timeout": EXPECTED_TIMEOUT,
+            "stream": True,
+        }
+        transport.request.assert_called_once_with("GET", EXAMPLE_URL, **called_kwargs)
+        range_bytes = "bytes={:d}-{:d}".format(offset, end)
+        assert download._headers["range"] == range_bytes
+
 
 class TestRawDownload(object):
     def test__write_to_stream_no_hash_check(self):
@@ -287,6 +409,7 @@
         assert ret_val is None
 
         assert stream.getvalue() == chunk1 + chunk2
+        assert download._bytes_downloaded == len(chunk1 + chunk2)
 
         # Check mocks.
         response.__enter__.assert_called_once_with()
@@ -313,6 +436,8 @@
         assert ret_val is None
 
         assert stream.getvalue() == chunk1 + chunk2 + chunk3
+        assert download._bytes_downloaded == len(chunk1 + chunk2 + chunk3)
+        assert download._checksum_object is not None
 
         # Check mocks.
         response.__enter__.assert_called_once_with()
@@ -526,6 +651,127 @@
         # Make sure the headers have been modified.
         assert headers == {"range": range_bytes}
 
+    def test_consume_gets_generation_from_url(self):
+        GENERATION_VALUE = 1641590104888641
+        url = EXAMPLE_URL + f"&generation={GENERATION_VALUE}"
+        stream = io.BytesIO()
+        chunks = (b"up down ", b"charlie ", b"brown")
+
+        download = download_mod.RawDownload(
+            url, stream=stream, end=65536, headers=None, checksum="md5"
+        )
+        transport = mock.Mock(spec=["request"])
+        transport.request.return_value = _mock_raw_response(chunks=chunks, headers=None)
+
+        assert not download.finished
+        assert download._object_generation is None
+
+        ret_val = download.consume(transport)
+
+        assert download._object_generation == GENERATION_VALUE
+        assert ret_val is transport.request.return_value
+        assert stream.getvalue() == b"".join(chunks)
+
+        called_kwargs = {
+            "data": None,
+            "headers": download._headers,
+            "timeout": EXPECTED_TIMEOUT,
+            "stream": True,
+        }
+        transport.request.assert_called_once_with("GET", url, **called_kwargs)
+
+    def test_consume_gets_generation_from_headers(self):
+        GENERATION_VALUE = 1641590104888641
+        stream = io.BytesIO()
+        chunks = (b"up down ", b"charlie ", b"brown")
+
+        download = download_mod.RawDownload(
+            EXAMPLE_URL, stream=stream, end=65536, headers=None, checksum="md5"
+        )
+        transport = mock.Mock(spec=["request"])
+        headers = {_helpers._GENERATION_HEADER: GENERATION_VALUE}
+        transport.request.return_value = _mock_raw_response(
+            chunks=chunks, headers=headers
+        )
+
+        assert not download.finished
+        assert download._object_generation is None
+
+        ret_val = download.consume(transport)
+
+        assert download._object_generation == GENERATION_VALUE
+        assert ret_val is transport.request.return_value
+        assert stream.getvalue() == b"".join(chunks)
+
+        called_kwargs = {
+            "data": None,
+            "headers": download._headers,
+            "timeout": EXPECTED_TIMEOUT,
+            "stream": True,
+        }
+        transport.request.assert_called_once_with("GET", EXAMPLE_URL, **called_kwargs)
+
+    def test_consume_w_object_generation(self):
+        GENERATION_VALUE = 1641590104888641
+        stream = io.BytesIO()
+        chunks = (b"up down ", b"charlie ", b"brown")
+        end = 65536
+
+        download = download_mod.RawDownload(
+            EXAMPLE_URL, stream=stream, end=end, headers=None, checksum="md5"
+        )
+        transport = mock.Mock(spec=["request"])
+        transport.request.return_value = _mock_raw_response(chunks=chunks, headers=None)
+
+        assert download._object_generation is None
+
+        # Mock a retry operation with object generation retrieved and bytes already downloaded in the stream
+        download._object_generation = GENERATION_VALUE
+        offset = 256
+        download._bytes_downloaded = offset
+        download.consume(transport)
+
+        expected_url = EXAMPLE_URL + f"&generation={GENERATION_VALUE}"
+        called_kwargs = {
+            "data": None,
+            "headers": download._headers,
+            "timeout": EXPECTED_TIMEOUT,
+            "stream": True,
+        }
+        transport.request.assert_called_once_with("GET", expected_url, **called_kwargs)
+        range_bytes = "bytes={:d}-{:d}".format(offset, end)
+        assert download._headers["range"] == range_bytes
+
+    def test_consume_w_bytes_downloaded(self):
+        stream = io.BytesIO()
+        chunks = (b"up down ", b"charlie ", b"brown")
+        end = 65536
+
+        download = download_mod.RawDownload(
+            EXAMPLE_URL, stream=stream, end=end, headers=None, checksum="md5"
+        )
+        transport = mock.Mock(spec=["request"])
+        transport.request.return_value = _mock_raw_response(chunks=chunks, headers=None)
+
+        assert download._bytes_downloaded == 0
+
+        # Mock a retry operation with bytes already downloaded in the stream and checksum stored
+        offset = 256
+        download._bytes_downloaded = offset
+        download._expected_checksum = None
+        download._checksum_object = _helpers._DoNothingHash()
+        download.consume(transport)
+
+        called_kwargs = {
+            "data": None,
+            "headers": download._headers,
+            "timeout": EXPECTED_TIMEOUT,
+            "stream": True,
+        }
+        transport.request.assert_called_once_with("GET", EXAMPLE_URL, **called_kwargs)
+        range_bytes = "bytes={:d}-{:d}".format(offset, end)
+        assert download._headers["range"] == range_bytes
+
 
 class TestChunkedDownload(object):
     @staticmethod
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/tests/unit/test__download.py 
new/google-resumable-media-2.3.2/tests/unit/test__download.py
--- old/google-resumable-media-2.1.0/tests/unit/test__download.py       
2021-10-25 19:33:28.000000000 +0200
+++ new/google-resumable-media-2.3.2/tests/unit/test__download.py       
2022-03-08 21:51:16.000000000 +0100
@@ -16,7 +16,7 @@
 import io
 
 import mock
-import pytest
+import pytest  # type: ignore
 
 from google.resumable_media import _download
 from google.resumable_media import common
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/tests/unit/test__helpers.py 
new/google-resumable-media-2.3.2/tests/unit/test__helpers.py
--- old/google-resumable-media-2.1.0/tests/unit/test__helpers.py        
2021-10-25 19:33:28.000000000 +0200
+++ new/google-resumable-media-2.3.2/tests/unit/test__helpers.py        
2022-03-08 21:51:16.000000000 +0100
@@ -18,7 +18,7 @@
 import http.client
 
 import mock
-import pytest
+import pytest  # type: ignore
 
 from google.resumable_media import _helpers
 from google.resumable_media import common
@@ -408,6 +408,66 @@
         assert error.args[2] == [self.MD5_CHECKSUM, another_checksum]
 
 
+class Test__parse_generation_header(object):
+
+    GENERATION_VALUE = 1641590104888641
+
+    def test_empty_value(self):
+        headers = {}
+        response = _mock_response(headers=headers)
+        generation_header = _helpers._parse_generation_header(response, _get_headers)
+        assert generation_header is None
+
+    def test_header_value(self):
+        headers = {_helpers._GENERATION_HEADER: self.GENERATION_VALUE}
+        response = _mock_response(headers=headers)
+        generation_header = _helpers._parse_generation_header(response, _get_headers)
+        assert generation_header == self.GENERATION_VALUE
+
+
+class Test__get_generation_from_url(object):
+
+    GENERATION_VALUE = 1641590104888641
+    MEDIA_URL = (
+        "https://storage.googleapis.com/storage/v1/b/my-bucket/o/my-object?alt=media"
+    )
+    MEDIA_URL_W_GENERATION = MEDIA_URL + f"&generation={GENERATION_VALUE}"
+
+    def test_empty_value(self):
+        generation = _helpers._get_generation_from_url(self.MEDIA_URL)
+        assert generation is None
+
+    def test_generation_in_url(self):
+        generation = _helpers._get_generation_from_url(self.MEDIA_URL_W_GENERATION)
+        assert generation == self.GENERATION_VALUE
+
+
+class Test__add_query_parameters(object):
+    def test_w_empty_list(self):
+        query_params = {}
+        MEDIA_URL = "https://storage.googleapis.com/storage/v1/b/my-bucket/o/my-object"
+        new_url = _helpers.add_query_parameters(MEDIA_URL, query_params)
+        assert new_url == MEDIA_URL
+
+    def test_wo_existing_qs(self):
+        query_params = {"one": "One", "two": "Two"}
+        MEDIA_URL = "https://storage.googleapis.com/storage/v1/b/my-bucket/o/my-object"
+        expected = "&".join(
+            ["{}={}".format(name, value) for name, value in query_params.items()]
+        )
+        new_url = _helpers.add_query_parameters(MEDIA_URL, query_params)
+        assert new_url == "{}?{}".format(MEDIA_URL, expected)
+
+    def test_w_existing_qs(self):
+        query_params = {"one": "One", "two": "Two"}
+        MEDIA_URL = "https://storage.googleapis.com/storage/v1/b/my-bucket/o/my-object?alt=media"
+        expected = "&".join(
+            ["{}={}".format(name, value) for name, value in query_params.items()]
+        )
+        new_url = _helpers.add_query_parameters(MEDIA_URL, query_params)
+        assert new_url == "{}&{}".format(MEDIA_URL, expected)
+
+
 def _mock_response(headers):
     return mock.Mock(
         headers=headers,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/tests/unit/test__upload.py 
new/google-resumable-media-2.3.2/tests/unit/test__upload.py
--- old/google-resumable-media-2.1.0/tests/unit/test__upload.py 2021-10-25 
19:33:28.000000000 +0200
+++ new/google-resumable-media-2.3.2/tests/unit/test__upload.py 2022-03-08 
21:51:16.000000000 +0100
@@ -17,7 +17,7 @@
 import sys
 
 import mock
-import pytest
+import pytest  # type: ignore
 
 from google.resumable_media import _helpers
 from google.resumable_media import _upload
@@ -393,12 +393,14 @@
         upload._total_bytes = 8192
         assert upload.total_bytes == 8192
 
-    def _prepare_initiate_request_helper(self, upload_headers=None, **method_kwargs):
+    def _prepare_initiate_request_helper(
+        self, upload_url=RESUMABLE_URL, upload_headers=None, **method_kwargs
+    ):
         data = b"some really big big data."
         stream = io.BytesIO(data)
         metadata = {"name": "big-data-file.txt"}
 
-        upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB, headers=upload_headers)
+        upload = _upload.ResumableUpload(upload_url, ONE_MB, headers=upload_headers)
         orig_headers = upload._headers.copy()
         # Check ``upload``-s state before.
         assert upload._stream is None
@@ -435,8 +437,28 @@
         }
         assert headers == expected_headers
 
+    def test_prepare_initiate_request_with_signed_url(self):
+        signed_urls = [
+            "https://storage.googleapis.com/b/o?x-goog-signature=123abc",
+            "https://storage.googleapis.com/b/o?X-Goog-Signature=123abc",
+        ]
+        for signed_url in signed_urls:
+            data, headers = self._prepare_initiate_request_helper(
+                upload_url=signed_url,
+            )
+            expected_headers = {
+                "content-type": BASIC_CONTENT,
+                "x-upload-content-length": "{:d}".format(len(data)),
+            }
+            assert headers == expected_headers
+
     def test__prepare_initiate_request_with_headers(self):
-        headers = {"caviar": "beluga", "top": "quark"}
+        # content-type header should be overwritten, the rest should stay
+        headers = {
+            "caviar": "beluga",
+            "top": "quark",
+            "content-type": "application/xhtml",
+        }
         data, new_headers = self._prepare_initiate_request_helper(
             upload_headers=headers
         )
@@ -600,16 +622,15 @@
         assert headers == expected_headers
 
     def test__prepare_request_success_with_headers(self):
-        headers = {"cannot": "touch this"}
+        headers = {"keep": "this"}
         new_headers = self._prepare_request_helper(headers)
         assert new_headers is not headers
         expected_headers = {
+            "keep": "this",
             "content-range": "bytes 0-32/33",
             "content-type": BASIC_CONTENT,
         }
         assert new_headers == expected_headers
-        # Make sure the ``_headers`` are not incorporated.
-        assert "cannot" not in new_headers
 
     @pytest.mark.parametrize("checksum", ["md5", "crc32c"])
     def test__prepare_request_with_checksum(self, checksum):
@@ -705,7 +726,7 @@
         upload._make_invalid()
         assert upload.invalid
 
-    def test__process_response_bad_status(self):
+    def test__process_resumable_response_bad_status(self):
         upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB)
         _fix_up_virtual(upload)
 
@@ -713,7 +734,7 @@
         assert not upload.invalid
         response = _make_response(status_code=http.client.NOT_FOUND)
         with pytest.raises(common.InvalidResponse) as exc_info:
-            upload._process_response(response, None)
+            upload._process_resumable_response(response, None)
 
         error = exc_info.value
         assert error.response is response
@@ -724,7 +745,7 @@
         # Make sure the upload is invalid after the failure.
         assert upload.invalid
 
-    def test__process_response_success(self):
+    def test__process_resumable_response_success(self):
         upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB)
         _fix_up_virtual(upload)
 
@@ -743,13 +764,13 @@
             status_code=http.client.OK,
             spec=["content", "status_code"],
         )
-        ret_val = upload._process_response(response, bytes_sent)
+        ret_val = upload._process_resumable_response(response, bytes_sent)
         assert ret_val is None
         # Check status after.
         assert upload._bytes_uploaded == total_bytes
         assert upload._finished
 
-    def test__process_response_partial_no_range(self):
+    def test__process_resumable_response_partial_no_range(self):
         upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB)
         _fix_up_virtual(upload)
 
@@ -757,7 +778,7 @@
         # Make sure the upload is valid before the failure.
         assert not upload.invalid
         with pytest.raises(common.InvalidResponse) as exc_info:
-            upload._process_response(response, None)
+            upload._process_resumable_response(response, None)
         # Make sure the upload is invalid after the failure.
         assert upload.invalid
 
@@ -767,7 +788,7 @@
         assert len(error.args) == 2
         assert error.args[1] == "range"
 
-    def test__process_response_partial_bad_range(self):
+    def test__process_resumable_response_partial_bad_range(self):
         upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB)
         _fix_up_virtual(upload)
 
@@ -778,7 +799,7 @@
             status_code=http.client.PERMANENT_REDIRECT, headers=headers
         )
         with pytest.raises(common.InvalidResponse) as exc_info:
-            upload._process_response(response, 81)
+            upload._process_resumable_response(response, 81)
 
         # Check the error response.
         error = exc_info.value
@@ -788,7 +809,7 @@
         # Make sure the upload is invalid after the failure.
         assert upload.invalid
 
-    def test__process_response_partial(self):
+    def test__process_resumable_response_partial(self):
         upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB)
         _fix_up_virtual(upload)
 
@@ -798,7 +819,7 @@
         response = _make_response(
             status_code=http.client.PERMANENT_REDIRECT, headers=headers
         )
-        ret_val = upload._process_response(response, 172)
+        ret_val = upload._process_resumable_response(response, 172)
         assert ret_val is None
         # Check status after.
         assert upload._bytes_uploaded == 172
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/google-resumable-media-2.1.0/tests/unit/test_common.py 
new/google-resumable-media-2.3.2/tests/unit/test_common.py
--- old/google-resumable-media-2.1.0/tests/unit/test_common.py  2021-10-25 
19:33:28.000000000 +0200
+++ new/google-resumable-media-2.3.2/tests/unit/test_common.py  2022-03-08 
21:51:16.000000000 +0100
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 import mock
-import pytest
+import pytest  # type: ignore
 
 from google.resumable_media import common
 
