Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package python-google-cloud-storage for
openSUSE:Factory checked in at 2023-01-09 17:23:27
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-google-cloud-storage (Old)
and /work/SRC/openSUSE:Factory/.python-google-cloud-storage.new.32243 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-google-cloud-storage"
Mon Jan 9 17:23:27 2023 rev:17 rq:1056981 version:2.7.0
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-google-cloud-storage/python-google-cloud-storage.changes 2022-11-17 17:25:07.469262230 +0100
+++ /work/SRC/openSUSE:Factory/.python-google-cloud-storage.new.32243/python-google-cloud-storage.changes 2023-01-09 17:23:27.611042199 +0100
@@ -1,0 +2,9 @@
+Fri Jan 6 10:59:03 UTC 2023 - John Paul Adrian Glaubitz <[email protected]>
+
+- Update to 2.7.0
+  * Add "transfer_manager" module for concurrent uploads and
+    downloads, as a preview feature (#943)
+ * Add use_auth_w_custom_endpoint support (#941)
+ * Implement closed property on fileio.py classes (#907)
+
+-------------------------------------------------------------------
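For orientation, here is a minimal usage sketch of the headline 2.7.0 feature, the preview transfer_manager module; the bucket name, file names and directory below are hypothetical placeholders, not taken from this package:

# Sketch of the preview API added in 2.7.0; importing the module emits a
# UserWarning because the API may still change.
from google.cloud import storage
from google.cloud.storage import transfer_manager

client = storage.Client()
bucket = client.bucket("example-bucket")

# Upload two local files concurrently. Each result is None on success or
# the exception raised by that individual upload.
results = transfer_manager.upload_many_from_filenames(
    bucket, ["a.txt", "b.txt"], source_directory="/tmp/data"
)
for name, result in zip(["a.txt", "b.txt"], results):
    print(name, "ok" if result is None else result)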
Old:
----
google-cloud-storage-2.6.0.tar.gz
New:
----
google-cloud-storage-2.7.0.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-google-cloud-storage.spec ++++++
--- /var/tmp/diff_new_pack.8Kictv/_old 2023-01-09 17:23:29.087050552 +0100
+++ /var/tmp/diff_new_pack.8Kictv/_new 2023-01-09 17:23:29.091050574 +0100
@@ -1,7 +1,7 @@
#
# spec file
#
-# Copyright (c) 2022 SUSE LLC
+# Copyright (c) 2023 SUSE LLC
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -30,7 +30,7 @@
%{?!python_module:%define python_module() python-%{**} python3-%{**}}
Name: python-google-cloud-storage%{pkg_suffix}
-Version: 2.6.0
+Version: 2.7.0
Release: 0
Summary: Google Cloud Storage API python client library
License: Apache-2.0
@@ -89,7 +89,7 @@
%if %{with test}
%check
#export PYTEST_ADDOPTS="--import-mode=importlib"
-%pytest -k 'not network' tests/unit
+%pytest -k 'not network and not w_custom_endpoint' tests/unit
%endif
%if !%{with test}
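The widened -k expression deselects the new test_ctor_w_custom_endpoint_* tests along with the network tests; a plausible reason (an assumption, the spec does not say) is that they attempt a default-credentials lookup that cannot succeed in the offline build sandbox. Outside the RPM macro, the invocation is roughly equivalent to:

# Approximate equivalent of the %pytest line above, run from the unpacked
# source tree. The -k expression skips any test whose name matches
# "network" or "w_custom_endpoint".
import sys

import pytest

sys.exit(pytest.main(["-k", "not network and not w_custom_endpoint", "tests/unit"]))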
++++++ demock.patch ++++++
--- /var/tmp/diff_new_pack.8Kictv/_old 2023-01-09 17:23:29.127050778 +0100
+++ /var/tmp/diff_new_pack.8Kictv/_new 2023-01-09 17:23:29.131050801 +0100
@@ -1,7 +1,8 @@
-diff -upr google-cloud-storage-2.4.0.orig/tests/system/test_blob.py google-cloud-storage-2.4.0/tests/system/test_blob.py
---- google-cloud-storage-2.4.0.orig/tests/system/test_blob.py 2022-06-09 11:55:42.724388610 +0200
-+++ google-cloud-storage-2.4.0/tests/system/test_blob.py 2022-06-09 11:55:42.776388901 +0200
-@@ -20,7 +20,7 @@ import tempfile
+Index: google-cloud-storage-2.7.0/tests/system/test_blob.py
+===================================================================
+--- google-cloud-storage-2.7.0.orig/tests/system/test_blob.py
++++ google-cloud-storage-2.7.0/tests/system/test_blob.py
+@@ -21,7 +21,7 @@ import uuid
import warnings
import pytest
@@ -10,9 +11,10 @@
from google import resumable_media
from google.api_core import exceptions
-diff -upr google-cloud-storage-2.4.0.orig/tests/unit/test_acl.py google-cloud-storage-2.4.0/tests/unit/test_acl.py
---- google-cloud-storage-2.4.0.orig/tests/unit/test_acl.py 2022-06-09 11:55:42.748388744 +0200
-+++ google-cloud-storage-2.4.0/tests/unit/test_acl.py 2022-06-09 11:55:42.780388923 +0200
+Index: google-cloud-storage-2.7.0/tests/unit/test_acl.py
+===================================================================
+--- google-cloud-storage-2.7.0.orig/tests/unit/test_acl.py
++++ google-cloud-storage-2.7.0/tests/unit/test_acl.py
@@ -14,7 +14,7 @@
import unittest
@@ -22,9 +24,10 @@
from google.cloud.storage.retry import (
DEFAULT_RETRY,
-diff -upr google-cloud-storage-2.4.0.orig/tests/unit/test_batch.py google-cloud-storage-2.4.0/tests/unit/test_batch.py
---- google-cloud-storage-2.4.0.orig/tests/unit/test_batch.py 2022-06-09 11:55:42.748388744 +0200
-+++ google-cloud-storage-2.4.0/tests/unit/test_batch.py 2022-06-09 11:55:42.780388923 +0200
+Index: google-cloud-storage-2.7.0/tests/unit/test_batch.py
+===================================================================
+--- google-cloud-storage-2.7.0.orig/tests/unit/test_batch.py
++++ google-cloud-storage-2.7.0/tests/unit/test_batch.py
@@ -17,7 +17,7 @@ from http.client import SERVICE_UNAVAILA
from http.client import NO_CONTENT
import unittest
@@ -34,9 +37,10 @@
import requests
-diff -upr google-cloud-storage-2.4.0.orig/tests/unit/test_blob.py google-cloud-storage-2.4.0/tests/unit/test_blob.py
---- google-cloud-storage-2.4.0.orig/tests/unit/test_blob.py 2022-06-09 11:55:42.748388744 +0200
-+++ google-cloud-storage-2.4.0/tests/unit/test_blob.py 2022-06-09 11:55:42.784388945 +0200
+Index: google-cloud-storage-2.7.0/tests/unit/test_blob.py
+===================================================================
+--- google-cloud-storage-2.7.0.orig/tests/unit/test_blob.py
++++ google-cloud-storage-2.7.0/tests/unit/test_blob.py
@@ -24,7 +24,7 @@ import http.client
from unittest.mock import patch
from urllib.parse import urlencode
@@ -46,9 +50,10 @@
import pytest
from google.cloud.storage import _helpers
-diff -upr google-cloud-storage-2.4.0.orig/tests/unit/test_bucket.py google-cloud-storage-2.4.0/tests/unit/test_bucket.py
---- google-cloud-storage-2.4.0.orig/tests/unit/test_bucket.py 2022-06-09 11:55:42.748388744 +0200
-+++ google-cloud-storage-2.4.0/tests/unit/test_bucket.py 2022-06-09 11:55:42.784388945 +0200
+Index: google-cloud-storage-2.7.0/tests/unit/test_bucket.py
+===================================================================
+--- google-cloud-storage-2.7.0.orig/tests/unit/test_bucket.py
++++ google-cloud-storage-2.7.0/tests/unit/test_bucket.py
@@ -15,7 +15,7 @@
import datetime
import unittest
@@ -58,9 +63,10 @@
import pytest
from google.cloud.storage.retry import DEFAULT_RETRY
-diff -upr google-cloud-storage-2.4.0.orig/tests/unit/test_client.py google-cloud-storage-2.4.0/tests/unit/test_client.py
---- google-cloud-storage-2.4.0.orig/tests/unit/test_client.py 2022-06-09 11:55:42.748388744 +0200
-+++ google-cloud-storage-2.4.0/tests/unit/test_client.py 2022-06-09 11:55:42.784388945 +0200
+Index: google-cloud-storage-2.7.0/tests/unit/test_client.py
+===================================================================
+--- google-cloud-storage-2.7.0.orig/tests/unit/test_client.py
++++ google-cloud-storage-2.7.0/tests/unit/test_client.py
@@ -17,7 +17,7 @@ import http.client
import io
import json
@@ -70,9 +76,10 @@
import pytest
import re
import requests
-diff -upr google-cloud-storage-2.4.0.orig/tests/unit/test_fileio.py google-cloud-storage-2.4.0/tests/unit/test_fileio.py
---- google-cloud-storage-2.4.0.orig/tests/unit/test_fileio.py 2022-06-09 11:55:42.748388744 +0200
-+++ google-cloud-storage-2.4.0/tests/unit/test_fileio.py 2022-06-09 11:55:42.784388945 +0200
+Index: google-cloud-storage-2.7.0/tests/unit/test_fileio.py
+===================================================================
+--- google-cloud-storage-2.7.0.orig/tests/unit/test_fileio.py
++++ google-cloud-storage-2.7.0/tests/unit/test_fileio.py
@@ -18,7 +18,7 @@ import unittest
import io
import string
@@ -82,9 +89,10 @@
from google.api_core.exceptions import RequestRangeNotSatisfiable
from google.cloud.storage.retry import DEFAULT_RETRY
-diff -upr google-cloud-storage-2.4.0.orig/tests/unit/test__helpers.py google-cloud-storage-2.4.0/tests/unit/test__helpers.py
---- google-cloud-storage-2.4.0.orig/tests/unit/test__helpers.py 2022-06-09 11:55:42.748388744 +0200
-+++ google-cloud-storage-2.4.0/tests/unit/test__helpers.py 2022-06-09 11:55:42.780388923 +0200
+Index: google-cloud-storage-2.7.0/tests/unit/test__helpers.py
+===================================================================
+--- google-cloud-storage-2.7.0.orig/tests/unit/test__helpers.py
++++ google-cloud-storage-2.7.0/tests/unit/test__helpers.py
@@ -14,7 +14,7 @@
import unittest
@@ -103,9 +111,10 @@
class _Buffer(object):
def __init__(self, return_vals):
-diff -upr google-cloud-storage-2.4.0.orig/tests/unit/test_hmac_key.py google-cloud-storage-2.4.0/tests/unit/test_hmac_key.py
---- google-cloud-storage-2.4.0.orig/tests/unit/test_hmac_key.py 2022-06-09 11:55:42.748388744 +0200
-+++ google-cloud-storage-2.4.0/tests/unit/test_hmac_key.py 2022-06-09 11:55:42.788388968 +0200
+Index: google-cloud-storage-2.7.0/tests/unit/test_hmac_key.py
+===================================================================
+--- google-cloud-storage-2.7.0.orig/tests/unit/test_hmac_key.py
++++ google-cloud-storage-2.7.0/tests/unit/test_hmac_key.py
@@ -14,7 +14,7 @@
import unittest
@@ -115,9 +124,10 @@
from google.cloud.storage.retry import DEFAULT_RETRY
from google.cloud.storage.retry import DEFAULT_RETRY_IF_ETAG_IN_JSON
-diff -upr google-cloud-storage-2.4.0.orig/tests/unit/test__http.py google-cloud-storage-2.4.0/tests/unit/test__http.py
---- google-cloud-storage-2.4.0.orig/tests/unit/test__http.py 2022-06-09 11:55:42.748388744 +0200
-+++ google-cloud-storage-2.4.0/tests/unit/test__http.py 2022-06-09 11:55:42.780388923 +0200
+Index: google-cloud-storage-2.7.0/tests/unit/test__http.py
+===================================================================
+--- google-cloud-storage-2.7.0.orig/tests/unit/test__http.py
++++ google-cloud-storage-2.7.0/tests/unit/test__http.py
@@ -15,7 +15,7 @@
import unittest
from unittest.mock import patch
@@ -127,9 +137,10 @@
from google.cloud.storage import _helpers
from tests.unit.test__helpers import GCCL_INVOCATION_TEST_CONST
-diff -upr google-cloud-storage-2.4.0.orig/tests/unit/test_notification.py google-cloud-storage-2.4.0/tests/unit/test_notification.py
---- google-cloud-storage-2.4.0.orig/tests/unit/test_notification.py 2022-06-09 11:55:42.748388744 +0200
-+++ google-cloud-storage-2.4.0/tests/unit/test_notification.py 2022-06-09 11:55:42.788388968 +0200
+Index: google-cloud-storage-2.7.0/tests/unit/test_notification.py
+===================================================================
+--- google-cloud-storage-2.7.0.orig/tests/unit/test_notification.py
++++ google-cloud-storage-2.7.0/tests/unit/test_notification.py
@@ -14,7 +14,7 @@
import unittest
@@ -139,9 +150,10 @@
from google.cloud.storage.retry import DEFAULT_RETRY
-diff -upr google-cloud-storage-2.4.0.orig/tests/unit/test_retry.py google-cloud-storage-2.4.0/tests/unit/test_retry.py
---- google-cloud-storage-2.4.0.orig/tests/unit/test_retry.py 2022-06-09 11:55:42.748388744 +0200
-+++ google-cloud-storage-2.4.0/tests/unit/test_retry.py 2022-06-09 11:55:42.788388968 +0200
+Index: google-cloud-storage-2.7.0/tests/unit/test_retry.py
+===================================================================
+--- google-cloud-storage-2.7.0.orig/tests/unit/test_retry.py
++++ google-cloud-storage-2.7.0/tests/unit/test_retry.py
@@ -16,7 +16,7 @@ import unittest
from google.cloud.storage import _helpers
@@ -151,9 +163,10 @@
class Test_should_retry(unittest.TestCase):
-diff -upr google-cloud-storage-2.4.0.orig/tests/unit/test__signing.py google-cloud-storage-2.4.0/tests/unit/test__signing.py
---- google-cloud-storage-2.4.0.orig/tests/unit/test__signing.py 2022-06-09 11:55:42.748388744 +0200
-+++ google-cloud-storage-2.4.0/tests/unit/test__signing.py 2022-06-09 11:55:42.780388923 +0200
+Index: google-cloud-storage-2.7.0/tests/unit/test__signing.py
+===================================================================
+--- google-cloud-storage-2.7.0.orig/tests/unit/test__signing.py
++++ google-cloud-storage-2.7.0/tests/unit/test__signing.py
@@ -23,7 +23,7 @@ import time
import unittest
import urllib.parse
@@ -163,4 +176,17 @@
import pytest
from . import _read_local_json
+Index: google-cloud-storage-2.7.0/tests/unit/test_transfer_manager.py
+===================================================================
+--- google-cloud-storage-2.7.0.orig/tests/unit/test_transfer_manager.py
++++ google-cloud-storage-2.7.0/tests/unit/test_transfer_manager.py
+@@ -22,7 +22,7 @@ from google.api_core import exceptions
+ import os
+ import tempfile
+ import unittest
+-import mock
++from unittest import mock
+
+
+ class Test_Transfer_Manager(unittest.TestCase):
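Every hunk in demock.patch performs the same mechanical substitution: the tests import the standard-library mock rather than the third-party backport, removing a build dependency. The pattern in isolation:

# Before (requires the external "mock" package):
#     import mock
# After (standard library only, available since Python 3.3):
from unittest import mock

stub = mock.Mock()
stub.fetch.return_value = 42
assert stub.fetch() == 42  # the two modules are drop-in compatible here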
++++++ google-cloud-storage-2.6.0.tar.gz -> google-cloud-storage-2.7.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/google-cloud-storage-2.6.0/PKG-INFO new/google-cloud-storage-2.7.0/PKG-INFO
--- old/google-cloud-storage-2.6.0/PKG-INFO 2022-11-07 22:52:41.194609000 +0100
+++ new/google-cloud-storage-2.7.0/PKG-INFO 2022-12-07 02:20:04.691713300 +0100
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: google-cloud-storage
-Version: 2.6.0
+Version: 2.7.0
Summary: Google Cloud Storage API client library
Home-page: https://github.com/googleapis/python-storage
Author: Google LLC
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/google-cloud-storage-2.6.0/google/cloud/storage/_helpers.py new/google-cloud-storage-2.7.0/google/cloud/storage/_helpers.py
--- old/google-cloud-storage-2.6.0/google/cloud/storage/_helpers.py 2022-11-07 22:49:21.000000000 +0100
+++ new/google-cloud-storage-2.7.0/google/cloud/storage/_helpers.py 2022-12-07 02:16:52.000000000 +0100
@@ -33,17 +33,20 @@
STORAGE_EMULATOR_ENV_VAR = "STORAGE_EMULATOR_HOST"
"""Environment variable defining host for Storage emulator."""
+_API_ENDPOINT_OVERRIDE_ENV_VAR = "API_ENDPOINT_OVERRIDE"
+"""This is an experimental configuration variable. Use api_endpoint instead."""
+
+_API_VERSION_OVERRIDE_ENV_VAR = "API_VERSION_OVERRIDE"
+"""This is an experimental configuration variable used for internal testing."""
+
_DEFAULT_STORAGE_HOST = os.getenv(
- "API_ENDPOINT_OVERRIDE", "https://storage.googleapis.com"
+ _API_ENDPOINT_OVERRIDE_ENV_VAR, "https://storage.googleapis.com"
)
"""Default storage host for JSON API."""
-_API_VERSION = os.getenv("API_VERSION_OVERRIDE", "v1")
+_API_VERSION = os.getenv(_API_VERSION_OVERRIDE_ENV_VAR, "v1")
"""API version of the default storage host"""
-_BASE_STORAGE_URI = "storage.googleapis.com"
-"""Base request endpoint URI for JSON API."""
-
# etag match parameters in snake case and equivalent header
_ETAG_MATCH_PARAMETERS = (
("if_etag_match", "If-Match"),
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/google-cloud-storage-2.6.0/google/cloud/storage/client.py new/google-cloud-storage-2.7.0/google/cloud/storage/client.py
--- old/google-cloud-storage-2.6.0/google/cloud/storage/client.py 2022-11-07 22:49:21.000000000 +0100
+++ new/google-cloud-storage-2.7.0/google/cloud/storage/client.py 2022-12-07 02:16:52.000000000 +0100
@@ -34,7 +34,6 @@
from google.cloud.storage._helpers import _get_default_headers
from google.cloud.storage._helpers import _get_environ_project
from google.cloud.storage._helpers import _get_storage_host
-from google.cloud.storage._helpers import _BASE_STORAGE_URI
from google.cloud.storage._helpers import _DEFAULT_STORAGE_HOST
from google.cloud.storage._helpers import _bucket_bound_hostname_url
from google.cloud.storage._helpers import _add_etag_match_headers
@@ -96,6 +95,12 @@
    :type client_options: :class:`~google.api_core.client_options.ClientOptions` or :class:`dict`
    :param client_options: (Optional) Client options used to set user options on the client.
        API Endpoint should be set through client_options.
+
+ :type use_auth_w_custom_endpoint: bool
+ :param use_auth_w_custom_endpoint:
+ (Optional) Whether authentication is required under custom endpoints.
+ If false, uses AnonymousCredentials and bypasses authentication.
+        Defaults to True. Note this is only used when a custom endpoint is set in conjunction.
"""
SCOPE = (
@@ -112,6 +117,7 @@
_http=None,
client_info=None,
client_options=None,
+ use_auth_w_custom_endpoint=True,
):
self._base_connection = None
@@ -127,13 +133,12 @@
kw_args = {"client_info": client_info}
# `api_endpoint` should be only set by the user via `client_options`,
- # or if the _get_storage_host() returns a non-default value.
+        # or if the _get_storage_host() returns a non-default value (_is_emulator_set).
# `api_endpoint` plays an important role for mTLS, if it is not set,
        # then mTLS logic will be applied to decide which endpoint will be used.
storage_host = _get_storage_host()
- kw_args["api_endpoint"] = (
- storage_host if storage_host != _DEFAULT_STORAGE_HOST else None
- )
+ _is_emulator_set = storage_host != _DEFAULT_STORAGE_HOST
+ kw_args["api_endpoint"] = storage_host if _is_emulator_set else None
if client_options:
if type(client_options) == dict:
@@ -144,19 +149,20 @@
api_endpoint = client_options.api_endpoint
kw_args["api_endpoint"] = api_endpoint
- # Use anonymous credentials and no project when
- # STORAGE_EMULATOR_HOST or a non-default api_endpoint is set.
- if (
- kw_args["api_endpoint"] is not None
- and _BASE_STORAGE_URI not in kw_args["api_endpoint"]
- ):
- if credentials is None:
- credentials = AnonymousCredentials()
- if project is None:
- project = _get_environ_project()
- if project is None:
- no_project = True
- project = "<none>"
+ # If a custom endpoint is set, the client checks for credentials
+ # or finds the default credentials based on the current environment.
+ # Authentication may be bypassed under certain conditions:
+ # (1) STORAGE_EMULATOR_HOST is set (for backwards compatibility), OR
+ # (2) use_auth_w_custom_endpoint is set to False.
+ if kw_args["api_endpoint"] is not None:
+ if _is_emulator_set or not use_auth_w_custom_endpoint:
+ if credentials is None:
+ credentials = AnonymousCredentials()
+ if project is None:
+ project = _get_environ_project()
+ if project is None:
+ no_project = True
+ project = "<none>"
super(Client, self).__init__(
project=project,
@@ -897,7 +903,8 @@
project = self.project
# Use no project if STORAGE_EMULATOR_HOST is set
- if _BASE_STORAGE_URI not in _get_storage_host():
+ _is_emulator_set = _get_storage_host() != _DEFAULT_STORAGE_HOST
+ if _is_emulator_set:
if project is None:
project = _get_environ_project()
if project is None:
@@ -1327,7 +1334,8 @@
project = self.project
# Use no project if STORAGE_EMULATOR_HOST is set
- if _BASE_STORAGE_URI not in _get_storage_host():
+ _is_emulator_set = _get_storage_host() != _DEFAULT_STORAGE_HOST
+ if _is_emulator_set:
if project is None:
project = _get_environ_project()
if project is None:
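Taken together, the constructor now authenticates by default even against a custom endpoint, and the new flag opts out. A short sketch mirroring the unit tests added further below (the endpoint is a placeholder):

from google.cloud import storage

# Default: credentials are resolved as usual for a custom endpoint.
client = storage.Client(
    client_options={"api_endpoint": "https://storage-example.p.googleapis.com"}
)

# Opt out: anonymous credentials, no credential or project lookup.
anon_client = storage.Client(
    client_options={"api_endpoint": "https://storage-example.p.googleapis.com"},
    use_auth_w_custom_endpoint=False,
)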
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/google-cloud-storage-2.6.0/google/cloud/storage/constants.py new/google-cloud-storage-2.7.0/google/cloud/storage/constants.py
--- old/google-cloud-storage-2.6.0/google/cloud/storage/constants.py 2022-11-07 22:49:21.000000000 +0100
+++ new/google-cloud-storage-2.7.0/google/cloud/storage/constants.py 2022-12-07 02:16:52.000000000 +0100
@@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+
"""Constants used across google.cloud.storage modules."""
# Storage classes
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/google-cloud-storage-2.6.0/google/cloud/storage/fileio.py new/google-cloud-storage-2.7.0/google/cloud/storage/fileio.py
--- old/google-cloud-storage-2.6.0/google/cloud/storage/fileio.py 2022-11-07 22:49:21.000000000 +0100
+++ new/google-cloud-storage-2.7.0/google/cloud/storage/fileio.py 2022-12-07 02:16:52.000000000 +0100
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Support for file-like I/O."""
+"""Module for file-like access of blobs, usually invoked via Blob.open()."""
import io
import warnings
@@ -101,10 +101,12 @@
- ``if_metageneration_match``
- ``if_metageneration_not_match``
- ``timeout``
+
+    Note that download_kwargs are also applied to blob.reload(), if a reload
+ is needed during seek().
"""
    def __init__(self, blob, chunk_size=None, retry=DEFAULT_RETRY, **download_kwargs):
- """docstring note that download_kwargs also used for reload()"""
for kwarg in download_kwargs:
if kwarg not in VALID_DOWNLOAD_KWARGS:
raise ValueError(
@@ -209,9 +211,9 @@
def close(self):
self._buffer.close()
- def _checkClosed(self):
- if self._buffer.closed:
- raise ValueError("I/O operation on closed file.")
+ @property
+ def closed(self):
+ return self._buffer.closed
def readable(self):
return True
@@ -429,9 +431,9 @@
self._upload_chunks_from_buffer(1)
self._buffer.close()
- def _checkClosed(self):
- if self._buffer.closed:
- raise ValueError("I/O operation on closed file.")
+ @property
+ def closed(self):
+ return self._buffer.closed
def readable(self):
return False
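Net effect of the fileio hunks: BlobReader and BlobWriter gain the standard io-style closed property in place of the private _checkClosed() helper. A small sketch with placeholder bucket and object names:

from google.cloud import storage

blob = storage.Client().bucket("example-bucket").blob("notes.txt")

f = blob.open("rb")  # returns a BlobReader
assert f.closed is False
f.close()
assert f.closed is True  # public property, new in 2.7.0
# f.read() would now raise ValueError("I/O operation on closed file.")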
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/google-cloud-storage-2.6.0/google/cloud/storage/transfer_manager.py new/google-cloud-storage-2.7.0/google/cloud/storage/transfer_manager.py
--- old/google-cloud-storage-2.6.0/google/cloud/storage/transfer_manager.py 1970-01-01 01:00:00.000000000 +0100
+++ new/google-cloud-storage-2.7.0/google/cloud/storage/transfer_manager.py 2022-12-07 02:16:52.000000000 +0100
@@ -0,0 +1,501 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Concurrent media operations. This is a PREVIEW FEATURE: API may change."""
+
+import concurrent.futures
+
+import os
+import warnings
+
+from google.api_core import exceptions
+
+warnings.warn(
+    "The module `transfer_manager` is a preview feature. Functionality and API "
+ "may change. This warning will be removed in a future release."
+)
+
+
+DEFAULT_CHUNK_SIZE = 200 * 1024 * 1024
+
+
+def upload_many(
+ file_blob_pairs,
+ skip_if_exists=False,
+ upload_kwargs=None,
+ threads=4,
+ deadline=None,
+ raise_exception=False,
+):
+ """Upload many files concurrently via a worker pool.
+
+ This function is a PREVIEW FEATURE: the API may change in a future version.
+
+    :type file_blob_pairs: List(Tuple(IOBase or str, 'google.cloud.storage.blob.Blob'))
+ :param file_blob_pairs:
+ A list of tuples of a file or filename and a blob. Each file will be
+ uploaded to the corresponding blob by using blob.upload_from_file() or
+ blob.upload_from_filename() as appropriate.
+
+ :type skip_if_exists: bool
+ :param skip_if_exists:
+        If True, blobs that already have a live version will not be overwritten.
+ This is accomplished by setting "if_generation_match = 0" on uploads.
+ Uploads so skipped will result in a 412 Precondition Failed response
+ code, which will be included in the return value but not raised
+ as an exception regardless of the value of raise_exception.
+
+ :type upload_kwargs: dict
+ :param upload_kwargs:
+ A dictionary of keyword arguments to pass to the upload method. Refer
+ to the documentation for blob.upload_from_file() or
+ blob.upload_from_filename() for more information. The dict is directly
+ passed into the upload methods and is not validated by this function.
+
+ :type threads: int
+ :param threads:
+ The number of threads to use in the worker pool. This is passed to
+ `concurrent.futures.ThreadPoolExecutor` as the `max_worker`; refer
+ to standard library documentation for details.
+
+ The performance impact of this value depends on the use case, but
+ generally, smaller files benefit from more threads and larger files
+ don't benefit from more threads. Too many threads can slow operations,
+ especially with large files, due to contention over the Python GIL.
+
+ :type deadline: int
+ :param deadline:
+ The number of seconds to wait for all threads to resolve. If the
+ deadline is reached, all threads will be terminated regardless of their
+        progress and concurrent.futures.TimeoutError will be raised. This can be
+ left as the default of None (no deadline) for most use cases.
+
+ :type raise_exception: bool
+ :param raise_exception:
+ If True, instead of adding exceptions to the list of return values,
+ instead they will be raised. Note that encountering an exception on one
+ operation will not prevent other operations from starting. Exceptions
+ are only processed and potentially raised after all operations are
+ complete in success or failure.
+
+ If skip_if_exists is True, 412 Precondition Failed responses are
+ considered part of normal operation and are not raised as an exception.
+
+ :raises: :exc:`concurrent.futures.TimeoutError` if deadline is exceeded.
+
+ :rtype: list
+ :returns: A list of results corresponding to, in order, each item in the
+ input list. If an exception was received, it will be the result
+ for that operation. Otherwise, the return value from the successful
+ upload method is used (typically, None).
+ """
+ if upload_kwargs is None:
+ upload_kwargs = {}
+ if skip_if_exists:
+ upload_kwargs["if_generation_match"] = 0
+
+    with concurrent.futures.ThreadPoolExecutor(max_workers=threads) as executor:
+ futures = []
+ for path_or_file, blob in file_blob_pairs:
+ method = (
+ blob.upload_from_filename
+ if isinstance(path_or_file, str)
+ else blob.upload_from_file
+ )
+            futures.append(executor.submit(method, path_or_file, **upload_kwargs))
+ results = []
+ concurrent.futures.wait(
+ futures, timeout=deadline, return_when=concurrent.futures.ALL_COMPLETED
+ )
+ for future in futures:
+ exp = future.exception()
+
+ # If raise_exception is False, don't call future.result()
+ if exp and not raise_exception:
+ results.append(exp)
+ # If skip_if_exists and the exception is PreconditionFailed, do same.
+        elif exp and skip_if_exists and isinstance(exp, exceptions.PreconditionFailed):
+ results.append(exp)
+ # Get the real result. If there was an exception not handled above,
+ # this will raise it.
+ else:
+ results.append(future.result())
+ return results
+
+
+def download_many(
+ blob_file_pairs,
+ download_kwargs=None,
+ threads=4,
+ deadline=None,
+ raise_exception=False,
+):
+ """Download many blobs concurrently via a worker pool.
+
+ This function is a PREVIEW FEATURE: the API may change in a future version.
+
+    :type blob_file_pairs: List(Tuple('google.cloud.storage.blob.Blob', IOBase or str))
+ :param blob_file_pairs:
+ A list of tuples of blob and a file or filename. Each blob will be
+        downloaded to the corresponding blob by using blob.download_to_file() or
+ blob.download_to_filename() as appropriate.
+
+ Note that blob.download_to_filename() does not delete the destination
+ file if the download fails.
+
+ :type download_kwargs: dict
+ :param download_kwargs:
+ A dictionary of keyword arguments to pass to the download method. Refer
+ to the documentation for blob.download_to_file() or
+ blob.download_to_filename() for more information. The dict is directly
+ passed into the download methods and is not validated by this function.
+
+ :type threads: int
+ :param threads:
+ The number of threads to use in the worker pool. This is passed to
+ `concurrent.futures.ThreadPoolExecutor` as the `max_worker`; refer
+ to standard library documentation for details.
+
+ The performance impact of this value depends on the use case, but
+ generally, smaller files benefit from more threads and larger files
+ don't benefit from more threads. Too many threads can slow operations,
+ especially with large files, due to contention over the Python GIL.
+
+ :type deadline: int
+ :param deadline:
+ The number of seconds to wait for all threads to resolve. If the
+ deadline is reached, all threads will be terminated regardless of their
+        progress and concurrent.futures.TimeoutError will be raised. This can be
+ left as the default of None (no deadline) for most use cases.
+
+ :type raise_exception: bool
+ :param raise_exception:
+ If True, instead of adding exceptions to the list of return values,
+ instead they will be raised. Note that encountering an exception on one
+ operation will not prevent other operations from starting. Exceptions
+ are only processed and potentially raised after all operations are
+ complete in success or failure.
+
+ :raises: :exc:`concurrent.futures.TimeoutError` if deadline is exceeded.
+
+ :rtype: list
+ :returns: A list of results corresponding to, in order, each item in the
+ input list. If an exception was received, it will be the result
+ for that operation. Otherwise, the return value from the successful
+ download method is used (typically, None).
+ """
+
+ if download_kwargs is None:
+ download_kwargs = {}
+    with concurrent.futures.ThreadPoolExecutor(max_workers=threads) as executor:
+ futures = []
+ for blob, path_or_file in blob_file_pairs:
+ method = (
+ blob.download_to_filename
+ if isinstance(path_or_file, str)
+ else blob.download_to_file
+ )
+            futures.append(executor.submit(method, path_or_file, **download_kwargs))
+ results = []
+ concurrent.futures.wait(
+ futures, timeout=deadline, return_when=concurrent.futures.ALL_COMPLETED
+ )
+ for future in futures:
+ if not raise_exception:
+ exp = future.exception()
+ if exp:
+ results.append(exp)
+ continue
+ results.append(future.result())
+ return results
+
+
+def upload_many_from_filenames(
+ bucket,
+ filenames,
+ source_directory="",
+ blob_name_prefix="",
+ skip_if_exists=False,
+ blob_constructor_kwargs=None,
+ upload_kwargs=None,
+ threads=4,
+ deadline=None,
+ raise_exception=False,
+):
+ """Upload many files concurrently by their filenames.
+
+ This function is a PREVIEW FEATURE: the API may change in a future version.
+
+ The destination blobs are automatically created, with blob names based on
+ the source filenames and the blob_name_prefix.
+
+ For example, if the `filenames` include "images/icon.jpg",
+    `source_directory` is "/home/myuser/", and `blob_name_prefix` is "myfiles/",
+ then the file at "/home/myuser/images/icon.jpg" will be uploaded to a blob
+ named "myfiles/images/icon.jpg".
+
+ :type bucket: 'google.cloud.storage.bucket.Bucket'
+ :param bucket:
+ The bucket which will contain the uploaded blobs.
+
+ :type filenames: list(str)
+ :param filenames:
+ A list of filenames to be uploaded. This may include part of the path.
+ The full path to the file must be source_directory + filename.
+
+ :type source_directory: str
+ :param source_directory:
+ A string that will be prepended (with os.path.join()) to each filename
+ in the input list, in order to find the source file for each blob.
+ Unlike the filename itself, the source_directory does not affect the
+ name of the uploaded blob.
+
+ For instance, if the source_directory is "/tmp/img/" and a filename is
+ "0001.jpg", with an empty blob_name_prefix, then the file uploaded will
+ be "/tmp/img/0001.jpg" and the destination blob will be "0001.jpg".
+
+ This parameter can be an empty string.
+
+ Note that this parameter allows directory traversal (e.g. "/", "../")
+ and is not intended for unsanitized end user input.
+
+ :type blob_name_prefix: str
+ :param blob_name_prefix:
+ A string that will be prepended to each filename in the input list, in
+        order to determine the name of the destination blob. Unlike the filename
+ itself, the prefix string does not affect the location the library will
+ look for the source data on the local filesystem.
+
+ For instance, if the source_directory is "/tmp/img/", the
+ blob_name_prefix is "myuser/mystuff-" and a filename is "0001.jpg" then
+ the file uploaded will be "/tmp/img/0001.jpg" and the destination blob
+ will be "myuser/mystuff-0001.jpg".
+
+ The blob_name_prefix can be blank (an empty string).
+
+ :type skip_if_exists: bool
+ :param skip_if_exists:
+        If True, blobs that already have a live version will not be overwritten.
+ This is accomplished by setting "if_generation_match = 0" on uploads.
+ Uploads so skipped will result in a 412 Precondition Failed response
+ code, which will be included in the return value, but not raised
+ as an exception regardless of the value of raise_exception.
+
+ :type blob_constructor_kwargs: dict
+ :param blob_constructor_kwargs:
+        A dictionary of keyword arguments to pass to the blob constructor. Refer
+ to the documentation for blob.Blob() for more information. The dict is
+ directly passed into the constructor and is not validated by this
+ function. `name` and `bucket` keyword arguments are reserved by this
+ function and will result in an error if passed in here.
+
+ :type upload_kwargs: dict
+ :param upload_kwargs:
+ A dictionary of keyword arguments to pass to the upload method. Refer
+ to the documentation for blob.upload_from_file() or
+ blob.upload_from_filename() for more information. The dict is directly
+ passed into the upload methods and is not validated by this function.
+
+ :type threads: int
+ :param threads:
+ The number of threads to use in the worker pool. This is passed to
+ `concurrent.futures.ThreadPoolExecutor` as the `max_worker`; refer
+ to standard library documentation for details.
+
+ The performance impact of this value depends on the use case, but
+ generally, smaller files benefit from more threads and larger files
+ don't benefit from more threads. Too many threads can slow operations,
+ especially with large files, due to contention over the Python GIL.
+
+ :type deadline: int
+ :param deadline:
+ The number of seconds to wait for all threads to resolve. If the
+ deadline is reached, all threads will be terminated regardless of their
+        progress and concurrent.futures.TimeoutError will be raised. This can be
+ left as the default of None (no deadline) for most use cases.
+
+ :type raise_exception: bool
+ :param raise_exception:
+ If True, instead of adding exceptions to the list of return values,
+ instead they will be raised. Note that encountering an exception on one
+ operation will not prevent other operations from starting. Exceptions
+ are only processed and potentially raised after all operations are
+ complete in success or failure.
+
+ If skip_if_exists is True, 412 Precondition Failed responses are
+ considered part of normal operation and are not raised as an exception.
+
+ :raises: :exc:`concurrent.futures.TimeoutError` if deadline is exceeded.
+
+ :rtype: list
+ :returns: A list of results corresponding to, in order, each item in the
+ input list. If an exception was received, it will be the result
+ for that operation. Otherwise, the return value from the successful
+ upload method is used (typically, None).
+ """
+ if blob_constructor_kwargs is None:
+ blob_constructor_kwargs = {}
+
+ file_blob_pairs = []
+
+ for filename in filenames:
+ path = os.path.join(source_directory, filename)
+ blob_name = blob_name_prefix + filename
+ blob = bucket.blob(blob_name, **blob_constructor_kwargs)
+ file_blob_pairs.append((path, blob))
+
+ return upload_many(
+ file_blob_pairs,
+ skip_if_exists=skip_if_exists,
+ upload_kwargs=upload_kwargs,
+ threads=threads,
+ deadline=deadline,
+ raise_exception=raise_exception,
+ )
+
+
+def download_many_to_path(
+ bucket,
+ blob_names,
+ destination_directory="",
+ blob_name_prefix="",
+ download_kwargs=None,
+ threads=4,
+ deadline=None,
+ create_directories=True,
+ raise_exception=False,
+):
+ """Download many files concurrently by their blob names.
+
+ This function is a PREVIEW FEATURE: the API may change in a future version.
+
+ The destination files are automatically created, with paths based on the
+ source blob_names and the destination_directory.
+
+    The destination files are not automatically deleted if their downloads fail,
+ so please check the return value of this function for any exceptions, or
+ enable `raise_exception=True`, and process the files accordingly.
+
+    For example, if the `blob_names` include "icon.jpg", `destination_directory`
+    is "/home/myuser/", and `blob_name_prefix` is "images/", then the blob named
+ "images/icon.jpg" will be downloaded to a file named
+ "/home/myuser/icon.jpg".
+
+ :type bucket: 'google.cloud.storage.bucket.Bucket'
+ :param bucket:
+ The bucket which contains the blobs to be downloaded
+
+ :type blob_names: list(str)
+ :param blob_names:
+ A list of blobs to be downloaded. The blob name in this string will be
+ used to determine the destination file path as well.
+
+ The full name to the blob must be blob_name_prefix + blob_name. The
+ blob_name is separate from the blob_name_prefix because the blob_name
+        will also determine the name of the destination blob. Any shared part of
+ the blob names that need not be part of the destination path should be
+ included in the blob_name_prefix.
+
+ :type destination_directory: str
+ :param destination_directory:
+ A string that will be prepended (with os.path.join()) to each blob_name
+ in the input list, in order to determine the destination path for that
+ blob.
+
+ For instance, if the destination_directory string is "/tmp/img" and a
+        blob_name is "0001.jpg", with an empty blob_name_prefix, then the source
+ blob "0001.jpg" will be downloaded to destination "/tmp/img/0001.jpg" .
+
+ This parameter can be an empty string.
+
+ Note that this parameter allows directory traversal (e.g. "/", "../")
+ and is not intended for unsanitized end user input.
+
+ :type blob_name_prefix: str
+ :param blob_name_prefix:
+ A string that will be prepended to each blob_name in the input list, in
+ order to determine the name of the source blob. Unlike the blob_name
+ itself, the prefix string does not affect the destination path on the
+ local filesystem. For instance, if the destination_directory is
+ "/tmp/img/", the blob_name_prefix is "myuser/mystuff-" and a blob_name
+ is "0001.jpg" then the source blob "myuser/mystuff-0001.jpg" will be
+ downloaded to "/tmp/img/0001.jpg". The blob_name_prefix can be blank
+ (an empty string).
+
+ :type download_kwargs: dict
+ :param download_kwargs:
+ A dictionary of keyword arguments to pass to the download method. Refer
+ to the documentation for blob.download_to_file() or
+ blob.download_to_filename() for more information. The dict is directly
+ passed into the download methods and is not validated by this function.
+
+ :type threads: int
+ :param threads:
+ The number of threads to use in the worker pool. This is passed to
+        `concurrent.futures.ThreadPoolExecutor` as the `max_worker` param; refer
+ to standard library documentation for details.
+
+ The performance impact of this value depends on the use case, but
+ generally, smaller files benefit from more threads and larger files
+ don't benefit from more threads. Too many threads can slow operations,
+ especially with large files, due to contention over the Python GIL.
+
+ :type deadline: int
+ :param deadline:
+ The number of seconds to wait for all threads to resolve. If the
+ deadline is reached, all threads will be terminated regardless of their
+        progress and concurrent.futures.TimeoutError will be raised. This can be
+ left as the default of None (no deadline) for most use cases.
+
+ :type create_directories: bool
+ :param create_directories:
+ If True, recursively create any directories that do not exist. For
+ instance, if downloading object "images/img001.png", create the
+ directory "images" before downloading.
+
+ :type raise_exception: bool
+ :param raise_exception:
+ If True, instead of adding exceptions to the list of return values,
+ instead they will be raised. Note that encountering an exception on one
+ operation will not prevent other operations from starting. Exceptions
+ are only processed and potentially raised after all operations are
+ complete in success or failure. If skip_if_exists is True, 412
+ Precondition Failed responses are considered part of normal operation
+ and are not raised as an exception.
+
+ :raises: :exc:`concurrent.futures.TimeoutError` if deadline is exceeded.
+
+ :rtype: list
+ :returns: A list of results corresponding to, in order, each item in the
+ input list. If an exception was received, it will be the result
+ for that operation. Otherwise, the return value from the successful
+ download method is used (typically, None).
+ """
+ blob_file_pairs = []
+
+ for blob_name in blob_names:
+ full_blob_name = blob_name_prefix + blob_name
+ path = os.path.join(destination_directory, blob_name)
+ if create_directories:
+ directory, _ = os.path.split(path)
+ os.makedirs(directory, exist_ok=True)
+ blob_file_pairs.append((bucket.blob(full_blob_name), path))
+
+ return download_many(
+ blob_file_pairs,
+ download_kwargs=download_kwargs,
+ threads=threads,
+ deadline=deadline,
+ raise_exception=raise_exception,
+ )
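A brief usage sketch for the path-based helper defined above; the bucket and object names are placeholders, and importing the module prints the preview warning:

from google.cloud import storage
from google.cloud.storage import transfer_manager

bucket = storage.Client().bucket("example-bucket")

# Download two objects concurrently; with create_directories=True (the
# default), /tmp/dl/images is created as needed.
results = transfer_manager.download_many_to_path(
    bucket,
    ["images/a.png", "images/b.png"],
    destination_directory="/tmp/dl",
    threads=4,
)

# Each entry is None on success, or the exception from that one transfer.
failures = [r for r in results if r is not None]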
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/google-cloud-storage-2.6.0/google/cloud/storage/version.py new/google-cloud-storage-2.7.0/google/cloud/storage/version.py
--- old/google-cloud-storage-2.6.0/google/cloud/storage/version.py 2022-11-07 22:49:21.000000000 +0100
+++ new/google-cloud-storage-2.7.0/google/cloud/storage/version.py 2022-12-07 02:16:52.000000000 +0100
@@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-__version__ = "2.6.0"
+__version__ = "2.7.0"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/google-cloud-storage-2.6.0/google_cloud_storage.egg-info/PKG-INFO new/google-cloud-storage-2.7.0/google_cloud_storage.egg-info/PKG-INFO
--- old/google-cloud-storage-2.6.0/google_cloud_storage.egg-info/PKG-INFO 2022-11-07 22:52:41.000000000 +0100
+++ new/google-cloud-storage-2.7.0/google_cloud_storage.egg-info/PKG-INFO 2022-12-07 02:20:04.000000000 +0100
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: google-cloud-storage
-Version: 2.6.0
+Version: 2.7.0
Summary: Google Cloud Storage API client library
Home-page: https://github.com/googleapis/python-storage
Author: Google LLC
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/google-cloud-storage-2.6.0/google_cloud_storage.egg-info/SOURCES.txt new/google-cloud-storage-2.7.0/google_cloud_storage.egg-info/SOURCES.txt
--- old/google-cloud-storage-2.6.0/google_cloud_storage.egg-info/SOURCES.txt 2022-11-07 22:52:41.000000000 +0100
+++ new/google-cloud-storage-2.7.0/google_cloud_storage.egg-info/SOURCES.txt 2022-12-07 02:20:04.000000000 +0100
@@ -20,6 +20,7 @@
google/cloud/storage/iam.py
google/cloud/storage/notification.py
google/cloud/storage/retry.py
+google/cloud/storage/transfer_manager.py
google/cloud/storage/version.py
google_cloud_storage.egg-info/PKG-INFO
google_cloud_storage.egg-info/SOURCES.txt
@@ -50,6 +51,7 @@
tests/system/test_hmac_key_metadata.py
tests/system/test_kms_integration.py
tests/system/test_notification.py
+tests/system/test_transfer_manager.py
tests/unit/__init__.py
tests/unit/test__helpers.py
tests/unit/test__http.py
@@ -63,5 +65,6 @@
tests/unit/test_hmac_key.py
tests/unit/test_notification.py
tests/unit/test_retry.py
+tests/unit/test_transfer_manager.py
tests/unit/url_signer_v4_test_account.json
tests/unit/url_signer_v4_test_data.json
\ No newline at end of file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/google-cloud-storage-2.6.0/tests/system/test_bucket.py new/google-cloud-storage-2.7.0/tests/system/test_bucket.py
--- old/google-cloud-storage-2.6.0/tests/system/test_bucket.py 2022-11-07 22:49:21.000000000 +0100
+++ new/google-cloud-storage-2.7.0/tests/system/test_bucket.py 2022-12-07 02:16:52.000000000 +0100
@@ -626,7 +626,7 @@
buckets_to_delete,
blobs_to_delete,
):
- period_secs = 10
+ period_secs = 3
bucket_name = _helpers.unique_name("w-retention-period")
bucket = _helpers.retry_429_503(storage_client.create_bucket)(bucket_name)
buckets_to_delete.append(bucket)
@@ -679,6 +679,8 @@
assert not other.temporary_hold
assert other.retention_expiration_time is None
+    # Object can be deleted once it reaches the age defined in the retention policy.
+ _helpers.await_config_changes_propagate(sec=period_secs)
other.delete()
blobs_to_delete.pop()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/google-cloud-storage-2.6.0/tests/system/test_transfer_manager.py new/google-cloud-storage-2.7.0/tests/system/test_transfer_manager.py
--- old/google-cloud-storage-2.6.0/tests/system/test_transfer_manager.py 1970-01-01 01:00:00.000000000 +0100
+++ new/google-cloud-storage-2.7.0/tests/system/test_transfer_manager.py 2022-12-07 02:16:52.000000000 +0100
@@ -0,0 +1,84 @@
+# coding=utf-8
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import tempfile
+
+from google.cloud.storage import transfer_manager
+
+from google.api_core import exceptions
+
+
+def test_upload_many(shared_bucket, file_data, blobs_to_delete):
+ FILE_BLOB_PAIRS = [
+ (file_data["simple"]["path"], shared_bucket.blob("simple1")),
+ (file_data["simple"]["path"], shared_bucket.blob("simple2")),
+ ]
+
+ results = transfer_manager.upload_many(FILE_BLOB_PAIRS)
+ assert results == [None, None]
+
+ blobs = shared_bucket.list_blobs()
+ for blob in blobs:
+ if blob.name.startswith("simple"):
+ blobs_to_delete.append(blob)
+ assert len(blobs_to_delete) == 2
+
+
+def test_upload_many_with_file_objs(shared_bucket, file_data, blobs_to_delete):
+ FILE_BLOB_PAIRS = [
+        (open(file_data["simple"]["path"], "rb"), shared_bucket.blob("simple1")),
+        (open(file_data["simple"]["path"], "rb"), shared_bucket.blob("simple2")),
+ ]
+
+ results = transfer_manager.upload_many(FILE_BLOB_PAIRS)
+ assert results == [None, None]
+
+ blobs = shared_bucket.list_blobs()
+ for blob in blobs:
+ if blob.name.startswith("simple"):
+ blobs_to_delete.append(blob)
+ assert len(blobs_to_delete) == 2
+
+
+def test_upload_many_skip_if_exists(
+ listable_bucket, listable_filenames, file_data, blobs_to_delete
+):
+ FILE_BLOB_PAIRS = [
+        (file_data["logo"]["path"], listable_bucket.blob(listable_filenames[0])),
+ (file_data["simple"]["path"], listable_bucket.blob("simple")),
+ ]
+
+ results = transfer_manager.upload_many(
+ FILE_BLOB_PAIRS, skip_if_exists=True, raise_exception=True
+ )
+ assert isinstance(results[0], exceptions.PreconditionFailed)
+ assert results[1] is None
+
+ blobs = listable_bucket.list_blobs()
+ for blob in blobs:
+ if blob.name.startswith("simple"):
+ blobs_to_delete.append(blob)
+ assert len(blobs_to_delete) == 1
+
+
+def test_download_many(listable_bucket):
+ blobs = list(listable_bucket.list_blobs())
+ tempfiles = [tempfile.TemporaryFile(), tempfile.TemporaryFile()]
+ BLOB_FILE_PAIRS = zip(blobs[:2], tempfiles)
+
+ results = transfer_manager.download_many(BLOB_FILE_PAIRS)
+ assert results == [None, None]
+ for fp in tempfiles:
+ assert fp.tell() != 0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/google-cloud-storage-2.6.0/tests/unit/test_client.py new/google-cloud-storage-2.7.0/tests/unit/test_client.py
--- old/google-cloud-storage-2.6.0/tests/unit/test_client.py 2022-11-07 22:49:21.000000000 +0100
+++ new/google-cloud-storage-2.7.0/tests/unit/test_client.py 2022-12-07 02:16:52.000000000 +0100
@@ -28,9 +28,10 @@
from google.auth.credentials import AnonymousCredentials
from google.oauth2.service_account import Credentials
+from google.cloud.storage import _helpers
from google.cloud.storage._helpers import STORAGE_EMULATOR_ENV_VAR
from google.cloud.storage._helpers import _get_default_headers
-from google.cloud.storage import _helpers
+from google.cloud.storage._http import Connection
from google.cloud.storage.retry import DEFAULT_RETRY
from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED
from tests.unit.test__helpers import GCCL_INVOCATION_TEST_CONST
@@ -119,7 +120,6 @@
def test_ctor_connection_type(self):
from google.cloud._http import ClientInfo
- from google.cloud.storage._http import Connection
PROJECT = "PROJECT"
credentials = _make_credentials()
@@ -179,8 +179,6 @@
)
def test_ctor_wo_project(self):
- from google.cloud.storage._http import Connection
-
PROJECT = "PROJECT"
credentials = _make_credentials(project=PROJECT)
@@ -193,8 +191,6 @@
self.assertEqual(list(client._batch_stack), [])
def test_ctor_w_project_explicit_none(self):
- from google.cloud.storage._http import Connection
-
credentials = _make_credentials()
client = self._make_one(project=None, credentials=credentials)
@@ -207,7 +203,6 @@
def test_ctor_w_client_info(self):
from google.cloud._http import ClientInfo
- from google.cloud.storage._http import Connection
credentials = _make_credentials()
client_info = ClientInfo()
@@ -239,8 +234,40 @@
        self.assertEqual(client._connection.ALLOW_AUTO_SWITCH_TO_MTLS_URL, False)
self.assertEqual(client._connection.API_BASE_URL, "http://foo")
+ def test_ctor_w_custom_endpoint_use_auth(self):
+ custom_endpoint = "storage-example.p.googleapis.com"
+        client = self._make_one(client_options={"api_endpoint": custom_endpoint})
+ self.assertEqual(client._connection.API_BASE_URL, custom_endpoint)
+ self.assertIsNotNone(client.project)
+ self.assertIsInstance(client._connection, Connection)
+ self.assertIsNotNone(client._connection.credentials)
+        self.assertNotIsInstance(client._connection.credentials, AnonymousCredentials)
+
+ def test_ctor_w_custom_endpoint_bypass_auth(self):
+ custom_endpoint = "storage-example.p.googleapis.com"
+ client = self._make_one(
+ client_options={"api_endpoint": custom_endpoint},
+ use_auth_w_custom_endpoint=False,
+ )
+ self.assertEqual(client._connection.API_BASE_URL, custom_endpoint)
+ self.assertEqual(client.project, None)
+ self.assertIsInstance(client._connection, Connection)
+        self.assertIsInstance(client._connection.credentials, AnonymousCredentials)
+
+ def test_ctor_w_custom_endpoint_w_credentials(self):
+ PROJECT = "PROJECT"
+ custom_endpoint = "storage-example.p.googleapis.com"
+ credentials = _make_credentials(project=PROJECT)
+ client = self._make_one(
+            credentials=credentials, client_options={"api_endpoint": custom_endpoint}
+ )
+ self.assertEqual(client._connection.API_BASE_URL, custom_endpoint)
+ self.assertEqual(client.project, PROJECT)
+ self.assertIsInstance(client._connection, Connection)
+ self.assertIs(client._connection.credentials, credentials)
+
def test_ctor_w_emulator_wo_project(self):
- # avoids authentication if STORAGE_EMULATOR_ENV_VAR is set
+ # bypasses authentication if STORAGE_EMULATOR_ENV_VAR is set
host = "http://localhost:8080"
environ = {STORAGE_EMULATOR_ENV_VAR: host}
with mock.patch("os.environ", environ):
@@ -250,16 +277,8 @@
self.assertEqual(client._connection.API_BASE_URL, host)
        self.assertIsInstance(client._connection.credentials, AnonymousCredentials)
- # avoids authentication if storage emulator is set through api_endpoint
- client = self._make_one(
- client_options={"api_endpoint": "http://localhost:8080"}
- )
- self.assertIsNone(client.project)
- self.assertEqual(client._connection.API_BASE_URL, host)
-        self.assertIsInstance(client._connection.credentials, AnonymousCredentials)
-
def test_ctor_w_emulator_w_environ_project(self):
- # avoids authentication and infers the project from the environment
+ # bypasses authentication and infers the project from the environment
host = "http://localhost:8080"
environ_project = "environ-project"
environ = {
@@ -289,9 +308,17 @@
self.assertEqual(client._connection.API_BASE_URL, host)
        self.assertIsInstance(client._connection.credentials, AnonymousCredentials)
- def test_create_anonymous_client(self):
- from google.cloud.storage._http import Connection
+ def test_ctor_w_emulator_w_credentials(self):
+ host = "http://localhost:8080"
+ environ = {STORAGE_EMULATOR_ENV_VAR: host}
+ credentials = _make_credentials()
+ with mock.patch("os.environ", environ):
+ client = self._make_one(credentials=credentials)
+ self.assertEqual(client._connection.API_BASE_URL, host)
+ self.assertIs(client._connection.credentials, credentials)
+
+ def test_create_anonymous_client(self):
klass = self._get_target_class()
client = klass.create_anonymous_client()
@@ -1269,6 +1296,28 @@
_target_object=bucket,
)
+ def test_create_bucket_w_custom_endpoint(self):
+ custom_endpoint = "storage-example.p.googleapis.com"
+        client = self._make_one(client_options={"api_endpoint": custom_endpoint})
+ bucket_name = "bucket-name"
+ api_response = {"name": bucket_name}
+ client._post_resource = mock.Mock()
+ client._post_resource.return_value = api_response
+
+ bucket = client.create_bucket(bucket_name)
+
+ expected_path = "/b"
+ expected_data = api_response
+ expected_query_params = {"project": client.project}
+ client._post_resource.assert_called_once_with(
+ expected_path,
+ expected_data,
+ query_params=expected_query_params,
+ timeout=self._get_default_timeout(),
+ retry=DEFAULT_RETRY,
+ _target_object=bucket,
+ )
+
def test_create_bucket_w_conflict_w_user_project(self):
from google.cloud.exceptions import Conflict
@@ -2045,6 +2094,37 @@
"projection": "noAcl",
}
client._list_resource.assert_called_once_with(
+ expected_path,
+ expected_item_to_value,
+ page_token=expected_page_token,
+ max_results=expected_max_results,
+ extra_params=expected_extra_params,
+ page_size=expected_page_size,
+ timeout=self._get_default_timeout(),
+ retry=DEFAULT_RETRY,
+ )
+
+ def test_list_buckets_w_custom_endpoint(self):
+ from google.cloud.storage.client import _item_to_bucket
+
+ custom_endpoint = "storage-example.p.googleapis.com"
+        client = self._make_one(client_options={"api_endpoint": custom_endpoint})
+ client._list_resource = mock.Mock(spec=[])
+
+ iterator = client.list_buckets()
+
+ self.assertIs(iterator, client._list_resource.return_value)
+
+ expected_path = "/b"
+ expected_item_to_value = _item_to_bucket
+ expected_page_token = None
+ expected_max_results = None
+ expected_page_size = None
+ expected_extra_params = {
+ "project": client.project,
+ "projection": "noAcl",
+ }
+ client._list_resource.assert_called_once_with(
expected_path,
expected_item_to_value,
page_token=expected_page_token,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/google-cloud-storage-2.6.0/tests/unit/test_fileio.py new/google-cloud-storage-2.7.0/tests/unit/test_fileio.py
--- old/google-cloud-storage-2.6.0/tests/unit/test_fileio.py 2022-11-07 22:49:21.000000000 +0100
+++ new/google-cloud-storage-2.7.0/tests/unit/test_fileio.py 2022-12-07 02:16:52.000000000 +0100
@@ -287,6 +287,7 @@
reader = self._make_blob_reader(blob)
reader.close()
+ self.assertTrue(reader.closed)
with self.assertRaises(ValueError):
reader.read()
@@ -415,6 +416,8 @@
writer.close()
# Close a second time to verify it successfully does nothing.
writer.close()
+
+ self.assertTrue(writer.closed)
# Try to write to closed file.
with self.assertRaises(ValueError):
writer.write(TEST_BINARY_DATA)
@@ -767,6 +770,7 @@
def test_close(self):
buff = self._make_sliding_buffer()
buff.close()
+ self.assertTrue(buff.closed)
with self.assertRaises(ValueError):
buff.read()
@@ -913,6 +917,7 @@
reader = self._make_blob_reader(blob)
reader.close()
+ self.assertTrue(reader.closed)
with self.assertRaises(ValueError):
reader.read()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/google-cloud-storage-2.6.0/tests/unit/test_transfer_manager.py new/google-cloud-storage-2.7.0/tests/unit/test_transfer_manager.py
--- old/google-cloud-storage-2.6.0/tests/unit/test_transfer_manager.py 1970-01-01 01:00:00.000000000 +0100
+++ new/google-cloud-storage-2.7.0/tests/unit/test_transfer_manager.py 2022-12-07 02:16:52.000000000 +0100
@@ -0,0 +1,335 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+with pytest.warns(UserWarning):
+ from google.cloud.storage import transfer_manager
+
+from google.api_core import exceptions
+
+import os
+import tempfile
+import unittest
+import mock
+
+
+class Test_Transfer_Manager(unittest.TestCase):
+ def test_upload_many_with_filenames(self):
+ FILE_BLOB_PAIRS = [("file_a.txt", mock.Mock()), ("file_b.txt",
mock.Mock())]
+ FAKE_CONTENT_TYPE = "text/fake"
+ UPLOAD_KWARGS = {"content-type": FAKE_CONTENT_TYPE}
+ EXPECTED_UPLOAD_KWARGS = {"if_generation_match": 0, **UPLOAD_KWARGS}
+ FAKE_RESULT = "nothing to see here"
+
+ for _, blob_mock in FILE_BLOB_PAIRS:
+ blob_mock.upload_from_filename.return_value = FAKE_RESULT
+
+ results = transfer_manager.upload_many(
+ FILE_BLOB_PAIRS, skip_if_exists=True, upload_kwargs=UPLOAD_KWARGS
+ )
+ for (filename, mock_blob) in FILE_BLOB_PAIRS:
+ mock_blob.upload_from_filename.assert_any_call(
+ filename, **EXPECTED_UPLOAD_KWARGS
+ )
+ for result in results:
+ self.assertEqual(result, FAKE_RESULT)
+
+ def test_upload_many_with_file_objs(self):
+ FILE_BLOB_PAIRS = [
+ (tempfile.TemporaryFile(), mock.Mock()),
+ (tempfile.TemporaryFile(), mock.Mock()),
+ ]
+ FAKE_CONTENT_TYPE = "text/fake"
+ UPLOAD_KWARGS = {"content-type": FAKE_CONTENT_TYPE}
+ EXPECTED_UPLOAD_KWARGS = {"if_generation_match": 0, **UPLOAD_KWARGS}
+ FAKE_RESULT = "nothing to see here"
+
+ for _, blob_mock in FILE_BLOB_PAIRS:
+ blob_mock.upload_from_file.return_value = FAKE_RESULT
+
+ results = transfer_manager.upload_many(
+ FILE_BLOB_PAIRS, skip_if_exists=True, upload_kwargs=UPLOAD_KWARGS
+ )
+ for (file, mock_blob) in FILE_BLOB_PAIRS:
+ mock_blob.upload_from_file.assert_any_call(file, **EXPECTED_UPLOAD_KWARGS)
+ for result in results:
+ self.assertEqual(result, FAKE_RESULT)
+
+ def test_upload_many_passes_concurrency_options(self):
+ FILE_BLOB_PAIRS = [
+ (tempfile.TemporaryFile(), mock.Mock()),
+ (tempfile.TemporaryFile(), mock.Mock()),
+ ]
+ MAX_WORKERS = 7
+ DEADLINE = 10
+ with mock.patch(
+ "concurrent.futures.ThreadPoolExecutor"
+ ) as pool_patch, mock.patch("concurrent.futures.wait") as wait_patch:
+ transfer_manager.upload_many(
+ FILE_BLOB_PAIRS, threads=MAX_WORKERS, deadline=DEADLINE
+ )
+ pool_patch.assert_called_with(max_workers=MAX_WORKERS)
+ wait_patch.assert_called_with(
+ mock.ANY, timeout=DEADLINE, return_when=mock.ANY
+ )
+
+ def test_upload_many_suppresses_exceptions(self):
+ FILE_BLOB_PAIRS = [("file_a.txt", mock.Mock()), ("file_b.txt",
mock.Mock())]
+ for _, mock_blob in FILE_BLOB_PAIRS:
+ mock_blob.upload_from_filename.side_effect = ConnectionError()
+
+ results = transfer_manager.upload_many(FILE_BLOB_PAIRS)
+ for result in results:
+ self.assertEqual(type(result), ConnectionError)
+
+ def test_upload_many_raises_exceptions(self):
+ FILE_BLOB_PAIRS = [("file_a.txt", mock.Mock()), ("file_b.txt",
mock.Mock())]
+ for _, mock_blob in FILE_BLOB_PAIRS:
+ mock_blob.upload_from_filename.side_effect = ConnectionError()
+
+ with self.assertRaises(ConnectionError):
+ transfer_manager.upload_many(FILE_BLOB_PAIRS, raise_exception=True)
+
+ def test_upload_many_suppresses_412_with_skip_if_exists(self):
+ FILE_BLOB_PAIRS = [("file_a.txt", mock.Mock()), ("file_b.txt",
mock.Mock())]
+ for _, mock_blob in FILE_BLOB_PAIRS:
+ mock_blob.upload_from_filename.side_effect = exceptions.PreconditionFailed(
+ "412"
+ )
+
+ results = transfer_manager.upload_many(
+ FILE_BLOB_PAIRS, skip_if_exists=True, raise_exception=True
+ )
+ for result in results:
+ self.assertEqual(type(result), exceptions.PreconditionFailed)
+
+ def test_download_many_with_filenames(self):
+ BLOB_FILE_PAIRS = [(mock.Mock(), "file_a.txt"), (mock.Mock(), "file_b.txt")]
+ FAKE_ENCODING = "fake_gzip"
+ DOWNLOAD_KWARGS = {"accept-encoding": FAKE_ENCODING}
+ FAKE_RESULT = "nothing to see here"
+
+ for blob_mock, _ in BLOB_FILE_PAIRS:
+ blob_mock.download_to_filename.return_value = FAKE_RESULT
+
+ results = transfer_manager.download_many(
+ BLOB_FILE_PAIRS, download_kwargs=DOWNLOAD_KWARGS
+ )
+ for (mock_blob, file) in BLOB_FILE_PAIRS:
+ mock_blob.download_to_filename.assert_any_call(file, **DOWNLOAD_KWARGS)
+ for result in results:
+ self.assertEqual(result, FAKE_RESULT)
+
+ def test_download_many_with_file_objs(self):
+ BLOB_FILE_PAIRS = [
+ (mock.Mock(), tempfile.TemporaryFile()),
+ (mock.Mock(), tempfile.TemporaryFile()),
+ ]
+ FAKE_ENCODING = "fake_gzip"
+ DOWNLOAD_KWARGS = {"accept-encoding": FAKE_ENCODING}
+ FAKE_RESULT = "nothing to see here"
+
+ for blob_mock, _ in BLOB_FILE_PAIRS:
+ blob_mock.download_to_file.return_value = FAKE_RESULT
+
+ results = transfer_manager.download_many(
+ BLOB_FILE_PAIRS, download_kwargs=DOWNLOAD_KWARGS
+ )
+ for (mock_blob, file) in BLOB_FILE_PAIRS:
+ mock_blob.download_to_file.assert_any_call(file, **DOWNLOAD_KWARGS)
+ for result in results:
+ self.assertEqual(result, FAKE_RESULT)
+
+ def test_download_many_passes_concurrency_options(self):
+ BLOB_FILE_PAIRS = [
+ (mock.Mock(), tempfile.TemporaryFile()),
+ (mock.Mock(), tempfile.TemporaryFile()),
+ ]
+ MAX_WORKERS = 7
+ DEADLINE = 10
+ with mock.patch(
+ "concurrent.futures.ThreadPoolExecutor"
+ ) as pool_patch, mock.patch("concurrent.futures.wait") as wait_patch:
+ transfer_manager.download_many(
+ BLOB_FILE_PAIRS, threads=MAX_WORKERS, deadline=DEADLINE
+ )
+ pool_patch.assert_called_with(max_workers=MAX_WORKERS)
+ wait_patch.assert_called_with(
+ mock.ANY, timeout=DEADLINE, return_when=mock.ANY
+ )
+
+ def test_download_many_suppresses_exceptions(self):
+ BLOB_FILE_PAIRS = [(mock.Mock(), "file_a.txt"), (mock.Mock(), "file_b.txt")]
+ for mock_blob, _ in BLOB_FILE_PAIRS:
+ mock_blob.download_to_filename.side_effect = ConnectionError()
+
+ results = transfer_manager.download_many(BLOB_FILE_PAIRS)
+ for result in results:
+ self.assertEqual(type(result), ConnectionError)
+
+ def test_download_many_raises_exceptions(self):
+ BLOB_FILE_PAIRS = [(mock.Mock(), "file_a.txt"), (mock.Mock(), "file_b.txt")]
+ for mock_blob, _ in BLOB_FILE_PAIRS:
+ mock_blob.download_to_filename.side_effect = ConnectionError()
+
+ transfer_manager.download_many(BLOB_FILE_PAIRS)
+ with self.assertRaises(ConnectionError):
+ transfer_manager.download_many(BLOB_FILE_PAIRS, raise_exception=True)
+
+ def test_upload_many_from_filenames(self):
+ bucket = mock.Mock()
+
+ FILENAMES = ["file_a.txt", "file_b.txt"]
+ ROOT = "mypath/"
+ PREFIX = "myprefix/"
+ KEY_NAME = "keyname"
+ BLOB_CONSTRUCTOR_KWARGS = {"kms_key_name": KEY_NAME}
+ UPLOAD_KWARGS = {"content-type": "text/fake"}
+ MAX_WORKERS = 7
+ DEADLINE = 10
+
+ EXPECTED_FILE_BLOB_PAIRS = [
+ (os.path.join(ROOT, filename), mock.ANY) for filename in FILENAMES
+ ]
+
+ with mock.patch(
+ "google.cloud.storage.transfer_manager.upload_many"
+ ) as mock_upload_many:
+ transfer_manager.upload_many_from_filenames(
+ bucket,
+ FILENAMES,
+ source_directory=ROOT,
+ blob_name_prefix=PREFIX,
+ skip_if_exists=True,
+ blob_constructor_kwargs=BLOB_CONSTRUCTOR_KWARGS,
+ upload_kwargs=UPLOAD_KWARGS,
+ threads=MAX_WORKERS,
+ deadline=DEADLINE,
+ raise_exception=True,
+ )
+
+ mock_upload_many.assert_called_once_with(
+ EXPECTED_FILE_BLOB_PAIRS,
+ skip_if_exists=True,
+ upload_kwargs=UPLOAD_KWARGS,
+ threads=MAX_WORKERS,
+ deadline=DEADLINE,
+ raise_exception=True,
+ )
+ bucket.blob.assert_any_call(PREFIX + FILENAMES[0], **BLOB_CONSTRUCTOR_KWARGS)
+ bucket.blob.assert_any_call(PREFIX + FILENAMES[1], **BLOB_CONSTRUCTOR_KWARGS)
+
+ def test_upload_many_from_filenames_minimal_args(self):
+ bucket = mock.Mock()
+
+ FILENAMES = ["file_a.txt", "file_b.txt"]
+
+ EXPECTED_FILE_BLOB_PAIRS = [(filename, mock.ANY) for filename in FILENAMES]
+
+ with mock.patch(
+ "google.cloud.storage.transfer_manager.upload_many"
+ ) as mock_upload_many:
+ transfer_manager.upload_many_from_filenames(
+ bucket,
+ FILENAMES,
+ )
+
+ mock_upload_many.assert_called_once_with(
+ EXPECTED_FILE_BLOB_PAIRS,
+ skip_if_exists=False,
+ upload_kwargs=None,
+ threads=4,
+ deadline=None,
+ raise_exception=False,
+ )
+ bucket.blob.assert_any_call(FILENAMES[0])
+ bucket.blob.assert_any_call(FILENAMES[1])
+
+ def test_download_many_to_path(self):
+ bucket = mock.Mock()
+
+ BLOBNAMES = ["file_a.txt", "file_b.txt", "dir_a/file_c.txt"]
+ PATH_ROOT = "mypath/"
+ BLOB_NAME_PREFIX = "myprefix/"
+ DOWNLOAD_KWARGS = {"accept-encoding": "fake-gzip"}
+ MAX_WORKERS = 7
+ DEADLINE = 10
+
+ EXPECTED_BLOB_FILE_PAIRS = [
+ (mock.ANY, os.path.join(PATH_ROOT, blobname)) for blobname in BLOBNAMES
+ ]
+
+ with mock.patch(
+ "google.cloud.storage.transfer_manager.download_many"
+ ) as mock_download_many:
+ transfer_manager.download_many_to_path(
+ bucket,
+ BLOBNAMES,
+ destination_directory=PATH_ROOT,
+ blob_name_prefix=BLOB_NAME_PREFIX,
+ download_kwargs=DOWNLOAD_KWARGS,
+ threads=MAX_WORKERS,
+ deadline=DEADLINE,
+ create_directories=False,
+ raise_exception=True,
+ )
+
+ mock_download_many.assert_called_once_with(
+ EXPECTED_BLOB_FILE_PAIRS,
+ download_kwargs=DOWNLOAD_KWARGS,
+ threads=MAX_WORKERS,
+ deadline=DEADLINE,
+ raise_exception=True,
+ )
+ for blobname in BLOBNAMES:
+ bucket.blob.assert_any_call(BLOB_NAME_PREFIX + blobname)
+
+ def test_download_many_to_path_creates_directories(self):
+ bucket = mock.Mock()
+
+ with tempfile.TemporaryDirectory() as tempdir:
+ DIR_NAME = "dir_a/dir_b"
+ BLOBNAMES = [
+ "file_a.txt",
+ "file_b.txt",
+ os.path.join(DIR_NAME, "file_c.txt"),
+ ]
+
+ EXPECTED_BLOB_FILE_PAIRS = [
+ (mock.ANY, os.path.join(tempdir, blobname)) for blobname in BLOBNAMES
+ ]
+
+ with mock.patch(
+ "google.cloud.storage.transfer_manager.download_many"
+ ) as mock_download_many:
+ transfer_manager.download_many_to_path(
+ bucket,
+ BLOBNAMES,
+ destination_directory=tempdir,
+ create_directories=True,
+ raise_exception=True,
+ )
+
+ mock_download_many.assert_called_once_with(
+ EXPECTED_BLOB_FILE_PAIRS,
+ download_kwargs=None,
+ threads=4,
+ deadline=None,
+ raise_exception=True,
+ )
+ for blobname in BLOBNAMES:
+ bucket.blob.assert_any_call(blobname)
+
+ assert os.path.isdir(os.path.join(tempdir, DIR_NAME))
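
The test_transfer_manager.py file above pins down the preview API added in
2.7.0 (#943): upload_many()/download_many() take sequences of (file, blob)
or (blob, file) pairs, upload_many_from_filenames() and
download_many_to_path() build those pairs from a bucket plus names, threads
defaults to 4, deadline is handed to concurrent.futures.wait(timeout=...),
skip_if_exists maps to if_generation_match=0, and importing the module emits
a UserWarning because it is a preview feature. A minimal usage sketch under
those assumptions (the bucket, directory, and file names are illustrative):

    # Sketch: concurrent uploads and downloads via the preview module.
    from google.cloud import storage
    from google.cloud.storage import transfer_manager  # warns: preview API

    client = storage.Client()
    bucket = client.bucket("my-bucket")

    # Upload local files from mypath/; existing objects are skipped
    # (skip_if_exists sends if_generation_match=0 with each upload).
    transfer_manager.upload_many_from_filenames(
        bucket,
        ["file_a.txt", "file_b.txt"],
        source_directory="mypath/",
        skip_if_exists=True,
        threads=4,       # size of the ThreadPoolExecutor
        deadline=None,   # overall timeout for concurrent.futures.wait()
    )

    # Download blobs into a local directory, creating subdirectories.
    transfer_manager.download_many_to_path(
        bucket,
        ["file_a.txt", "dir_a/file_c.txt"],
        destination_directory="downloads/",
        create_directories=True,
    )

    # upload_many()/download_many() return per-file results; exceptions are
    # returned in that list instead of raised unless raise_exception=True.
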