Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-gcsfs for openSUSE:Factory checked in at 2022-04-28 23:07:47
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-gcsfs (Old)
 and      /work/SRC/openSUSE:Factory/.python-gcsfs.new.1538 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-gcsfs"

Thu Apr 28 23:07:47 2022 rev:10 rq:973297 version:2022.3.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-gcsfs/python-gcsfs.changes        2022-02-24 18:24:05.206648580 +0100
+++ /work/SRC/openSUSE:Factory/.python-gcsfs.new.1538/python-gcsfs.changes      2022-04-28 23:07:49.636678292 +0200
@@ -1,0 +2,11 @@
+Wed Apr 27 21:15:17 UTC 2022 - Ben Greiner <c...@bnavigator.de>
+
+- Update to 2022.3.0
+  * bucket exists workaround (#464)
+  * dirmarkers (#459)
+  * check connection (#457)
+  * browser connection now uses local server (#456)
+  * bucket location (#455)
+  * ensure auth is closed (#452)
+
+-------------------------------------------------------------------

Old:
----
  gcsfs-2022.02.0-gh.tar.gz

New:
----
  gcsfs-2022.3.0-gh.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-gcsfs.spec ++++++
--- /var/tmp/diff_new_pack.ywAV4N/_old  2022-04-28 23:07:50.120678819 +0200
+++ /var/tmp/diff_new_pack.ywAV4N/_new  2022-04-28 23:07:50.124678824 +0200
@@ -20,16 +20,15 @@
 # the test suite moved to a docker simulator which we cannot run inside an obs environment
 %bcond_with fulltest
 %define         skip_python2 1
-%define         ghversiontag 2022.02.0
 Name:           python-gcsfs
-Version:        2022.2.0
+Version:        2022.3.0
 Release:        0
 Summary:        Filesystem interface over GCS
 License:        BSD-3-Clause
 Group:          Development/Languages/Python
 URL:            https://github.com/fsspec/gcsfs
 # Use the GitHub tarball for test data
-Source:         https://github.com/fsspec/gcsfs/archive/refs/tags/%{ghversiontag}.tar.gz#/gcsfs-%{ghversiontag}-gh.tar.gz
+Source:         https://github.com/fsspec/gcsfs/archive/refs/tags/%{version}.tar.gz#/gcsfs-%{version}-gh.tar.gz
 BuildRequires:  %{python_module setuptools}
 BuildRequires:  fdupes
 BuildRequires:  python-rpm-macros
@@ -77,7 +76,7 @@
 This package provides the optional FUSE interface.
 
 %prep
-%autosetup -p1 -n gcsfs-%{ghversiontag}
+%autosetup -p1 -n gcsfs-%{version}
 sed -i 's/--color=yes//' setup.cfg
 
 %build

++++++ gcsfs-2022.02.0-gh.tar.gz -> gcsfs-2022.3.0-gh.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/gcsfs-2022.02.0/.pre-commit-config.yaml new/gcsfs-2022.3.0/.pre-commit-config.yaml
--- old/gcsfs-2022.02.0/.pre-commit-config.yaml 2022-02-22 18:57:08.000000000 +0100
+++ new/gcsfs-2022.3.0/.pre-commit-config.yaml  2022-04-04 16:36:39.000000000 +0200
@@ -7,7 +7,7 @@
       - id: trailing-whitespace
       - id: end-of-file-fixer
   - repo: https://github.com/ambv/black
-    rev: 20.8b1
+    rev: 22.3.0
     hooks:
     - id: black
   - repo: https://gitlab.com/pycqa/flake8
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/gcsfs-2022.02.0/docs/source/changelog.rst new/gcsfs-2022.3.0/docs/source/changelog.rst
--- old/gcsfs-2022.02.0/docs/source/changelog.rst       2022-02-22 18:57:08.000000000 +0100
+++ new/gcsfs-2022.3.0/docs/source/changelog.rst        2022-04-04 16:36:39.000000000 +0200
@@ -1,6 +1,19 @@
 Changelog
 =========
 
+2022.3.0
+--------
+
+(note that this release happened in 2022.4, but we label as 2022.3 to match
+fsspec)
+
+* bucket exists workaround (#464)
+* dirmarkers (#459)
+* check connection (#457)
+* browser connection now uses local server (#456)
+* bucket location (#455)
+* ensure auth is closed (#452)
+
 2022.02.0
 ---------
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/gcsfs-2022.02.0/gcsfs/_version.py new/gcsfs-2022.3.0/gcsfs/_version.py
--- old/gcsfs-2022.02.0/gcsfs/_version.py       2022-02-22 18:57:08.000000000 +0100
+++ new/gcsfs-2022.3.0/gcsfs/_version.py        2022-04-04 16:36:39.000000000 +0200
@@ -22,9 +22,9 @@
     # setup.py/versioneer.py will grep for the variable names, so they must
     # each be defined on a line of their own. _version.py will just call
     # get_keywords().
-    git_refnames = "2022.02.0"
-    git_full = "cf2b3e4bfa10bfcbda03830986beb8cc56ffdcb7"
-    git_date = "2022-02-22 12:57:08 -0500"
+    git_refnames = "2022.3.0"
+    git_full = "e4c16db7f59e0f243965f015e95996f9e7fe9665"
+    git_date = "2022-04-04 10:36:39 -0400"
     keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
     return keywords
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/gcsfs-2022.02.0/gcsfs/cli/gcsfuse.py new/gcsfs-2022.3.0/gcsfs/cli/gcsfuse.py
--- old/gcsfs-2022.02.0/gcsfs/cli/gcsfuse.py    2022-02-22 18:57:08.000000000 +0100
+++ new/gcsfs-2022.3.0/gcsfs/cli/gcsfuse.py     2022-04-04 16:36:39.000000000 +0200
@@ -39,7 +39,7 @@
 def main(
     bucket, mount_point, token, project_id, foreground, threads, cache_files, verbose
 ):
-    """ Mount a Google Cloud Storage (GCS) bucket to a local directory """
+    """Mount a Google Cloud Storage (GCS) bucket to a local directory"""
 
     if verbose == 1:
         logging.basicConfig(level=logging.INFO)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/gcsfs-2022.02.0/gcsfs/core.py new/gcsfs-2022.3.0/gcsfs/core.py
--- old/gcsfs-2022.02.0/gcsfs/core.py   2022-02-22 18:57:08.000000000 +0100
+++ new/gcsfs-2022.3.0/gcsfs/core.py    2022-04-04 16:36:39.000000000 +0200
@@ -48,9 +48,9 @@
 }
 DEFAULT_PROJECT = os.environ.get("GCSFS_DEFAULT_PROJECT", "")
 
-GCS_MIN_BLOCK_SIZE = 2 ** 18
-GCS_MAX_BLOCK_SIZE = 2 ** 28
-DEFAULT_BLOCK_SIZE = 5 * 2 ** 20
+GCS_MIN_BLOCK_SIZE = 2**18
+GCS_MAX_BLOCK_SIZE = 2**28
+DEFAULT_BLOCK_SIZE = 5 * 2**20
 
 
 QUOTE_TABLE = str.maketrans(
@@ -203,12 +203,6 @@
         Cache expiration time in seconds for object metadata cache.
         Set cache_timeout <= 0 for no caching, None for no cache expiration.
     secure_serialize: bool (deprecated)
-    check_connection: bool
-        When token=None, gcsfs will attempt various methods of establishing
-        credentials, falling back to anon. It is possible for a method to
-        find credentials in the system that turn out not to be valid. Setting
-        this parameter to True will ensure that an actual operation is
-        attempted before deciding that credentials are valid.
     requester_pays : bool, or str default False
         Whether to use requester-pays requests. This will include your
        project ID `project` in requests as the `userProject`, and you'll be
@@ -217,11 +211,15 @@
     session_kwargs: dict
         passed on to aiohttp.ClientSession; can contain, for example,
         proxy settings.
-    endpoin_url: str
+    endpoint_url: str
         If given, use this URL (format protocol://host:port , *without* any
         path part) for communication. If not given, defaults to the value
         of environment variable "STORAGE_EMULATOR_HOST"; if that is not set
         either, will use the standard Google endpoint.
+    default_location: str
+        Default location where buckets are created, like 'US' or 'EUROPE-WEST3'.
+        You can find a list of all available locations here:
+        https://cloud.google.com/storage/docs/locations#available-locations
     """
 
     scopes = {"read_only", "read_write", "full_control"}
@@ -239,7 +237,7 @@
         consistency="none",
         cache_timeout=None,
         secure_serialize=True,
-        check_connection=False,
+        check_connection=None,
         requests_timeout=None,
         requester_pays=False,
         asynchronous=False,
@@ -247,6 +245,7 @@
         loop=None,
         timeout=None,
         endpoint_url=None,
+        default_location=None,
         **kwargs,
     ):
         super().__init__(
@@ -270,8 +269,15 @@
         self._session = None
         self._endpoint = endpoint_url
         self.session_kwargs = session_kwargs or {}
+        self.default_location = default_location
 
-        self.credentials = GoogleCredentials(project, access, token, check_connection)
+        if check_connection:
+            warnings.warn(
+                "The `check_connection` argument is deprecated and will be 
removed in a future release.",
+                DeprecationWarning,
+            )
+
+        self.credentials = GoogleCredentials(project, access, token)
 
         if not self.asynchronous:
             self._session = sync(
@@ -498,7 +504,7 @@
                 return [await self._get_object(path)]
             else:
                 return []
-        out = items + pseudodirs
+        out = pseudodirs + items
         # Don't cache prefixed/partial listings
         if not prefix:
             self.dircache[path] = out
@@ -570,7 +576,8 @@
                 next_page_token = page.get("nextPageToken", None)
 
             buckets = [
-                {"name": i["name"] + "/", "size": 0, "type": "directory"} for 
i in items
+                {**i, "name": i["name"] + "/", "size": 0, "type": "directory"}
+                for i in items
             ]
             self.dircache[""] = buckets
             return buckets
@@ -597,7 +604,11 @@
                 path = self._parent(path)
 
     async def _mkdir(
-        self, bucket, acl="projectPrivate", default_acl="bucketOwnerFullControl"
+        self,
+        bucket,
+        acl="projectPrivate",
+        default_acl="bucketOwnerFullControl",
+        location=None,
     ):
         """
         New bucket
@@ -611,18 +622,27 @@
             access for the bucket itself
         default_acl: str, one of ACLs
             default ACL for objects created in this bucket
+        location: Optional[str]
+            Location where buckets are created, like 'US' or 'EUROPE-WEST3'.
+            If not provided, defaults to `self.default_location`.
+            You can find a list of all available locations here:
+            https://cloud.google.com/storage/docs/locations#available-locations
         """
         if bucket in ["", "/"]:
             raise ValueError("Cannot create root bucket")
         if "/" in bucket:
             return
+        json_data = {"name": bucket}
+        location = location or self.default_location
+        if location:
+            json_data["location"] = location
         await self._call(
             method="POST",
             path="b",
             predefinedAcl=acl,
             project=self.project,
             predefinedDefaultObjectAcl=default_acl,
-            json={"name": bucket},
+            json=json_data,
             json_out=True,
         )
         self.invalidate_cache(bucket)
@@ -649,10 +669,18 @@
 
     async def _info(self, path, **kwargs):
         """File information about this path."""
-        path = self._strip_protocol(path).rstrip("/")
+        path = self._strip_protocol(path)
         if "/" not in path:
-            out = await self._call("GET", f"b/{path}", json_out=True)
-            out.update(size=0, type="directory")
+            try:
+                out = await self._call("GET", f"b/{path}", json_out=True)
+                out.update(size=0, type="directory")
+            except OSError:
+                # GET bucket failed, try ls; will have no metadata
+                exists = await self._ls(path)
+                if exists:
+                    out = {"name": path, "size": 0, "type": "directory"}
+                else:
+                    raise FileNotFoundError(path)
             return out
         # Check directory cache for parent dir
         parent_path = self._parent(path)
@@ -673,7 +701,10 @@
             }
         # Check exact file path
         try:
-            return await self._get_object(path)
+            exact = await self._get_object(path)
+            # this condition finds a "placeholder" - still need to check if it's a directory
+            if exact["size"] or not exact["name"].endswith("/"):
+                return exact
         except FileNotFoundError:
             pass
         kwargs["detail"] = True  # Force to true for info
@@ -721,14 +752,14 @@
             return sorted([o["name"] for o in out])
 
     def url(self, path):
-        """ Get HTTP URL of the given path """
+        """Get HTTP URL of the given path"""
         u = "{}/download/storage/v1/b/{}/o/{}?alt=media"
         bucket, object = self.split_path(path)
         object = quote_plus(object)
         return u.format(self._location, bucket, object)
 
     async def _cat_file(self, path, start=None, end=None):
-        """ Simple one-shot get of file data """
+        """Simple one-shot get of file data"""
         u2 = self.url(path)
         if start or end:
             head = {"Range": await self._process_limits(path, start, end)}
@@ -948,14 +979,14 @@
         metadata=None,
         consistency=None,
         content_type="application/octet-stream",
-        chunksize=50 * 2 ** 20,
+        chunksize=50 * 2**20,
     ):
         # enforce blocksize should be a multiple of 2**18
         consistency = consistency or self.consistency
         bucket, key = self.split_path(path)
         size = len(data)
         out = None
-        if size < 5 * 2 ** 20:
+        if size < 5 * 2**20:
             return await simple_upload(
                 self, bucket, key, data, metadata, consistency, content_type
             )
@@ -979,7 +1010,7 @@
         metadata=None,
         consistency=None,
         content_type="application/octet-stream",
-        chunksize=50 * 2 ** 20,
+        chunksize=50 * 2**20,
         callback=None,
         **kwargs,
     ):
@@ -995,7 +1026,7 @@
             f0.seek(0)
             callback.set_size(size)
 
-            if size < 5 * 2 ** 20:
+            if size < 5 * 2**20:
                 await simple_upload(
                     self,
                     bucket,
@@ -1205,7 +1236,7 @@
         )
         bucket = client.bucket(bucket)
         blob = bucket.blob(key)
-        return blob.generate_signed_url(expiration=expiration)
+        return blob.generate_signed_url(expiration=expiration, **kwargs)
 
 
 GoogleCredentials.load_tokens()
@@ -1304,11 +1335,11 @@
             self.location = None
 
     def info(self):
-        """ File information about this path """
+        """File information about this path"""
         return self.details
 
     def url(self):
-        """ HTTP link to this file's data """
+        """HTTP link to this file's data"""
         return self.fs.url(self.path)
 
     def _upload_chunk(self, final=False):
@@ -1387,7 +1418,7 @@
         self._upload_chunk(final=True)
 
     def _initiate_upload(self):
-        """ Create multi-upload """
+        """Create multi-upload"""
         self.location = sync(
             self.gcsfs.loop,
             initiate_upload,
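
The headline change in this core.py diff is bucket-location support. A minimal
usage sketch, assuming hypothetical project and bucket names (a per-call
`location` overrides the instance-wide default, as the new tests further below
exercise):

    import gcsfs

    # Buckets created via this instance default to EUROPE-WEST3
    fs = gcsfs.GCSFileSystem(project="my-project", default_location="EUROPE-WEST3")
    fs.mkdir("my-eu-bucket")                 # created in EUROPE-WEST3
    fs.mkdir("my-us-bucket", location="US")  # explicit location wins
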
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/gcsfs-2022.02.0/gcsfs/credentials.py new/gcsfs-2022.3.0/gcsfs/credentials.py
--- old/gcsfs-2022.02.0/gcsfs/credentials.py    2022-02-22 18:57:08.000000000 +0100
+++ new/gcsfs-2022.3.0/gcsfs/credentials.py     2022-04-04 16:36:39.000000000 +0200
@@ -38,19 +38,24 @@
 
 
 class GoogleCredentials:
-    def __init__(self, project, access, token, check_credentials=False):
+    def __init__(self, project, access, token, check_credentials=None):
         self.scope = "https://www.googleapis.com/auth/devstorage." + access
         self.project = project
         self.access = access
         self.heads = {}
 
-        self.check_credentials = check_credentials
         self.credentials = None
         self.method = None
         self.lock = threading.Lock()
         self.token = token
         self.connect(method=token)
 
+        if check_credentials:
+            warnings.warn(
+                "The `check_credentials` argument is deprecated and will be 
removed in a future release.",
+                DeprecationWarning,
+            )
+
     @classmethod
     def load_tokens(cls):
         """Get "browser" tokens from disc"""
@@ -156,13 +161,14 @@
             return  # anon
         if self.credentials.valid:
             return  # still good
-        req = Request(requests.Session())
-        with self.lock:
-            if self.credentials.valid:
-                return  # repeat to avoid race (but don't want lock in common case)
-            logger.debug("GCS refresh")
-            self.credentials.refresh(req)
-            self.apply(self.heads)
+        with requests.Session() as session:
+            req = Request(session)
+            with self.lock:
+                if self.credentials.valid:
+                    return  # repeat to avoid race (but don't want lock in common case)
+                logger.debug("GCS refresh")
+                self.credentials.refresh(req)
+                self.apply(self.heads)
 
     def apply(self, out):
         """Insert credential headers in-place to a dictionary"""
@@ -182,7 +188,7 @@
 
     def _connect_browser(self):
         flow = InstalledAppFlow.from_client_config(client_config, [self.scope])
-        credentials = flow.run_console()
+        credentials = flow.run_local_server()
         self.tokens[(self.project, self.access)] = credentials
         self._save_tokens()
         self.credentials = credentials
@@ -212,8 +218,6 @@
             for meth in ["google_default", "cache", "cloud", "anon"]:
                 try:
                     self.connect(method=meth)
-                    if self.check_credentials and meth != "anon":
-                        self.ls("anaconda-public-data")
                     logger.debug("Connected with method %s", meth)
                     break
                 except google.auth.exceptions.GoogleAuthError as e:
@@ -222,6 +226,10 @@
                     logger.debug(
                         'Connection with method "%s" failed' % meth, exc_info=e
                     )
+            else:
+                # Since the 'anon' connection method should always succeed,
+                # getting here means something has gone terribly wrong.
+                raise RuntimeError("All connection methods have failed!")
         else:
             self.__getattribute__("_connect_" + method)()
             self.method = method
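
Alongside the session cleanup above, `check_connection`/`check_credentials` now
only emit a DeprecationWarning instead of probing a public bucket. A small
sketch of that behavior, assuming default credentials resolve (the project name
is a placeholder):

    import warnings

    import gcsfs

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        fs = gcsfs.GCSFileSystem(project="my-project", check_connection=True)
    # the flag no longer triggers a test ls() call; it only warns
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)
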
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/gcsfs-2022.02.0/gcsfs/tests/conftest.py new/gcsfs-2022.3.0/gcsfs/tests/conftest.py
--- old/gcsfs-2022.02.0/gcsfs/tests/conftest.py 2022-02-22 18:57:08.000000000 +0100
+++ new/gcsfs-2022.3.0/gcsfs/tests/conftest.py  2022-04-04 16:36:39.000000000 +0200
@@ -84,9 +84,19 @@
 
 
 @pytest.fixture
-def gcs(docker_gcs, populate=True):
-    GCSFileSystem.clear_instance_cache()
-    gcs = fsspec.filesystem("gcs", endpoint_url=docker_gcs)
+def gcs_factory(docker_gcs):
+    def factory(default_location=None):
+        GCSFileSystem.clear_instance_cache()
+        return fsspec.filesystem(
+            "gcs", endpoint_url=docker_gcs, default_location=default_location
+        )
+
+    return factory
+
+
+@pytest.fixture
+def gcs(gcs_factory, populate=True):
+    gcs = gcs_factory()
     try:
         # ensure we're empty.
         try:
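
The reworked conftest lets each test construct a filesystem with its own
default bucket location. A hypothetical test using the new factory fixture
(sketch only; the real location tests appear in the test_core.py diff below):

    def test_default_location_is_stored(gcs_factory):
        gcs = gcs_factory(default_location="EUROPE-WEST3")
        # stored on the instance and picked up by mkdir()
        assert gcs.default_location == "EUROPE-WEST3"
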
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/gcsfs-2022.02.0/gcsfs/tests/test_core.py new/gcsfs-2022.3.0/gcsfs/tests/test_core.py
--- old/gcsfs-2022.02.0/gcsfs/tests/test_core.py        2022-02-22 18:57:08.000000000 +0100
+++ new/gcsfs-2022.3.0/gcsfs/tests/test_core.py 2022-04-04 16:36:39.000000000 +0200
@@ -5,10 +5,13 @@
 from itertools import chain
 from unittest import mock
 from urllib.parse import urlparse, parse_qs, unquote
+from uuid import uuid4
+
 import pytest
 import requests
 
 from fsspec.utils import seek_delimiter
+from fsspec.asyn import sync
 
 from gcsfs.tests.settings import TEST_BUCKET, TEST_PROJECT, TEST_REQUESTER_PAYS_BUCKET
 from gcsfs.tests.conftest import (
@@ -86,16 +89,16 @@
 
 def test_multi_upload(gcs):
     fn = TEST_BUCKET + "/test"
-    d = b"01234567" * 2 ** 15
+    d = b"01234567" * 2**15
 
     # something to write on close
-    with gcs.open(fn, "wb", content_type="text/plain", block_size=2 ** 18) as f:
+    with gcs.open(fn, "wb", content_type="text/plain", block_size=2**18) as f:
         f.write(d)
         f.write(b"xx")
     assert gcs.cat(fn) == d + b"xx"
     assert gcs.info(fn)["contentType"] == "text/plain"
     # empty buffer on close
-    with gcs.open(fn, "wb", content_type="text/plain", block_size=2 ** 19) as f:
+    with gcs.open(fn, "wb", content_type="text/plain", block_size=2**19) as f:
         f.write(d)
         f.write(b"xx")
         f.write(d)
@@ -103,16 +106,16 @@
     assert gcs.info(fn)["contentType"] == "text/plain"
 
     fn = TEST_BUCKET + "/test"
-    d = b"01234567" * 2 ** 15
+    d = b"01234567" * 2**15
 
     # something to write on close
-    with gcs.open(fn, "wb", block_size=2 ** 18) as f:
+    with gcs.open(fn, "wb", block_size=2**18) as f:
         f.write(d)
         f.write(b"xx")
     assert gcs.cat(fn) == d + b"xx"
     assert gcs.info(fn)["contentType"] == "application/octet-stream"
     # empty buffer on close
-    with gcs.open(fn, "wb", block_size=2 ** 19) as f:
+    with gcs.open(fn, "wb", block_size=2**19) as f:
         f.write(d)
         f.write(b"xx")
         f.write(d)
@@ -584,7 +587,7 @@
 
 
 def test_write_blocks(gcs):
-    with gcs.open(TEST_BUCKET + "/temp", "wb", block_size=2 ** 18) as f:
+    with gcs.open(TEST_BUCKET + "/temp", "wb", block_size=2**18) as f:
         f.write(b"a" * 100000)
         assert f.buffer.tell() == 100000
         assert not (f.offset)
@@ -597,11 +600,11 @@
 def test_write_blocks2(gcs):
     if not gcs.on_google:
         pytest.skip("emulator always accepts whole request")
-    with gcs.open(TEST_BUCKET + "/temp1", "wb", block_size=2 ** 18) as f:
-        f.write(b"a" * (2 ** 18 + 1))
+    with gcs.open(TEST_BUCKET + "/temp1", "wb", block_size=2**18) as f:
+        f.write(b"a" * (2**18 + 1))
         # leftover bytes: GCS accepts blocks in multiples of 2**18 bytes
         assert f.buffer.tell() == 1
-    assert gcs.info(TEST_BUCKET + "/temp1")["size"] == 2 ** 18 + 1
+    assert gcs.info(TEST_BUCKET + "/temp1")["size"] == 2**18 + 1
 
 
 def test_readline(gcs):
@@ -647,16 +650,16 @@
 
 
 def test_readline_blocksize(gcs):
-    data = b"ab\n" + b"a" * (2 ** 18) + b"\nab"
+    data = b"ab\n" + b"a" * (2**18) + b"\nab"
     with gcs.open(a, "wb") as f:
         f.write(data)
-    with gcs.open(a, "rb", block_size=2 ** 18) as f:
+    with gcs.open(a, "rb", block_size=2**18) as f:
         result = f.readline()
         expected = b"ab\n"
         assert result == expected
 
         result = f.readline()
-        expected = b"a" * (2 ** 18) + b"\n"
+        expected = b"a" * (2**18) + b"\n"
         assert result == expected
 
         result = f.readline()
@@ -769,7 +772,7 @@
 
 def test_current(gcs):
     assert GCSFileSystem.current() is gcs
-    gcs2 = GCSFileSystem(endpoint_url=gcs._endpoint)
+    gcs2 = GCSFileSystem(endpoint_url=gcs._endpoint, default_location=None)
     assert gcs2.session is gcs.session
 
 
@@ -977,3 +980,58 @@
     gcs.touch(fn2)
     assert gcs.cat(fn2) != data
     assert set(gcs.ls(parent)) == set([fn, fn2])
+
+
+@pytest.mark.parametrize(
+    "location",
+    [
+        (None),
+        ("US"),
+        ("EUROPE-WEST3"),
+        ("europe-west3"),
+    ],
+)
+def test_bucket_location(gcs_factory, location):
+    gcs = gcs_factory(default_location=location)
+    if not gcs.on_google:
+        pytest.skip("emulator can only create buckets in the 'US-CENTRAL1' location.")
+    bucket_name = str(uuid4())
+    try:
+        gcs.mkdir(bucket_name)
+        bucket = [
+            b
+            for b in sync(gcs.loop, gcs._list_buckets, timeout=gcs.timeout)
+            if b["name"] == bucket_name + "/"
+        ][0]
+        assert bucket["location"] == (location or "US").upper()
+    finally:
+        gcs.rm(bucket_name, recursive=True)
+
+
+def test_bucket_default_location_overwrite(gcs_factory):
+    gcs = gcs_factory(default_location="US")
+    if not gcs.on_google:
+        pytest.skip("emulator can only create buckets in the 'US-CENTRAL1' location.")
+    bucket_name = str(uuid4())
+    try:
+        gcs.mkdir(bucket_name, location="EUROPE-WEST3")
+        bucket = [
+            b
+            for b in sync(gcs.loop, gcs._list_buckets, timeout=gcs.timeout)
+            if b["name"] == bucket_name + "/"
+        ][0]
+        assert bucket["location"] == "EUROPE-WEST3"
+    finally:
+        gcs.rm(bucket_name, recursive=True)
+
+
+def test_dir_marker(gcs):
+    gcs.touch(f"{TEST_BUCKET}/placeholder/")
+    gcs.touch(f"{TEST_BUCKET}/placeholder/inner")
+    out = gcs.find(TEST_BUCKET)
+    assert f"{TEST_BUCKET}/placeholder/" in out
+    gcs.invalidate_cache()
+    out2 = gcs.info(f"{TEST_BUCKET}/placeholder/")
+    out3 = gcs.info(f"{TEST_BUCKET}/placeholder/")
+    assert out2 == out3
+    assert out2["type"] == "directory"
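
The test_dir_marker test above relies on the new dirmarker handling in _info():
a zero-size object whose name ends in "/" is now reported as a directory. In
user code that looks roughly like this (the bucket name is hypothetical):

    fs.touch("my-bucket/data/")  # create a directory marker
    assert fs.info("my-bucket/data/")["type"] == "directory"
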
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/gcsfs-2022.02.0/requirements.txt new/gcsfs-2022.3.0/requirements.txt
--- old/gcsfs-2022.02.0/requirements.txt        2022-02-22 18:57:08.000000000 +0100
+++ new/gcsfs-2022.3.0/requirements.txt 2022-04-04 16:36:39.000000000 +0200
@@ -3,5 +3,5 @@
 google-cloud-storage
 requests
 decorator>4.1.2
-fsspec==2022.02.0
+fsspec==2022.3.0
 aiohttp<4
