Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-gcsfs for openSUSE:Factory 
checked in at 2023-07-07 15:47:32
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-gcsfs (Old)
 and      /work/SRC/openSUSE:Factory/.python-gcsfs.new.23466 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-gcsfs"

Fri Jul  7 15:47:32 2023 rev:17 rq:1097064 version:2023.6.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-gcsfs/python-gcsfs.changes        
2023-06-07 23:07:03.855218712 +0200
+++ /work/SRC/openSUSE:Factory/.python-gcsfs.new.23466/python-gcsfs.changes     
2023-07-07 15:48:47.828723799 +0200
@@ -1,0 +2,8 @@
+Wed Jul  5 08:50:51 UTC 2023 - Ben Greiner <[email protected]>
+
+- Update to 2023.6.0
+  * allow raw/session token for auth (#554)
+  * fix listings_expiry_time kwargs (#551)
+  * allow setting fixed metadata on put/pipe (#550)
+
+-------------------------------------------------------------------

Old:
----
  gcsfs-2023.5.0-gh.tar.gz

New:
----
  gcsfs-2023.6.0-gh.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-gcsfs.spec ++++++
--- /var/tmp/diff_new_pack.0Wsxnn/_old  2023-07-07 15:48:48.652728708 +0200
+++ /var/tmp/diff_new_pack.0Wsxnn/_new  2023-07-07 15:48:48.664728780 +0200
@@ -17,7 +17,7 @@
 
 
 Name:           python-gcsfs
-Version:        2023.5.0
+Version:        2023.6.0
 Release:        0
 Summary:        Filesystem interface over GCS
 License:        BSD-3-Clause
@@ -25,7 +25,9 @@
 # Use the GitHub tarball for test data
 Source:         
https://github.com/fsspec/gcsfs/archive/refs/tags/%{version}.tar.gz#/gcsfs-%{version}-gh.tar.gz
 BuildRequires:  %{python_module base >= 3.8}
+BuildRequires:  %{python_module pip}
 BuildRequires:  %{python_module setuptools}
+BuildRequires:  %{python_module wheel}
 BuildRequires:  fdupes
 BuildRequires:  python-rpm-macros
 Requires:       python-aiohttp
@@ -76,10 +78,10 @@
 sed -i 's/--color=yes//' setup.cfg
 
 %build
-%python_build
+%pyproject_wheel
 
 %install
-%python_install
+%pyproject_install
 %python_expand %fdupes %{buildroot}%{$python_sitelib}
 
 %check
@@ -97,12 +99,14 @@
 donttest="test_fuse"
 # finds an existing path on the non-first multiflavor test runs"
 donttest+=" or test_mkdir_with_path"
+# no http error (which is expected) without network
+donttest+=" or test_credentials_from_raw_token"
 %pytest -rfEs -k "not ($donttest)"
 
 %files %{python_files}
 %doc README.rst
 %license LICENSE.txt
-%{python_sitelib}/gcsfs-%{version}*-info
+%{python_sitelib}/gcsfs-%{version}.dist-info
 %{python_sitelib}/gcsfs/
 %exclude %{python_sitelib}/gcsfs/cli/
 

++++++ gcsfs-2023.5.0-gh.tar.gz -> gcsfs-2023.6.0-gh.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/gcsfs-2023.5.0/docs/source/changelog.rst 
new/gcsfs-2023.6.0/docs/source/changelog.rst
--- old/gcsfs-2023.5.0/docs/source/changelog.rst        2023-05-07 
21:21:05.000000000 +0200
+++ new/gcsfs-2023.6.0/docs/source/changelog.rst        2023-06-12 
15:54:07.000000000 +0200
@@ -1,6 +1,13 @@
 Changelog
 =========
 
+2023.6.0
+--------
+
+* allow raw/session token for auth (#554)
+* fix listings_expiry_time kwargs (#551)
+* allow setting fixed metadata on put/pipe (#550)
+
 2023.5.0
 --------
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/gcsfs-2023.5.0/docs/source/index.rst 
new/gcsfs-2023.6.0/docs/source/index.rst
--- old/gcsfs-2023.5.0/docs/source/index.rst    2023-05-07 21:21:05.000000000 
+0200
+++ new/gcsfs-2023.6.0/docs/source/index.rst    2023-06-12 15:54:07.000000000 
+0200
@@ -150,14 +150,11 @@
 
 .. code-block:: python
 
-    loop = ...  # however you create your loop
+    async def run_program():
+        gcs = GCSFileSystem(asynchronous=True)
+        print(await gcs._ls(""))
 
-    async def run_program(loop):
-        gcs = GCSFileSystem(..., asynchronous=True, loop=loop)
-        await gcs.set_session()
-        ...  # perform work
-
-    asyncio.run(run_program(loop))  # or call from your async code
+    asyncio.run(run_program())  # or call from your async code
 
 Concurrent async operations are also used internally for bulk operations
 such as ``pipe/cat``, ``get/put``, ``cp/mv/rm``. The async calls are
@@ -166,6 +163,10 @@
 using async-style programming, you do not need to know about how this
 works, but you might find the implementation interesting.
 
+For every synchronous function there is an asynchronous one prefixed by ``_``,
+but the ``open`` operation does not support async operation. If you need to
+open a file in an async manner, it is better to asynchronously download it to a
+temporary location and work with it from there.
 
 Proxy
 -----
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/gcsfs-2023.5.0/gcsfs/_version.py 
new/gcsfs-2023.6.0/gcsfs/_version.py
--- old/gcsfs-2023.5.0/gcsfs/_version.py        2023-05-07 21:21:05.000000000 
+0200
+++ new/gcsfs-2023.6.0/gcsfs/_version.py        2023-06-12 15:54:07.000000000 
+0200
@@ -22,9 +22,9 @@
     # setup.py/versioneer.py will grep for the variable names, so they must
     # each be defined on a line of their own. _version.py will just call
     # get_keywords().
-    git_refnames = "2023.5.0"
-    git_full = "2354d6b0ae598a9107ca2f63af12ebc98d41de5f"
-    git_date = "2023-05-07 15:21:05 -0400"
+    git_refnames = "2023.6.0"
+    git_full = "d7952a946710da486eb7a39fed1d92f3c065c8f1"
+    git_date = "2023-06-12 09:54:07 -0400"
     keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
     return keywords
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/gcsfs-2023.5.0/gcsfs/core.py 
new/gcsfs-2023.6.0/gcsfs/core.py
--- old/gcsfs-2023.5.0/gcsfs/core.py    2023-05-07 21:21:05.000000000 +0200
+++ new/gcsfs-2023.6.0/gcsfs/core.py    2023-06-12 15:54:07.000000000 +0200
@@ -284,9 +284,10 @@
         version_aware=False,
         **kwargs,
     ):
+        if cache_timeout:
+            kwargs["listings_expiry_time"] = cache_timeout
         super().__init__(
             self,
-            listings_expiry_time=cache_timeout,
             asynchronous=asynchronous,
             loop=loop,
             **kwargs,
@@ -1105,6 +1106,7 @@
         metadata=None,
         consistency=None,
         content_type="application/octet-stream",
+        fixed_key_metadata=None,
         chunksize=50 * 2**20,
     ):
         # enforce blocksize should be a multiple of 2**18
@@ -1114,10 +1116,24 @@
         out = None
         if size < 5 * 2**20:
             location = await simple_upload(
-                self, bucket, key, data, metadata, consistency, content_type
+                self,
+                bucket,
+                key,
+                data,
+                metadata,
+                consistency,
+                content_type,
+                fixed_key_metadata=fixed_key_metadata,
             )
         else:
-            location = await initiate_upload(self, bucket, key, content_type, 
metadata)
+            location = await initiate_upload(
+                self,
+                bucket,
+                key,
+                content_type,
+                metadata,
+                fixed_key_metadata=fixed_key_metadata,
+            )
             for offset in range(0, len(data), chunksize):
                 bit = data[offset : offset + chunksize]
                 out = await upload_chunk(
@@ -1140,6 +1156,7 @@
         content_type="application/octet-stream",
         chunksize=50 * 2**20,
         callback=None,
+        fixed_key_metadata=None,
         **kwargs,
     ):
         # enforce blocksize should be a multiple of 2**18
@@ -1165,12 +1182,18 @@
                     consistency=consistency,
                     metadatain=metadata,
                     content_type=content_type,
+                    fixed_key_metadata=fixed_key_metadata,
                 )
                 callback.absolute_update(size)
 
             else:
                 location = await initiate_upload(
-                    self, bucket, key, content_type, metadata
+                    self,
+                    bucket,
+                    key,
+                    content_type,
+                    metadata=metadata,
+                    fixed_key_metadata=fixed_key_metadata,
                 )
                 offset = 0
                 while True:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/gcsfs-2023.5.0/gcsfs/credentials.py 
new/gcsfs-2023.6.0/gcsfs/credentials.py
--- old/gcsfs-2023.5.0/gcsfs/credentials.py     2023-05-07 21:21:05.000000000 
+0200
+++ new/gcsfs-2023.6.0/gcsfs/credentials.py     2023-06-12 15:54:07.000000000 
+0200
@@ -139,22 +139,24 @@
         Parameters
         ----------
         token: str, dict or Credentials
-            If a str, try to load as a Service file, or next as a JSON; if
+            If a str and a valid file name, try to load as a Service file, or 
next as a JSON;
+            if not a valid file name, assume it's a valid raw 
(non-renewable/session) token, and pass to Credentials. If
             dict, try to interpret as credentials; if Credentials, use 
directly.
         """
         if isinstance(token, str):
-            if not os.path.exists(token):
-                raise FileNotFoundError(token)
-            try:
-                # is this a "service" token?
-                self._connect_service(token)
-                return
-            except:  # noqa: E722
-                # TODO: catch specific exceptions
-                # some other kind of token file
-                # will raise exception if is not json
-                with open(token) as data:
-                    token = json.load(data)
+            if os.path.exists(token):
+                try:
+                    # is this a "service" token?
+                    self._connect_service(token)
+                    return
+                except:  # noqa: E722
+                    # TODO: catch specific exceptions
+                    # some other kind of token file
+                    # will raise exception if is not json
+                    with open(token) as data:
+                        token = json.load(data)
+            else:
+                token = Credentials(token)
         if isinstance(token, dict):
             credentials = self._dict_to_credentials(token)
         elif isinstance(token, google.auth.credentials.Credentials):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/gcsfs-2023.5.0/gcsfs/tests/test_core.py 
new/gcsfs-2023.6.0/gcsfs/tests/test_core.py
--- old/gcsfs-2023.5.0/gcsfs/tests/test_core.py 2023-05-07 21:21:05.000000000 
+0200
+++ new/gcsfs-2023.6.0/gcsfs/tests/test_core.py 2023-06-12 15:54:07.000000000 
+0200
@@ -1364,3 +1364,10 @@
     assert gcs.ls(gparent, detail=False) == [f"{root}/t1/t2/t3"]
     gcs.glob(ggparent + "/")
     assert gcs.ls(gparent, detail=False) == [f"{root}/t1/t2/t3"]
+
+
+def test_expiry_keyword():
+    gcs = GCSFileSystem(listings_expiry_time=1, token="anon")
+    assert gcs.dircache.listings_expiry_time == 1
+    gcs = GCSFileSystem(cache_timeout=1, token="anon")
+    assert gcs.dircache.listings_expiry_time == 1
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/gcsfs-2023.5.0/gcsfs/tests/test_credentials.py 
new/gcsfs-2023.6.0/gcsfs/tests/test_credentials.py
--- old/gcsfs-2023.5.0/gcsfs/tests/test_credentials.py  2023-05-07 
21:21:05.000000000 +0200
+++ new/gcsfs-2023.6.0/gcsfs/tests/test_credentials.py  2023-06-12 
15:54:07.000000000 +0200
@@ -1,7 +1,18 @@
+import pytest
+
+from gcsfs import GCSFileSystem
 from gcsfs.credentials import GoogleCredentials
+from gcsfs.retry import HttpError
 
 
 def test_googlecredentials_none():
     credentials = GoogleCredentials(project="myproject", token=None, 
access="read_only")
     headers = {}
     credentials.apply(headers)
+
+
[email protected]("token", ["", "incorrect.token", "x" * 100])
+def test_credentials_from_raw_token(token):
+    with pytest.raises(HttpError, match="Invalid Credentials"):
+        fs = GCSFileSystem(project="myproject", token=token)
+        fs.ls("/")
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/gcsfs-2023.5.0/requirements.txt 
new/gcsfs-2023.6.0/requirements.txt
--- old/gcsfs-2023.5.0/requirements.txt 2023-05-07 21:21:05.000000000 +0200
+++ new/gcsfs-2023.6.0/requirements.txt 2023-06-12 15:54:07.000000000 +0200
@@ -1,6 +1,6 @@
 aiohttp!=4.0.0a0, !=4.0.0a1
 decorator>4.1.2
-fsspec==2023.5.0
+fsspec==2023.6.0
 google-auth>=1.2
 google-auth-oauthlib
 google-cloud-storage

Reply via email to