This is an automated email from the ASF dual-hosted git repository.

fokko pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/iceberg.git


The following commit(s) were added to refs/heads/master by this push:
     new 2477ae7263 Python: GCS Support (#8207)
2477ae7263 is described below

commit 2477ae7263a0a4b880cb73d297739698392b5464
Author: Fokko Driesprong <[email protected]>
AuthorDate: Mon Aug 14 14:43:46 2023 +0200

    Python: GCS Support (#8207)
    
    * pyiceberg: Add Google Cloud Storage support
    
    * updated project dependencies
    
    * Update python/pyiceberg/io/fsspec.py
    
    Co-authored-by: Fokko Driesprong <[email protected]>
    
    * Fix some configuration
    
    * MOAR tests
    
    * Start GCS server
    
    * Fix makefile
    
    * Move to http
    
    ---------
    
    Co-authored-by: Victoria Bukta <[email protected]>
    Co-authored-by: Victoria Bukta <[email protected]>
---
 python/Makefile                          |   8 +-
 python/dev/docker-compose-gcs-server.yml |  30 +++
 python/dev/run-gcs-server.sh             |  33 +++
 python/mkdocs/docs/configuration.md      |  18 ++
 python/mkdocs/docs/index.md              |   3 +-
 python/poetry.lock                       | 366 +++++++++++++++++++++++++++++--
 python/pyiceberg/io/__init__.py          |  11 +
 python/pyiceberg/io/fsspec.py            |  31 +++
 python/pyiceberg/io/pyarrow.py           |  19 ++
 python/pyiceberg/utils/datetime.py       |   6 +
 python/pyproject.toml                    |  11 +-
 python/tests/conftest.py                 |  40 +++-
 python/tests/io/test_fsspec.py           | 176 +++++++++++++++
 python/tests/io/test_pyarrow.py          | 182 +++++++++++++++
 python/tests/utils/test_datetime.py      |   6 +-
 15 files changed, 918 insertions(+), 22 deletions(-)

diff --git a/python/Makefile b/python/Makefile
index 932c90dd77..df5f7005d9 100644
--- a/python/Makefile
+++ b/python/Makefile
@@ -17,7 +17,8 @@
 
 install:
        pip install poetry
-       poetry install -E pyarrow -E hive -E s3fs -E glue -E adlfs -E duckdb -E 
ray -E sql-postgres
+       poetry install -E pyarrow -E hive -E s3fs -E glue -E adlfs -E duckdb -E 
ray -E sql-postgres -E gcsfs
+
 
 check-license:
        ./dev/check-license
@@ -52,7 +53,12 @@ test-adlfs:
 test-coverage:
        sh ./dev/run-minio.sh
        sh ./dev/run-azurite.sh
+       sh ./dev/run-gcs-server.sh
        poetry run coverage run --source=pyiceberg/ -m pytest tests/ -m "not 
integration" ${PYTEST_ARGS}
        poetry run coverage report -m --fail-under=90
        poetry run coverage html
        poetry run coverage xml
+
+test-gcs:
+       sh ./dev/run-gcs-server.sh
+       poetry run  pytest tests/ -m gcs ${PYTEST_ARGS}
diff --git a/python/dev/docker-compose-gcs-server.yml 
b/python/dev/docker-compose-gcs-server.yml
new file mode 100644
index 0000000000..2a5164c81c
--- /dev/null
+++ b/python/dev/docker-compose-gcs-server.yml
@@ -0,0 +1,30 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+version: "3"
+
+services:
+  gcs-server:
+    image: fsouza/fake-gcs-server
+    container_name: gcs-server
+    ports:
+      - 4443:4443
+    entrypoint: >
+      /bin/sh -c "
+      mkdir -p /data/warehouse;
+      /bin/fake-gcs-server -data /data -scheme http;
+      exit 0;
+      "
diff --git a/python/dev/run-gcs-server.sh b/python/dev/run-gcs-server.sh
new file mode 100644
index 0000000000..289d89009a
--- /dev/null
+++ b/python/dev/run-gcs-server.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+set -ex
+
+if [ $(docker ps -q --filter "name=gcs-server" --filter "status=running" ) ]; 
then
+    echo "Fake GCS Server running"
+else
+    docker-compose -f dev/docker-compose-gcs-server.yml kill
+    docker-compose -f dev/docker-compose-gcs-server.yml up -d
+    while [ -z $(docker ps -q --filter "name=gcs-server" --filter 
"status=running" ) ]
+    do
+      echo "Waiting for Fake GCS Server"
+      sleep 1
+    done
+fi
diff --git a/python/mkdocs/docs/configuration.md 
b/python/mkdocs/docs/configuration.md
index 0510e4ff66..e9f50042f2 100644
--- a/python/mkdocs/docs/configuration.md
+++ b/python/mkdocs/docs/configuration.md
@@ -80,6 +80,8 @@ For the FileIO there are several configuration options 
available:
 
 ### Azure Data lake
 
+### Azure Data lake
+
 | Key                     | Example                                            
                                       | Description                            
                                                                                
                                                                                
                                                                                
|
 | ----------------------- | 
-----------------------------------------------------------------------------------------
 | 
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
 |
 | adlfs.connection-string | 
AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqF...;BlobEndpoint=http://localhost/
 | A [connection 
string](https://learn.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string).
 This could be used to use FileIO with any adlfs-compatible object storage 
service that has a different endpoint (like 
[azurite](https://github.com/azure/azurite)). |
@@ -90,6 +92,22 @@ For the FileIO there are several configuration options 
available:
 | adlfs.client-id         | ad667be4-b811-11ed-afa1-0242ac120002               
                                       | The client-id                          
                                                                                
                                                                                
                                                                                
|
 | adlfs.client-secret     | oCA3R6P\*ka#oa1Sms2J74z...                         
                                       | The client-secret                      
                                                                                
                                                                                
                                                                                
|
 
+### Google Cloud Storage
+
+| Key                        | Example             | Description               
                                                                                
                                                                                
                                                                                
                     |
+| -------------------------- | ------------------- | 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
 |
+| gcs.project-id             | my-gcp-project      | Configure Google Cloud 
Project for GCS FileIO.                                                         
                                                                                
                                                                                
                        |
+| gcs.oauth.token            | ya29.dr.AfM...      | Configure method 
authentication to GCS for FileIO. Can be the following, 'google_default', 
'cache', 'anon', 'browser', 'cloud'. If not specified your credentials will be 
resolved in the following order: gcloud CLI default, gcsfs cached token, google 
compute metadata service, anonymous. |
+| gcs.oauth.token-expires-at | 1690971805918       | Configure expiration for 
credential generated with an access token. Milliseconds since epoch             
                                                                                
                                                                                
                      |
+| gcs.access                 | read_only           | Configure client to have 
specific access. Must be one of 'read_only', 'read_write', or 'full_control'    
                                                                                
                                                                                
                      |
+| gcs.consistency            | md5                 | Configure the check 
method when writing files. Must be one of 'none', 'size', or 'md5'              
                                                                                
                                                                                
                           |
+| gcs.cache-timeout          | 60                  | Configure the cache 
expiration time in seconds for object metadata cache                            
                                                                                
                                                                                
                           |
+| gcs.requester-pays         | False               | Configure whether to use 
requester-pays requests                                                         
                                                                                
                                                                                
                      |
+| gcs.session-kwargs         | {}                  | Configure a dict of 
parameters to pass on to aiohttp.ClientSession; can contain, for example, proxy 
settings.                                                                       
                                                                                
                           |
+| gcs.endpoint               | http://0.0.0.0:4443 | Configure an alternative 
endpoint for the GCS FileIO to access (format protocol://host:port) If not 
given, defaults to the value of environment variable "STORAGE_EMULATOR_HOST"; 
if that is not set either, will use the standard Google endpoint.               
                             |
+| gcs.default-location       | US                  | Configure the default 
location where buckets are created, like 'US' or 'EUROPE-WEST3'.                
                                                                                
                                                                                
                         |
+| gcs.version-aware          | False               | Configure whether to 
support object versioning on the GCS bucket.                                    
                                                                                
                                                                                
                          |
+
 ## REST Catalog
 
 ```yaml
diff --git a/python/mkdocs/docs/index.md b/python/mkdocs/docs/index.md
index 36fb656fca..89b66129e3 100644
--- a/python/mkdocs/docs/index.md
+++ b/python/mkdocs/docs/index.md
@@ -60,7 +60,8 @@ You can mix and match optional dependencies depending on your 
needs:
 | s3fs     | S3FS as a FileIO implementation to interact with the object store 
   |
 | adlfs    | ADLFS as a FileIO implementation to interact with the object 
store   |
 | snappy   | Support for snappy Avro compression                               
   |
+| gcs      | GCS as the FileIO implementation to interact with the object 
store   |
 
-You either need to install `s3fs`, `adlfs` or `pyarrow` for fetching files.
+You either need to install `s3fs`, `adlfs`, `gcs`, or `pyarrow` for fetching 
files.
 
 There is both a [CLI](cli.md) and [Python API](api.md) available.
diff --git a/python/poetry.lock b/python/poetry.lock
index 6f2d6fba8d..1a99547c65 100644
--- a/python/poetry.lock
+++ b/python/poetry.lock
@@ -341,6 +341,17 @@ test = ["filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov 
(>=2.12)", "pytest-moc
 typing = ["importlib-metadata (>=5.1)", "mypy (==0.991)", "tomli", 
"typing-extensions (>=3.7.4.3)"]
 virtualenv = ["virtualenv (>=20.0.35)"]
 
+[[package]]
+name = "cachetools"
+version = "5.3.1"
+description = "Extensible memoizing collections and decorators"
+optional = true
+python-versions = ">=3.7"
+files = [
+    {file = "cachetools-5.3.1-py3-none-any.whl", hash = 
"sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"},
+    {file = "cachetools-5.3.1.tar.gz", hash = 
"sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"},
+]
+
 [[package]]
 name = "certifi"
 version = "2023.7.22"
@@ -430,13 +441,13 @@ pycparser = "*"
 
 [[package]]
 name = "cfgv"
-version = "3.3.1"
+version = "3.4.0"
 description = "Validate configuration and produce human readable error 
messages."
 optional = false
-python-versions = ">=3.6.1"
+python-versions = ">=3.8"
 files = [
-    {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = 
"sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"},
-    {file = "cfgv-3.3.1.tar.gz", hash = 
"sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"},
+    {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = 
"sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
+    {file = "cfgv-3.4.0.tar.gz", hash = 
"sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
 ]
 
 [[package]]
@@ -727,6 +738,17 @@ files = [
     {file = "Cython-3.0.0.tar.gz", hash = 
"sha256:350b18f9673e63101dbbfcf774ee2f57c20ac4636d255741d76ca79016b1bd82"},
 ]
 
+[[package]]
+name = "decorator"
+version = "5.1.1"
+description = "Decorators for Humans"
+optional = true
+python-versions = ">=3.5"
+files = [
+    {file = "decorator-5.1.1-py3-none-any.whl", hash = 
"sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
+    {file = "decorator-5.1.1.tar.gz", hash = 
"sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
+]
+
 [[package]]
 name = "distlib"
 version = "0.3.7"
@@ -984,6 +1006,249 @@ smb = ["smbprotocol"]
 ssh = ["paramiko"]
 tqdm = ["tqdm"]
 
+[[package]]
+name = "gcsfs"
+version = "2023.6.0"
+description = "Convenient Filesystem interface over GCS"
+optional = true
+python-versions = ">=3.8"
+files = [
+    {file = "gcsfs-2023.6.0-py2.py3-none-any.whl", hash = 
"sha256:3b3c7d8eddd4ec1380f3b49fbb861ee1e974adb223564401f10884b6260d406f"},
+    {file = "gcsfs-2023.6.0.tar.gz", hash = 
"sha256:30b14fccadb3b7f0d99b2cd03bd8507c40f3a9a7d05847edca571f642bedbdff"},
+]
+
+[package.dependencies]
+aiohttp = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1"
+decorator = ">4.1.2"
+fsspec = "2023.6.0"
+google-auth = ">=1.2"
+google-auth-oauthlib = "*"
+google-cloud-storage = "*"
+requests = "*"
+
+[package.extras]
+crc = ["crcmod"]
+gcsfuse = ["fusepy"]
+
+[[package]]
+name = "google-api-core"
+version = "2.11.1"
+description = "Google API client core library"
+optional = true
+python-versions = ">=3.7"
+files = [
+    {file = "google-api-core-2.11.1.tar.gz", hash = 
"sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a"},
+    {file = "google_api_core-2.11.1-py3-none-any.whl", hash = 
"sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a"},
+]
+
+[package.dependencies]
+google-auth = ">=2.14.1,<3.0.dev0"
+googleapis-common-protos = ">=1.56.2,<2.0.dev0"
+protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || 
>4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || 
>4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0"
+requests = ">=2.18.0,<3.0.0.dev0"
+
+[package.extras]
+grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", 
"grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"]
+grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
+grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
+
+[[package]]
+name = "google-auth"
+version = "2.22.0"
+description = "Google Authentication Library"
+optional = true
+python-versions = ">=3.6"
+files = [
+    {file = "google-auth-2.22.0.tar.gz", hash = 
"sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"},
+    {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = 
"sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"},
+]
+
+[package.dependencies]
+cachetools = ">=2.0.0,<6.0"
+pyasn1-modules = ">=0.2.1"
+rsa = ">=3.1.4,<5"
+six = ">=1.9.0"
+urllib3 = "<2.0"
+
+[package.extras]
+aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"]
+enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"]
+pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"]
+reauth = ["pyu2f (>=0.1.5)"]
+requests = ["requests (>=2.20.0,<3.0.0.dev0)"]
+
+[[package]]
+name = "google-auth-oauthlib"
+version = "1.0.0"
+description = "Google Authentication Library"
+optional = true
+python-versions = ">=3.6"
+files = [
+    {file = "google-auth-oauthlib-1.0.0.tar.gz", hash = 
"sha256:e375064964820b47221a7e1b7ee1fd77051b6323c3f9e3e19785f78ab67ecfc5"},
+    {file = "google_auth_oauthlib-1.0.0-py2.py3-none-any.whl", hash = 
"sha256:95880ca704928c300f48194d1770cf5b1462835b6e49db61445a520f793fd5fb"},
+]
+
+[package.dependencies]
+google-auth = ">=2.15.0"
+requests-oauthlib = ">=0.7.0"
+
+[package.extras]
+tool = ["click (>=6.0.0)"]
+
+[[package]]
+name = "google-cloud-core"
+version = "2.3.3"
+description = "Google Cloud API client core library"
+optional = true
+python-versions = ">=3.7"
+files = [
+    {file = "google-cloud-core-2.3.3.tar.gz", hash = 
"sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb"},
+    {file = "google_cloud_core-2.3.3-py2.py3-none-any.whl", hash = 
"sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863"},
+]
+
+[package.dependencies]
+google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev"
+google-auth = ">=1.25.0,<3.0dev"
+
+[package.extras]
+grpc = ["grpcio (>=1.38.0,<2.0dev)"]
+
+[[package]]
+name = "google-cloud-storage"
+version = "2.10.0"
+description = "Google Cloud Storage API client library"
+optional = true
+python-versions = ">=3.7"
+files = [
+    {file = "google-cloud-storage-2.10.0.tar.gz", hash = 
"sha256:934b31ead5f3994e5360f9ff5750982c5b6b11604dc072bc452c25965e076dc7"},
+    {file = "google_cloud_storage-2.10.0-py2.py3-none-any.whl", hash = 
"sha256:9433cf28801671de1c80434238fb1e7e4a1ba3087470e90f70c928ea77c2b9d7"},
+]
+
+[package.dependencies]
+google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev"
+google-auth = ">=1.25.0,<3.0dev"
+google-cloud-core = ">=2.3.0,<3.0dev"
+google-resumable-media = ">=2.3.2"
+requests = ">=2.18.0,<3.0.0dev"
+
+[package.extras]
+protobuf = ["protobuf (<5.0.0dev)"]
+
+[[package]]
+name = "google-crc32c"
+version = "1.5.0"
+description = "A python wrapper of the C library 'Google CRC32C'"
+optional = true
+python-versions = ">=3.7"
+files = [
+    {file = "google-crc32c-1.5.0.tar.gz", hash = 
"sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"},
+    {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash 
= "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"},
+    {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346"},
+    {file = 
"google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
 hash = 
"sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65"},
+    {file = 
"google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = 
"sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b"},
+    {file = 
"google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
 hash = 
"sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02"},
+    {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash 
= "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4"},
+    {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e"},
+    {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c"},
+    {file = "google_crc32c-1.5.0-cp310-cp310-win32.whl", hash = 
"sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee"},
+    {file = "google_crc32c-1.5.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289"},
+    {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash 
= "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273"},
+    {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298"},
+    {file = 
"google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
 hash = 
"sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57"},
+    {file = 
"google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = 
"sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438"},
+    {file = 
"google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
 hash = 
"sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906"},
+    {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash 
= "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183"},
+    {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd"},
+    {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c"},
+    {file = "google_crc32c-1.5.0-cp311-cp311-win32.whl", hash = 
"sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709"},
+    {file = "google_crc32c-1.5.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968"},
+    {file = "google_crc32c-1.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae"},
+    {file = 
"google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
 hash = 
"sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556"},
+    {file = 
"google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = 
"sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f"},
+    {file = 
"google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
 hash = 
"sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876"},
+    {file = 
"google_crc32c-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", 
hash = 
"sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc"},
+    {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c"},
+    {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a"},
+    {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e"},
+    {file = "google_crc32c-1.5.0-cp37-cp37m-win32.whl", hash = 
"sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94"},
+    {file = "google_crc32c-1.5.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740"},
+    {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8"},
+    {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a"},
+    {file = 
"google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
 hash = 
"sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946"},
+    {file = 
"google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = 
"sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a"},
+    {file = 
"google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = 
"sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d"},
+    {file = 
"google_crc32c-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", 
hash = 
"sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a"},
+    {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37"},
+    {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894"},
+    {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a"},
+    {file = "google_crc32c-1.5.0-cp38-cp38-win32.whl", hash = 
"sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4"},
+    {file = "google_crc32c-1.5.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c"},
+    {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7"},
+    {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d"},
+    {file = 
"google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
 hash = 
"sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100"},
+    {file = 
"google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = 
"sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9"},
+    {file = 
"google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = 
"sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57"},
+    {file = 
"google_crc32c-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", 
hash = 
"sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210"},
+    {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd"},
+    {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96"},
+    {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61"},
+    {file = "google_crc32c-1.5.0-cp39-cp39-win32.whl", hash = 
"sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c"},
+    {file = "google_crc32c-1.5.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541"},
+    {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", 
hash = 
"sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325"},
+    {file = 
"google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
 hash = 
"sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd"},
+    {file = 
"google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl",
 hash = 
"sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091"},
+    {file = 
"google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
 hash = 
"sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178"},
+    {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = 
"sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2"},
+    {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", 
hash = 
"sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d"},
+    {file = 
"google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
 hash = 
"sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2"},
+    {file = 
"google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl",
 hash = 
"sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5"},
+    {file = 
"google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
 hash = 
"sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462"},
+    {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314"},
+    {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", 
hash = 
"sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728"},
+    {file = 
"google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
 hash = 
"sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88"},
+    {file = 
"google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl",
 hash = 
"sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb"},
+    {file = 
"google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
 hash = 
"sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31"},
+    {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93"},
+]
+
+[package.extras]
+testing = ["pytest"]
+
+[[package]]
+name = "google-resumable-media"
+version = "2.5.0"
+description = "Utilities for Google Media Downloads and Resumable Uploads"
+optional = true
+python-versions = ">= 3.7"
+files = [
+    {file = "google-resumable-media-2.5.0.tar.gz", hash = 
"sha256:218931e8e2b2a73a58eb354a288e03a0fd5fb1c4583261ac6e4c078666468c93"},
+    {file = "google_resumable_media-2.5.0-py2.py3-none-any.whl", hash = 
"sha256:da1bd943e2e114a56d85d6848497ebf9be6a14d3db23e9fc57581e7c3e8170ec"},
+]
+
+[package.dependencies]
+google-crc32c = ">=1.0,<2.0dev"
+
+[package.extras]
+aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)"]
+requests = ["requests (>=2.18.0,<3.0.0dev)"]
+
+[[package]]
+name = "googleapis-common-protos"
+version = "1.60.0"
+description = "Common protobufs used in Google APIs"
+optional = true
+python-versions = ">=3.7"
+files = [
+    {file = "googleapis-common-protos-1.60.0.tar.gz", hash = 
"sha256:e73ebb404098db405ba95d1e1ae0aa91c3e15a71da031a2eeb6b2e23e7bc3708"},
+    {file = "googleapis_common_protos-1.60.0-py2.py3-none-any.whl", hash = 
"sha256:69f9bbcc6acde92cab2db95ce30a70bd2b81d20b12eff3f1aabaffcbe8a93918"},
+]
+
+[package.dependencies]
+protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || 
>4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || 
>4.21.5,<5.0.0.dev0"
+
+[package.extras]
+grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"]
+
 [[package]]
 name = "greenlet"
 version = "2.0.2"
@@ -1694,6 +1959,22 @@ files = [
     {file = "numpy-1.24.4.tar.gz", hash = 
"sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"},
 ]
 
+[[package]]
+name = "oauthlib"
+version = "3.2.2"
+description = "A generic, spec-compliant, thorough implementation of the OAuth 
request-signing logic"
+optional = true
+python-versions = ">=3.6"
+files = [
+    {file = "oauthlib-3.2.2-py3-none-any.whl", hash = 
"sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"},
+    {file = "oauthlib-3.2.2.tar.gz", hash = 
"sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"},
+]
+
+[package.extras]
+rsa = ["cryptography (>=3.0.0)"]
+signals = ["blinker (>=1.4.0)"]
+signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
+
 [[package]]
 name = "packaging"
 version = "23.1"
@@ -1978,6 +2259,31 @@ files = [
 [package.dependencies]
 numpy = ">=1.16.6"
 
+[[package]]
+name = "pyasn1"
+version = "0.5.0"
+description = "Pure-Python implementation of ASN.1 types and DER/BER/CER 
codecs (X.208)"
+optional = true
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+files = [
+    {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = 
"sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"},
+    {file = "pyasn1-0.5.0.tar.gz", hash = 
"sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"},
+]
+
+[[package]]
+name = "pyasn1-modules"
+version = "0.3.0"
+description = "A collection of ASN.1-based protocols modules"
+optional = true
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+files = [
+    {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = 
"sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"},
+    {file = "pyasn1_modules-0.3.0.tar.gz", hash = 
"sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"},
+]
+
+[package.dependencies]
+pyasn1 = ">=0.4.6,<0.6.0"
+
 [[package]]
 name = "pycparser"
 version = "2.21"
@@ -2327,26 +2633,21 @@ files = [
     {file = "ray-2.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:120631ce1e6206ceb0395aeca5c1dcef4b0b65dd0a0cd53053e131254da96f0a"},
     {file = "ray-2.6.2-cp310-cp310-manylinux2014_aarch64.whl", hash = 
"sha256:8aeef75db0d9922f69a6ab31ffd3df3f40fff404c30bfb4ca76e480c53df98ce"},
     {file = "ray-2.6.2-cp310-cp310-manylinux2014_x86_64.whl", hash = 
"sha256:050d3c2ac71a8ca7779c7b590a91400f45e071b298b67727949ffdcc096406e0"},
-    {file = "ray-2.6.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:38a23c417b5e96532eee9480ccaebdde1af7fb2f9d884155c5aef894aa7dda25"},
     {file = "ray-2.6.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = 
"sha256:2356454ec63135b6dee3e46b091c76b7daec09bae05aa943a767f884377acda8"},
     {file = "ray-2.6.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:e924173198e2d37e29baa7b7893221594139442e9a0b0334fa80f1c8a7b5a6ed"},
     {file = "ray-2.6.2-cp311-cp311-manylinux2014_aarch64.whl", hash = 
"sha256:7ee8afffa1c971a71570b6a98de0c69d83c99423a97d29000aeabb706cc1baab"},
     {file = "ray-2.6.2-cp311-cp311-manylinux2014_x86_64.whl", hash = 
"sha256:2fa25cf5071082c386dbc086f917f6a4a9f29980eff2cb94d939fd23dd16735a"},
-    {file = "ray-2.6.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:58e632c7cc69b560b46826c454777cf44518a09fa5e3cf275e0aba167bf49cca"},
     {file = "ray-2.6.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = 
"sha256:8ecac78203045ef33236ec913775fe9f30d5f3ebe7ee090276c05a3a1fc1ba31"},
     {file = "ray-2.6.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = 
"sha256:267b52ed13422f9bc57a7ae89289d2cdb6174fd27c1ea842a2618b2d054cf795"},
     {file = "ray-2.6.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = 
"sha256:a0507f49113d98eea24903b5ef384e631997ebfd4934b94ce13c15fbb0adb0c4"},
-    {file = "ray-2.6.2-cp37-cp37m-win_amd64.whl", hash = 
"sha256:4c919861433cb70d1e8b8605eda8319780b8f3c07c1abe73afba15aad5aea54f"},
     {file = "ray-2.6.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = 
"sha256:ff3d5ea1057c4d57bae9b921df0bdd98263c91d0fe7a4221a179bb9034005795"},
     {file = "ray-2.6.2-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:302213def2b0d9fd2039d14b992a2dffa5b4db36ac8d154a219d8bd8e580d694"},
     {file = "ray-2.6.2-cp38-cp38-manylinux2014_aarch64.whl", hash = 
"sha256:8f34ddd7012b5908d19a12e452138e941d83e38a1fbce2db4545c281957bbda5"},
     {file = "ray-2.6.2-cp38-cp38-manylinux2014_x86_64.whl", hash = 
"sha256:9a4cdabff16caaed76e9b7f2a9d94cfbae30073b6dc8956f55d70b99e218d1dc"},
-    {file = "ray-2.6.2-cp38-cp38-win_amd64.whl", hash = 
"sha256:a4b405e3595993db91703e9847fe51e41fb98c3181ee904d673da82e25fe7e86"},
     {file = "ray-2.6.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = 
"sha256:86b57fb864a9328971d7e554e0c55608735e94434f13948a58bb07423c783a39"},
     {file = "ray-2.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:21608b9dbc19175fbb1832ec9296e7c93cf416d28b0e21ee7e2667da7af952b7"},
     {file = "ray-2.6.2-cp39-cp39-manylinux2014_aarch64.whl", hash = 
"sha256:4ab10849705f41923ec1ccf597f3881c9f3f304c43da3e6f5b794c2072694f77"},
     {file = "ray-2.6.2-cp39-cp39-manylinux2014_x86_64.whl", hash = 
"sha256:3f359fdb88406528c564e66dbe848ab98679ddb046d44465ebd3933fa1ecdd62"},
-    {file = "ray-2.6.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:d23a9059d84b416df135512d224cfbe4ed45de8fc727eb6c616468ab9dbdd7bd"},
 ]
 
 [package.dependencies]
@@ -2437,6 +2738,24 @@ six = "*"
 fixture = ["fixtures"]
 test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", 
"testtools"]
 
+[[package]]
+name = "requests-oauthlib"
+version = "1.3.1"
+description = "OAuthlib authentication support for Requests."
+optional = true
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+    {file = "requests-oauthlib-1.3.1.tar.gz", hash = 
"sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"},
+    {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = 
"sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"},
+]
+
+[package.dependencies]
+oauthlib = ">=3.0.0"
+requests = ">=2.0.0"
+
+[package.extras]
+rsa = ["oauthlib[signedtoken] (>=3.0.0)"]
+
 [[package]]
 name = "responses"
 version = "0.23.3"
@@ -2582,6 +2901,20 @@ files = [
     {file = "rpds_py-0.9.2.tar.gz", hash = 
"sha256:8d70e8f14900f2657c249ea4def963bed86a29b81f81f5b76b5a9215680de945"},
 ]
 
+[[package]]
+name = "rsa"
+version = "4.9"
+description = "Pure-Python RSA implementation"
+optional = true
+python-versions = ">=3.6,<4"
+files = [
+    {file = "rsa-4.9-py3-none-any.whl", hash = 
"sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
+    {file = "rsa-4.9.tar.gz", hash = 
"sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
+]
+
+[package.dependencies]
+pyasn1 = ">=0.1.3"
+
 [[package]]
 name = "s3fs"
 version = "2023.6.0"
@@ -2829,13 +3162,13 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
 
 [[package]]
 name = "virtualenv"
-version = "20.24.2"
+version = "20.24.3"
 description = "Virtual Python Environment builder"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "virtualenv-20.24.2-py3-none-any.whl", hash = 
"sha256:43a3052be36080548bdee0b42919c88072037d50d56c28bd3f853cbe92b953ff"},
-    {file = "virtualenv-20.24.2.tar.gz", hash = 
"sha256:fd8a78f46f6b99a67b7ec5cf73f92357891a7b3a40fd97637c27f854aae3b9e0"},
+    {file = "virtualenv-20.24.3-py3-none-any.whl", hash = 
"sha256:95a6e9398b4967fbcb5fef2acec5efaf9aa4972049d9ae41f95e0972a683fd02"},
+    {file = "virtualenv-20.24.3.tar.gz", hash = 
"sha256:e5c3b4ce817b0b328af041506a2a299418c98747c4b1e68cb7527e74ced23efc"},
 ]
 
 [package.dependencies]
@@ -2849,13 +3182,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", 
"coverage-enable-subprocess
 
 [[package]]
 name = "werkzeug"
-version = "2.3.6"
+version = "2.3.7"
 description = "The comprehensive WSGI web application library."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "Werkzeug-2.3.6-py3-none-any.whl", hash = 
"sha256:935539fa1413afbb9195b24880778422ed620c0fc09670945185cce4d91a8890"},
-    {file = "Werkzeug-2.3.6.tar.gz", hash = 
"sha256:98c774df2f91b05550078891dee5f0eb0cb797a522c757a2452b9cee5b202330"},
+    {file = "werkzeug-2.3.7-py3-none-any.whl", hash = 
"sha256:effc12dba7f3bd72e605ce49807bbe692bd729c3bb122a3b91747a6ae77df528"},
+    {file = "werkzeug-2.3.7.tar.gz", hash = 
"sha256:2b8c0e447b4b9dbcc85dd97b6eeb4dcbaf6c8b6c3be0bd654e25553e0a2157d8"},
 ]
 
 [package.dependencies]
@@ -3123,6 +3456,7 @@ cffi = ["cffi (>=1.11)"]
 adlfs = ["adlfs"]
 duckdb = ["duckdb", "pyarrow"]
 dynamodb = ["boto3"]
+gcsfs = ["gcsfs"]
 glue = ["boto3"]
 hive = ["thrift"]
 pandas = ["pandas", "pyarrow"]
@@ -3136,4 +3470,4 @@ zstandard = ["zstandard"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.8"
-content-hash = 
"48903baf7101382f1097f131a34fc1c491a929e6eb7dc955f31cbb579ae8fbde"
+content-hash = 
"e547ce429123b5ec2bd378b2a61fb8e5a9650a041faf32367109d4855dedbb81"
diff --git a/python/pyiceberg/io/__init__.py b/python/pyiceberg/io/__init__.py
index b611a91d24..ae8cafccaa 100644
--- a/python/pyiceberg/io/__init__.py
+++ b/python/pyiceberg/io/__init__.py
@@ -55,6 +55,17 @@ HDFS_HOST = "hdfs.host"
 HDFS_PORT = "hdfs.port"
 HDFS_USER = "hdfs.user"
 HDFS_KERB_TICKET = "hdfs.kerberos_ticket"
+GCS_TOKEN = "gcs.oauth2.token"
+GCS_TOKEN_EXPIRES_AT_MS = "gcs.oauth2.token-expires-at"
+GCS_PROJECT_ID = "gcs.project-id"
+GCS_ACCESS = "gcs.access"
+GCS_CONSISTENCY = "gcs.consistency"
+GCS_CACHE_TIMEOUT = "gcs.cache-timeout"
+GCS_REQUESTER_PAYS = "gcs.requester-pays"
+GCS_SESSION_KWARGS = "gcs.session-kwargs"
+GCS_ENDPOINT = "gcs.endpoint"
+GCS_DEFAULT_LOCATION = "gcs.default-bucket-location"
+GCS_VERSION_AWARE = "gcs.version-aware"
 
 
 @runtime_checkable
diff --git a/python/pyiceberg/io/fsspec.py b/python/pyiceberg/io/fsspec.py
index 65472904d0..719e743333 100644
--- a/python/pyiceberg/io/fsspec.py
+++ b/python/pyiceberg/io/fsspec.py
@@ -16,6 +16,7 @@
 # under the License.
 """FileIO implementation for reading and writing table files that uses fsspec 
compatible filesystems."""
 import errno
+import json
 import logging
 import os
 from functools import lru_cache, partial
@@ -37,6 +38,16 @@ from requests import HTTPError
 from pyiceberg.catalog import TOKEN
 from pyiceberg.exceptions import SignError
 from pyiceberg.io import (
+    GCS_ACCESS,
+    GCS_CACHE_TIMEOUT,
+    GCS_CONSISTENCY,
+    GCS_DEFAULT_LOCATION,
+    GCS_ENDPOINT,
+    GCS_PROJECT_ID,
+    GCS_REQUESTER_PAYS,
+    GCS_SESSION_KWARGS,
+    GCS_TOKEN,
+    GCS_VERSION_AWARE,
     S3_ACCESS_KEY_ID,
     S3_ENDPOINT,
     S3_PROXY_URI,
@@ -124,6 +135,24 @@ def _s3(properties: Properties) -> AbstractFileSystem:
     return fs
 
 
+def _gs(properties: Properties) -> AbstractFileSystem:
+    # https://gcsfs.readthedocs.io/en/latest/api.html#gcsfs.core.GCSFileSystem
+    from gcsfs import GCSFileSystem
+
+    return GCSFileSystem(
+        project=properties.get(GCS_PROJECT_ID),
+        access=properties.get(GCS_ACCESS, "full_control"),
+        token=properties.get(GCS_TOKEN),
+        consistency=properties.get(GCS_CONSISTENCY, "none"),
+        cache_timeout=properties.get(GCS_CACHE_TIMEOUT),
+        requester_pays=properties.get(GCS_REQUESTER_PAYS, False),
+        session_kwargs=json.loads(properties.get(GCS_SESSION_KWARGS, "{}")),
+        endpoint_url=properties.get(GCS_ENDPOINT),
+        default_location=properties.get(GCS_DEFAULT_LOCATION),
+        version_aware=properties.get(GCS_VERSION_AWARE, "false").lower() == 
"true",
+    )
+
+
 def _adlfs(properties: Properties) -> AbstractFileSystem:
     from adlfs import AzureBlobFileSystem
 
@@ -145,6 +174,8 @@ SCHEME_TO_FS = {
     "s3n": _s3,
     "abfs": _adlfs,
     "abfss": _adlfs,
+    "gs": _gs,
+    "gcs": _gs,
 }
 
 
diff --git a/python/pyiceberg/io/pyarrow.py b/python/pyiceberg/io/pyarrow.py
index fba16f9992..eb48b34500 100644
--- a/python/pyiceberg/io/pyarrow.py
+++ b/python/pyiceberg/io/pyarrow.py
@@ -57,6 +57,7 @@ from pyarrow.fs import (
     FileSystem,
     FileType,
     FSSpecHandler,
+    GcsFileSystem,
     HadoopFileSystem,
     LocalFileSystem,
     PyFileSystem,
@@ -78,6 +79,10 @@ from pyiceberg.expressions.visitors import (
 )
 from pyiceberg.expressions.visitors import visit as boolean_expression_visit
 from pyiceberg.io import (
+    GCS_DEFAULT_LOCATION,
+    GCS_ENDPOINT,
+    GCS_TOKEN,
+    GCS_TOKEN_EXPIRES_AT_MS,
     HDFS_HOST,
     HDFS_KERB_TICKET,
     HDFS_PORT,
@@ -129,6 +134,7 @@ from pyiceberg.types import (
     UUIDType,
 )
 from pyiceberg.utils.concurrent import ManagedThreadPoolExecutor, Synchronized
+from pyiceberg.utils.datetime import millis_to_datetime
 from pyiceberg.utils.singleton import Singleton
 
 if TYPE_CHECKING:
@@ -313,6 +319,19 @@ class PyArrowFileIO(FileIO):
                 "kerb_ticket": self.properties.get(HDFS_KERB_TICKET),
             }
             return HadoopFileSystem(**client_kwargs)
+        elif scheme in {"gs", "gcs"}:
+            gcs_kwargs: Dict[str, Any] = {}
+            if access_token := self.properties.get(GCS_TOKEN):
+                gcs_kwargs["access_token"] = access_token
+            if expiration := self.properties.get(GCS_TOKEN_EXPIRES_AT_MS):
+                gcs_kwargs["credential_token_expiration"] = 
millis_to_datetime(int(expiration))
+            if bucket_location := self.properties.get(GCS_DEFAULT_LOCATION):
+                gcs_kwargs["default_bucket_location"] = bucket_location
+            if endpoint := self.properties.get(GCS_ENDPOINT):
+                url_parts = urlparse(endpoint)
+                gcs_kwargs["scheme"] = url_parts.scheme
+                gcs_kwargs["endpoint_override"] = url_parts.netloc
+            return GcsFileSystem(**gcs_kwargs)
         elif scheme == "file":
             return LocalFileSystem()
         else:
diff --git a/python/pyiceberg/utils/datetime.py 
b/python/pyiceberg/utils/datetime.py
index f43d0fa84b..1e3523ce64 100644
--- a/python/pyiceberg/utils/datetime.py
+++ b/python/pyiceberg/utils/datetime.py
@@ -99,6 +99,12 @@ def datetime_to_millis(dt: datetime) -> int:
     return (delta.days * 86400 + delta.seconds) * 1_000 + delta.microseconds 
// 1_000
 
 
+def millis_to_datetime(millis: int) -> datetime:
+    """Converts milliseconds from epoch to a timestamp."""
+    dt = timedelta(milliseconds=millis)
+    return EPOCH_TIMESTAMP + dt
+
+
 def timestamptz_to_micros(timestamptz_str: str) -> int:
     """Converts an ISO-8601 formatted timestamp with zone to microseconds from 
1970-01-01T00:00:00.000000+00:00."""
     if ISO_TIMESTAMPTZ.fullmatch(timestamptz_str):
diff --git a/python/pyproject.toml b/python/pyproject.toml
index 9f5c3ef190..e27e10da3f 100644
--- a/python/pyproject.toml
+++ b/python/pyproject.toml
@@ -65,6 +65,7 @@ thrift = { version = ">=0.13.0,<1.0.0", optional = true }
 boto3 = { version = ">=1.17.106", optional = true }
 s3fs = { version = ">=2021.08.0,<2024.1.0", optional = true } # Upper bound 
set arbitrarily, to be reassessed in early 2024.
 adlfs = { version = ">=2021.07.0,<2024.1.0", optional = true } # Upper bound 
set arbitrarily, to be reassessed in early 2024.
+gcsfs = { version = ">=2022.8.2,<2024.1.0", optional = true }
 psycopg2-binary = { version = ">=2.9.6", optional = true }
 sqlalchemy = { version = "^2.0.18", optional = true }
 
@@ -108,12 +109,14 @@ adlfs = ["adlfs"]
 dynamodb = ["boto3"]
 zstandard = ["zstandard"]
 sql-postgres = ["sqlalchemy", "psycopg2-binary"]
+gcsfs = ["gcsfs"]
 
 [tool.pytest.ini_options]
 markers = [
-  "s3: marks a test as requiring access to s3 compliant storage (use with 
--aws-access-key-id, --aws-secret-access-key, and --endpoint-url args)",
+  "s3: marks a test as requiring access to s3 compliant storage (use with 
--aws-access-key-id, --aws-secret-access-key, and --endpoint args)",
   "adlfs: marks a test as requiring access to adlfs compliant storage (use 
with --adlfs.account-name, --adlfs.account-key, and --adlfs.endpoint args)",
-  "integration: marks integration tests against Apache Spark"
+  "integration: marks integration tests against Apache Spark",
+  "gcs: marks a test as requiring access to gcs compliant storage (use with --gcs.oauth2.token, --gcs.project-id, and --gcs.endpoint args)"
 ]
 
 [tool.black]
@@ -207,6 +210,10 @@ ignore_missing_imports = true
 module = "adlfs.*"
 ignore_missing_imports = true
 
+[[tool.mypy.overrides]]
+module = "gcsfs.*"
+ignore_missing_imports = true
+
 [[tool.mypy.overrides]]
 module = "packaging.*"
 ignore_missing_imports = true
diff --git a/python/tests/conftest.py b/python/tests/conftest.py
index e5d0860489..e0e2edc0b8 100644
--- a/python/tests/conftest.py
+++ b/python/tests/conftest.py
@@ -28,6 +28,7 @@ import os
 import re
 import string
 import uuid
+from datetime import datetime
 from random import choice
 from tempfile import TemporaryDirectory
 from typing import (
@@ -56,7 +57,15 @@ from pyarrow import parquet as pq
 
 from pyiceberg import schema
 from pyiceberg.catalog import Catalog
-from pyiceberg.io import OutputFile, OutputStream, fsspec
+from pyiceberg.io import (
+    GCS_ENDPOINT,
+    GCS_PROJECT_ID,
+    GCS_TOKEN,
+    GCS_TOKEN_EXPIRES_AT_MS,
+    OutputFile,
+    OutputStream,
+    fsspec,
+)
 from pyiceberg.io.fsspec import FsspecFileIO
 from pyiceberg.io.pyarrow import PyArrowFile, PyArrowFileIO
 from pyiceberg.manifest import DataFile, FileFormat
@@ -78,6 +87,7 @@ from pyiceberg.types import (
     StringType,
     StructType,
 )
+from pyiceberg.utils.datetime import datetime_to_millis
 
 
 def pytest_collection_modifyitems(items: List[pytest.Item]) -> None:
@@ -113,6 +123,13 @@ def pytest_addoption(parser: pytest.Parser) -> None:
         
default="Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==",
         help="The ADLS secret account key for tests marked as adlfs",
     )
+    parser.addoption(
+        "--gcs.endpoint", action="store", default="http://0.0.0.0:4443", help="The GCS endpoint URL for tests marked gcs"
+    )
+    parser.addoption(
+        "--gcs.oauth2.token", action="store", default="anon", help="The GCS 
authentication method for tests marked gcs"
+    )
+    parser.addoption("--gcs.project-id", action="store", default="test", 
help="The GCP project for tests marked gcs")
 
 
 @pytest.fixture(scope="session")
@@ -1297,6 +1314,27 @@ def fsspec_fileio(request: pytest.FixtureRequest) -> 
FsspecFileIO:
     return fsspec.FsspecFileIO(properties=properties)
 
 
+@pytest.fixture
+def fsspec_fileio_gcs(request: pytest.FixtureRequest) -> FsspecFileIO:
+    properties = {
+        GCS_ENDPOINT: request.config.getoption("--gcs.endpoint"),
+        GCS_TOKEN: request.config.getoption("--gcs.oauth2.token"),
+        GCS_PROJECT_ID: request.config.getoption("--gcs.project-id"),
+    }
+    return fsspec.FsspecFileIO(properties=properties)
+
+
+@pytest.fixture
+def pyarrow_fileio_gcs(request: pytest.FixtureRequest) -> PyArrowFileIO:
+    properties = {
+        GCS_ENDPOINT: request.config.getoption("--gcs.endpoint"),
+        GCS_TOKEN: request.config.getoption("--gcs.oauth2.token"),
+        GCS_PROJECT_ID: request.config.getoption("--gcs.project-id"),
+        GCS_TOKEN_EXPIRES_AT_MS: datetime_to_millis(datetime.now()) + 60 * 
1000,
+    }
+    return PyArrowFileIO(properties=properties)
+
+
 class MockAWSResponse(aiobotocore.awsrequest.AioAWSResponse):
     """A mocked aws response implementation (for test use only).
 
diff --git a/python/tests/io/test_fsspec.py b/python/tests/io/test_fsspec.py
index a92e383e79..f83268b56f 100644
--- a/python/tests/io/test_fsspec.py
+++ b/python/tests/io/test_fsspec.py
@@ -387,6 +387,182 @@ def 
test_writing_avro_file_adlfs(generated_manifest_entry_file: str, adlfs_fsspe
     adlfs_fsspec_fileio.delete(f"abfss://tests/{filename}")
 
 
+@pytest.mark.gcs
+def test_fsspec_new_input_file_gcs(fsspec_fileio_gcs: FsspecFileIO) -> None:
+    """Test creating a new input file from a fsspec file-io"""
+    location = f"gs://warehouse/{uuid.uuid4()}.txt"
+
+    input_file = fsspec_fileio_gcs.new_input(location=location)
+
+    assert isinstance(input_file, fsspec.FsspecInputFile)
+    assert input_file.location == location
+
+
+@pytest.mark.gcs
+def test_fsspec_new_output_file_gcs(fsspec_fileio_gcs: FsspecFileIO) -> None:
+    """Test creating a new output file from an fsspec file-io"""
+    location = f"gs://warehouse/{uuid.uuid4()}.txt"
+
+    output_file = fsspec_fileio_gcs.new_output(location=location)
+
+    assert isinstance(output_file, fsspec.FsspecOutputFile)
+    assert output_file.location == location
+
+
+@pytest.mark.gcs
+def test_fsspec_write_and_read_file_gcs(fsspec_fileio_gcs: FsspecFileIO) -> 
None:
+    """Test writing and reading a file using FsspecInputFile and 
FsspecOutputFile"""
+    location = f"gs://warehouse/{uuid.uuid4()}.txt"
+    output_file = fsspec_fileio_gcs.new_output(location=location)
+    with output_file.create() as f:
+        f.write(b"foo")
+
+    input_file = fsspec_fileio_gcs.new_input(location)
+    with input_file.open() as f:
+        assert f.read() == b"foo"
+
+    fsspec_fileio_gcs.delete(input_file)
+
+
+@pytest.mark.gcs
+def test_fsspec_getting_length_of_file_gcs(fsspec_fileio_gcs: FsspecFileIO) -> 
None:
+    """Test getting the length of an FsspecInputFile and FsspecOutputFile"""
+    location = f"gs://warehouse/{uuid.uuid4()}.txt"
+
+    output_file = fsspec_fileio_gcs.new_output(location=location)
+    with output_file.create() as f:
+        f.write(b"foobar")
+
+    assert len(output_file) == 6
+
+    input_file = fsspec_fileio_gcs.new_input(location=location)
+    assert len(input_file) == 6
+
+    fsspec_fileio_gcs.delete(output_file)
+
+
+@pytest.mark.gcs
+def test_fsspec_file_tell_gcs(fsspec_fileio_gcs: FsspecFileIO) -> None:
+    """Test finding cursor position for an fsspec file-io file"""
+    location = f"gs://warehouse/{uuid.uuid4()}.txt"
+
+    output_file = fsspec_fileio_gcs.new_output(location=location)
+    with output_file.create() as write_file:
+        write_file.write(b"foobar")
+
+    input_file = fsspec_fileio_gcs.new_input(location=location)
+    with input_file.open() as f:
+        f.seek(0)
+        assert f.tell() == 0
+        f.seek(1)
+        assert f.tell() == 1
+        f.seek(3)
+        assert f.tell() == 3
+        f.seek(0)
+        assert f.tell() == 0
+
+
+@pytest.mark.gcs
+def test_fsspec_read_specified_bytes_for_file_gcs(fsspec_fileio_gcs: 
FsspecFileIO) -> None:
+    """Test reading a specified number of bytes from a fsspec file-io file"""
+    location = f"gs://warehouse/{uuid.uuid4()}.txt"
+
+    output_file = fsspec_fileio_gcs.new_output(location=location)
+    with output_file.create() as write_file:
+        write_file.write(b"foo")
+
+    input_file = fsspec_fileio_gcs.new_input(location=location)
+    with input_file.open() as f:
+        f.seek(0)
+        assert b"f" == f.read(1)
+        f.seek(0)
+        assert b"fo" == f.read(2)
+        f.seek(1)
+        assert b"o" == f.read(1)
+        f.seek(1)
+        assert b"oo" == f.read(2)
+        f.seek(0)
+        assert b"foo" == f.read(999)  # test reading amount larger than entire 
content length
+
+    fsspec_fileio_gcs.delete(input_file)
+
+
+@pytest.mark.gcs
+def test_fsspec_raise_on_opening_file_not_found_gcs(fsspec_fileio_gcs: 
FsspecFileIO) -> None:
+    """Test that a fsspec input file raises appropriately when the gcs file is 
not found"""
+    location = f"gs://warehouse/{uuid.uuid4()}.txt"
+    input_file = fsspec_fileio_gcs.new_input(location=location)
+    with pytest.raises(FileNotFoundError) as exc_info:
+        input_file.open().read()
+
+    assert location in str(exc_info.value)
+
+
+@pytest.mark.gcs
+def test_checking_if_a_file_exists_gcs(fsspec_fileio_gcs: FsspecFileIO) -> 
None:
+    """Test checking if a file exists"""
+
+    non_existent_file = 
fsspec_fileio_gcs.new_input(location="gs://warehouse/does-not-exist.txt")
+    assert not non_existent_file.exists()
+
+    location = f"gs://warehouse/{uuid.uuid4()}.txt"
+    output_file = fsspec_fileio_gcs.new_output(location=location)
+    assert not output_file.exists()
+    with output_file.create() as f:
+        f.write(b"foo")
+
+    existing_input_file = fsspec_fileio_gcs.new_input(location=location)
+    assert existing_input_file.exists()
+
+    existing_output_file = fsspec_fileio_gcs.new_output(location=location)
+    assert existing_output_file.exists()
+
+    fsspec_fileio_gcs.delete(existing_output_file)
+
+
+@pytest.mark.gcs
+def test_closing_a_file_gcs(fsspec_fileio_gcs: FsspecFileIO) -> None:
+    """Test closing an output file and input file"""
+    location = f"gs://warehouse/{uuid.uuid4()}.txt"
+    output_file = fsspec_fileio_gcs.new_output(location=location)
+    with output_file.create() as write_file:
+        write_file.write(b"foo")
+        assert not write_file.closed  # type: ignore
+    assert write_file.closed  # type: ignore
+
+    input_file = fsspec_fileio_gcs.new_input(location=location)
+    f = input_file.open()
+    assert not f.closed  # type: ignore
+    f.close()
+    assert f.closed  # type: ignore
+
+    fsspec_fileio_gcs.delete(location=location)
+
+
+@pytest.mark.gcs
+def test_fsspec_converting_an_outputfile_to_an_inputfile_gcs(fsspec_fileio_gcs: FsspecFileIO) -> None:
+    """Test converting an output file to an input file"""
+    filename = str(uuid.uuid4())
+    output_file = fsspec_fileio_gcs.new_output(location=f"gs://warehouse/{filename}")
+    input_file = output_file.to_input_file()
+    assert input_file.location == output_file.location
+
+
+@pytest.mark.gcs
+def test_writing_avro_file_gcs(generated_manifest_entry_file: str, fsspec_fileio_gcs: FsspecFileIO) -> None:
+    """Test that bytes match when reading a local avro file, writing it using fsspec file-io, and then reading it again"""
+    filename = str(uuid.uuid4())
+    with PyArrowFileIO().new_input(location=generated_manifest_entry_file).open() as f:
+        b1 = f.read()
+        with fsspec_fileio_gcs.new_output(location=f"gs://warehouse/{filename}").create() as out_f:
+            out_f.write(b1)
+        with fsspec_fileio_gcs.new_input(location=f"gs://warehouse/{filename}").open() as in_f:
+            b2 = in_f.read()
+            assert b1 == b2  # Check that bytes read from the local avro file match the bytes written to GCS
+
+    fsspec_fileio_gcs.delete(f"gs://warehouse/{filename}")
+
+
 TEST_URI = "https://iceberg-test-signer"
 
 
diff --git a/python/tests/io/test_pyarrow.py b/python/tests/io/test_pyarrow.py
index 01a8f35b1d..0d0719c152 100644
--- a/python/tests/io/test_pyarrow.py
+++ b/python/tests/io/test_pyarrow.py
@@ -20,6 +20,7 @@ import os
 import tempfile
 from typing import Any, List, Optional
 from unittest.mock import MagicMock, patch
+from uuid import uuid4
 
 import pyarrow as pa
 import pyarrow.parquet as pq
@@ -1345,3 +1346,184 @@ foo: [["a","b","c"]]
 bar: [[1,2,3]]
 baz: [[true,false,null]]"""
     )
+
+
+@pytest.mark.gcs
+def test_new_input_file_gcs(pyarrow_fileio_gcs: PyArrowFileIO) -> None:
+    """Test creating a new input file from a pyarrow file-io"""
+    filename = str(uuid4())
+
+    input_file = pyarrow_fileio_gcs.new_input(f"gs://warehouse/{filename}")
+
+    assert isinstance(input_file, PyArrowFile)
+    assert input_file.location == f"gs://warehouse/{filename}"
+
+
+@pytest.mark.gcs
+def test_new_output_file_gcs(pyarrow_fileio_gcs: PyArrowFileIO) -> None:
+    """Test creating a new output file from a pyarrow file-io"""
+    filename = str(uuid4())
+
+    output_file = pyarrow_fileio_gcs.new_output(f"gs://warehouse/{filename}")
+
+    assert isinstance(output_file, PyArrowFile)
+    assert output_file.location == f"gs://warehouse/{filename}"
+
+
+@pytest.mark.gcs
+@pytest.mark.skip(reason="Open issue on Arrow: https://github.com/apache/arrow/issues/36993")
+def test_write_and_read_file_gcs(pyarrow_fileio_gcs: PyArrowFileIO) -> None:
+    """Test writing and reading a file using PyArrowFile input and output files"""
+    location = f"gs://warehouse/{uuid4()}.txt"
+    output_file = pyarrow_fileio_gcs.new_output(location=location)
+    with output_file.create() as f:
+        assert f.write(b"foo") == 3
+
+    assert output_file.exists()
+
+    input_file = pyarrow_fileio_gcs.new_input(location=location)
+    with input_file.open() as f:
+        assert f.read() == b"foo"
+
+    pyarrow_fileio_gcs.delete(input_file)
+
+
+@pytest.mark.gcs
+def test_getting_length_of_file_gcs(pyarrow_fileio_gcs: PyArrowFileIO) -> None:
+    """Test getting the length of a PyArrowFile"""
+    filename = str(uuid4())
+
+    output_file = pyarrow_fileio_gcs.new_output(location=f"gs://warehouse/{filename}")
+    with output_file.create() as f:
+        f.write(b"foobar")
+
+    assert len(output_file) == 6
+
+    input_file = pyarrow_fileio_gcs.new_input(location=f"gs://warehouse/{filename}")
+    assert len(input_file) == 6
+
+    pyarrow_fileio_gcs.delete(output_file)
+
+
+@pytest.mark.gcs
+@pytest.mark.skip(reason="Open issue on Arrow: https://github.com/apache/arrow/issues/36993")
+def test_file_tell_gcs(pyarrow_fileio_gcs: PyArrowFileIO) -> None:
+    location = f"gs://warehouse/{uuid4()}"
+
+    output_file = pyarrow_fileio_gcs.new_output(location=location)
+    with output_file.create() as write_file:
+        write_file.write(b"foobar")
+
+    input_file = pyarrow_fileio_gcs.new_input(location=location)
+    with input_file.open() as f:
+        f.seek(0)
+        assert f.tell() == 0
+        f.seek(1)
+        assert f.tell() == 1
+        f.seek(3)
+        assert f.tell() == 3
+        f.seek(0)
+        assert f.tell() == 0
+
+
+@pytest.mark.gcs
+@pytest.mark.skip(reason="Open issue on Arrow: https://github.com/apache/arrow/issues/36993")
+def test_read_specified_bytes_for_file_gcs(pyarrow_fileio_gcs: PyArrowFileIO) 
-> None:
+    location = f"gs://warehouse/{uuid4()}"
+
+    output_file = pyarrow_fileio_gcs.new_output(location=location)
+    with output_file.create() as write_file:
+        write_file.write(b"foo")
+
+    input_file = pyarrow_fileio_gcs.new_input(location=location)
+    with input_file.open() as f:
+        f.seek(0)
+        assert b"f" == f.read(1)
+        f.seek(0)
+        assert b"fo" == f.read(2)
+        f.seek(1)
+        assert b"o" == f.read(1)
+        f.seek(1)
+        assert b"oo" == f.read(2)
+        f.seek(0)
+        assert b"foo" == f.read(999)  # test reading amount larger than entire 
content length
+
+    pyarrow_fileio_gcs.delete(input_file)
+
+
+@pytest.mark.gcs
+@pytest.mark.skip(reason="Open issue on Arrow: https://github.com/apache/arrow/issues/36993")
+def test_raise_on_opening_file_not_found_gcs(pyarrow_fileio_gcs: PyArrowFileIO) -> None:
+    """Test that a pyarrow input file raises appropriately when the gcs file is not found"""
+
+    filename = str(uuid4())
+    input_file = pyarrow_fileio_gcs.new_input(location=f"gs://warehouse/{filename}")
+    with pytest.raises(FileNotFoundError) as exc_info:
+        input_file.open().read()
+
+    assert filename in str(exc_info.value)
+
+
+@pytest.mark.gcs
+def test_checking_if_a_file_exists_gcs(pyarrow_fileio_gcs: PyArrowFileIO) -> 
None:
+    """Test checking if a file exists"""
+    non_existent_file = 
pyarrow_fileio_gcs.new_input(location="gs://warehouse/does-not-exist.txt")
+    assert not non_existent_file.exists()
+
+    location = f"gs://warehouse/{uuid4()}"
+    output_file = pyarrow_fileio_gcs.new_output(location=location)
+    assert not output_file.exists()
+    with output_file.create() as f:
+        f.write(b"foo")
+
+    existing_input_file = pyarrow_fileio_gcs.new_input(location=location)
+    assert existing_input_file.exists()
+
+    existing_output_file = pyarrow_fileio_gcs.new_output(location=location)
+    assert existing_output_file.exists()
+
+    pyarrow_fileio_gcs.delete(existing_output_file)
+
+
+@pytest.mark.gcs
+@pytest.mark.skip(reason="Open issue on Arrow: https://github.com/apache/arrow/issues/36993")
+def test_closing_a_file_gcs(pyarrow_fileio_gcs: PyArrowFileIO) -> None:
+    """Test closing an output file and input file"""
+    filename = str(uuid4())
+    output_file = pyarrow_fileio_gcs.new_output(location=f"gs://warehouse/{filename}")
+    with output_file.create() as write_file:
+        write_file.write(b"foo")
+        assert not write_file.closed  # type: ignore
+    assert write_file.closed  # type: ignore
+
+    input_file = pyarrow_fileio_gcs.new_input(location=f"gs://warehouse/{filename}")
+    with input_file.open() as f:
+        assert not f.closed  # type: ignore
+    assert f.closed  # type: ignore
+
+    pyarrow_fileio_gcs.delete(f"gs://warehouse/{filename}")
+
+
+@pytest.mark.gcs
+def test_converting_an_outputfile_to_an_inputfile_gcs(pyarrow_fileio_gcs: PyArrowFileIO) -> None:
+    """Test converting an output file to an input file"""
+    filename = str(uuid4())
+    output_file = pyarrow_fileio_gcs.new_output(location=f"gs://warehouse/{filename}")
+    input_file = output_file.to_input_file()
+    assert input_file.location == output_file.location
+
+
+@pytest.mark.gcs
+@pytest.mark.skip(reason="Open issue on Arrow: https://github.com/apache/arrow/issues/36993")
+def test_writing_avro_file_gcs(generated_manifest_entry_file: str, pyarrow_fileio_gcs: PyArrowFileIO) -> None:
+    """Test that bytes match when reading a local avro file, writing it using pyarrow file-io, and then reading it again"""
+    filename = str(uuid4())
+    with PyArrowFileIO().new_input(location=generated_manifest_entry_file).open() as f:
+        b1 = f.read()
+        with pyarrow_fileio_gcs.new_output(location=f"gs://warehouse/{filename}").create() as out_f:
+            out_f.write(b1)
+        with pyarrow_fileio_gcs.new_input(location=f"gs://warehouse/{filename}").open() as in_f:
+            b2 = in_f.read()
+            assert b1 == b2  # Check that bytes read from the local avro file match the bytes written to GCS
+
+    pyarrow_fileio_gcs.delete(f"gs://warehouse/{filename}")
diff --git a/python/tests/utils/test_datetime.py 
b/python/tests/utils/test_datetime.py
index 46743399d1..ac7ba54547 100644
--- a/python/tests/utils/test_datetime.py
+++ b/python/tests/utils/test_datetime.py
@@ -19,7 +19,7 @@ from datetime import datetime, timezone, tzinfo
 import pytest
 import pytz
 
-from pyiceberg.utils.datetime import datetime_to_millis
+from pyiceberg.utils.datetime import datetime_to_millis, millis_to_datetime
 
 timezones = [
     pytz.timezone("Etc/GMT"),
@@ -67,3 +67,7 @@ def test_datetime_tz_to_millis(tz: tzinfo) -> None:
     expected = int(dt.timestamp() * 1_000)
     datetime_millis = datetime_to_millis(dt)
     assert datetime_millis == expected
+
+
+def test_millis_to_datetime() -> None:
+    assert millis_to_datetime(1690971805918) == datetime(2023, 8, 2, 10, 23, 
25, 918000)

Reply via email to