This is an automated email from the ASF dual-hosted git repository.

yzheng pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/polaris.git


The following commit(s) were added to refs/heads/main by this push:
     new d03534498 Spark 3.5.6 and Iceberg 1.9.1 (#1960)
d03534498 is described below

commit d03534498b2331a79e77c5eae0f0161f198ad9d6
Author: Yong Zheng <yongzheng0...@gmail.com>
AuthorDate: Wed Jul 16 09:10:40 2025 -0500

    Spark 3.5.6 and Iceberg 1.9.1 (#1960)
    
    * Spark 3.5.6 and Iceberg 1.9.1
    
    * Cleanup
---
 .github/workflows/spark_client_regtests.yml                |  2 +-
 getting-started/eclipselink/docker-compose.yml             |  2 +-
 getting-started/jdbc/docker-compose.yml                    |  2 +-
 getting-started/spark/notebooks/SparkPolaris.ipynb         |  2 +-
 gradle/libs.versions.toml                                  |  2 +-
 plugins/pluginlibs.versions.toml                           |  4 ++--
 plugins/spark/README.md                                    | 14 +++++++-------
 plugins/spark/v3.5/getting-started/README.md               |  2 +-
 plugins/spark/v3.5/getting-started/notebooks/Dockerfile    | 10 +++++-----
 .../v3.5/getting-started/notebooks/SparkPolaris.ipynb      |  2 +-
 plugins/spark/v3.5/regtests/run.sh                         |  2 +-
 plugins/spark/v3.5/regtests/setup.sh                       |  2 +-
 regtests/run.sh                                            |  2 +-
 regtests/run_spark_sql.sh                                  |  2 +-
 regtests/setup.sh                                          |  2 +-
 regtests/t_pyspark/src/iceberg_spark.py                    |  4 ++--
 runtime/admin/distribution/LICENSE                         | 14 +++++++-------
 runtime/distribution/LICENSE                               | 14 +++++++-------
 runtime/server/distribution/LICENSE                        | 14 +++++++-------
 .../in-dev/unreleased/getting-started/using-polaris.md     |  2 +-
 site/content/in-dev/unreleased/polaris-spark-client.md     | 10 +++++-----
 21 files changed, 55 insertions(+), 55 deletions(-)

diff --git a/.github/workflows/spark_client_regtests.yml 
b/.github/workflows/spark_client_regtests.yml
index 6cb6d9347..016d39fec 100644
--- a/.github/workflows/spark_client_regtests.yml
+++ b/.github/workflows/spark_client_regtests.yml
@@ -64,7 +64,7 @@ jobs:
               :polaris-server:quarkusAppPartsBuild --rerun \
               -Dquarkus.container-image.build=true
 
-      # NOTE: the regression test runs with spark 3.5.5 and scala 2.12 in Java 
17. We also have integration
+      # NOTE: the regression test runs with spark 3.5.6 and scala 2.12 in Java 
17. We also have integration
       # tests runs with the existing gradle.yml, which only runs on Java 21. 
Since spark Java compatibility
       # for 3.5 is 8, 11, and 17, we should run spark client with those 
compatible java versions.
       # TODO: add separate spark client CI and run with Java 8, 11 and 17.
diff --git a/getting-started/eclipselink/docker-compose.yml 
b/getting-started/eclipselink/docker-compose.yml
index c438d1fa2..c95dcae29 100644
--- a/getting-started/eclipselink/docker-compose.yml
+++ b/getting-started/eclipselink/docker-compose.yml
@@ -79,7 +79,7 @@ services:
       retries: 15
     command: [
       /opt/spark/bin/spark-sql,
-      --packages, 
"org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.0,org.apache.iceberg:iceberg-aws-bundle:1.9.0,org.apache.iceberg:iceberg-gcp-bundle:1.9.0,org.apache.iceberg:iceberg-azure-bundle:1.9.0",
+      --packages, 
"org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.1,org.apache.iceberg:iceberg-aws-bundle:1.9.1,org.apache.iceberg:iceberg-gcp-bundle:1.9.1,org.apache.iceberg:iceberg-azure-bundle:1.9.1",
       --conf, 
"spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions",
       --conf, 
"spark.sql.catalog.quickstart_catalog=org.apache.iceberg.spark.SparkCatalog",
       --conf, "spark.sql.catalog.quickstart_catalog.type=rest",
diff --git a/getting-started/jdbc/docker-compose.yml 
b/getting-started/jdbc/docker-compose.yml
index 9c089ba2b..c1423fead 100644
--- a/getting-started/jdbc/docker-compose.yml
+++ b/getting-started/jdbc/docker-compose.yml
@@ -81,7 +81,7 @@ services:
       retries: 15
     command: [
       /opt/spark/bin/spark-sql,
-      --packages, 
"org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.0,org.apache.iceberg:iceberg-aws-bundle:1.9.0,org.apache.iceberg:iceberg-gcp-bundle:1.9.0,org.apache.iceberg:iceberg-azure-bundle:1.9.0",
+      --packages, 
"org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.1,org.apache.iceberg:iceberg-aws-bundle:1.9.1,org.apache.iceberg:iceberg-gcp-bundle:1.9.1,org.apache.iceberg:iceberg-azure-bundle:1.9.1",
       --conf, 
"spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions",
       --conf, 
"spark.sql.catalog.polaris=org.apache.iceberg.spark.SparkCatalog",
       --conf, "spark.sql.catalog.polaris.type=rest",
diff --git a/getting-started/spark/notebooks/SparkPolaris.ipynb 
b/getting-started/spark/notebooks/SparkPolaris.ipynb
index 7168efaa6..76e046e7c 100644
--- a/getting-started/spark/notebooks/SparkPolaris.ipynb
+++ b/getting-started/spark/notebooks/SparkPolaris.ipynb
@@ -256,7 +256,7 @@
     "\n",
     "spark = (SparkSession.builder\n",
     "  .config(\"spark.sql.catalog.spark_catalog\", 
\"org.apache.iceberg.spark.SparkSessionCatalog\")\n",
-    "  .config(\"spark.jars.packages\", 
\"org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.0,org.apache.iceberg:iceberg-aws-bundle:1.9.0\")\n",
+    "  .config(\"spark.jars.packages\", 
\"org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.1,org.apache.iceberg:iceberg-aws-bundle:1.9.1\")\n",
     "  .config('spark.sql.iceberg.vectorization.enabled', 'false')\n",
     "         \n",
     "  # Configure the 'polaris' catalog as an Iceberg rest catalog\n",
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index 5fcd40c3b..614da62a3 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -20,7 +20,7 @@
 [versions]
 checkstyle = "10.25.0"
 hadoop = "3.4.1"
-iceberg = "1.9.0" # Ensure to update the iceberg version in regtests to keep 
regtests up-to-date
+iceberg = "1.9.1" # Ensure to update the iceberg version in regtests to keep 
regtests up-to-date
 quarkus = "3.24.3"
 immutables = "2.11.1"
 picocli = "4.7.7"
diff --git a/plugins/pluginlibs.versions.toml b/plugins/pluginlibs.versions.toml
index 0ca3ba5d3..895e286b1 100644
--- a/plugins/pluginlibs.versions.toml
+++ b/plugins/pluginlibs.versions.toml
@@ -18,7 +18,7 @@
 #
 
 [versions]
-iceberg = "1.9.0"
-spark35 = "3.5.5"
+iceberg = "1.9.1"
+spark35 = "3.5.6"
 scala212 = "2.12.19"
 scala213 = "2.13.15"
diff --git a/plugins/spark/README.md b/plugins/spark/README.md
index 9764fc8d1..e925150d6 100644
--- a/plugins/spark/README.md
+++ b/plugins/spark/README.md
@@ -21,12 +21,12 @@
 
 The Polaris Spark plugin provides a SparkCatalog class, which communicates 
with the Polaris
 REST endpoints, and provides implementations for Apache Spark's
-[TableCatalog](https://github.com/apache/spark/blob/v3.5.5/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableCatalog.java),
-[SupportsNamespaces](https://github.com/apache/spark/blob/v3.5.5/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsNamespaces.java),
-[ViewCatalog](https://github.com/apache/spark/blob/v3.5.5/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/ViewCatalog.java)
 classes.
+[TableCatalog](https://github.com/apache/spark/blob/v3.5.6/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableCatalog.java),
+[SupportsNamespaces](https://github.com/apache/spark/blob/v3.5.6/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsNamespaces.java),
+[ViewCatalog](https://github.com/apache/spark/blob/v3.5.6/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/ViewCatalog.java) classes.
 
 Right now, the plugin only provides support for Spark 3.5, Scala version 2.12 
and 2.13,
-and depends on iceberg-spark-runtime 1.9.0.
+and depends on iceberg-spark-runtime 1.9.1.
 
 # Start Spark with local Polaris service using the Polaris Spark plugin
 The following command starts a Polaris server for local testing, it runs on 
localhost:8181 with default
@@ -50,7 +50,7 @@ Run the following command to build the Polaris Spark project 
and publish the sou
 
 ```shell
 bin/spark-shell \
---packages 
org.apache.polaris:polaris-spark-<spark_version>_<scala_version>:<polaris_version>,org.apache.iceberg:iceberg-aws-bundle:1.9.0,io.delta:delta-spark_2.12:3.3.1
 \
+--packages 
org.apache.polaris:polaris-spark-<spark_version>_<scala_version>:<polaris_version>,org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.3.1
 \
 --conf 
spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension
 \
 --conf 
spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog 
\
 --conf spark.sql.catalog.<catalog-name>.warehouse=<catalog-name> \
@@ -73,7 +73,7 @@ The Spark command would look like following:
 
 ```shell
 bin/spark-shell \
---packages 
org.apache.polaris:polaris-spark-3.5_2.12:1.1.0-incubating-SNAPSHOT,org.apache.iceberg:iceberg-aws-bundle:1.9.0,io.delta:delta-spark_2.12:3.3.1
 \
+--packages 
org.apache.polaris:polaris-spark-3.5_2.12:1.1.0-incubating-SNAPSHOT,org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.3.1
 \
 --conf 
spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension
 \
 --conf 
spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog 
\
 --conf spark.sql.catalog.polaris.warehouse=polaris \
@@ -99,7 +99,7 @@ To start Spark using the bundle JAR, specify it with the 
`--jars` option as show
 ```shell
 bin/spark-shell \
 --jars <path-to-spark-client-jar> \
---packages 
org.apache.iceberg:iceberg-aws-bundle:1.9.0,io.delta:delta-spark_2.12:3.3.1 \
+--packages 
org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.3.1 \
 --conf 
spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension
 \
 --conf 
spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog 
\
 --conf spark.sql.catalog.<catalog-name>.warehouse=<catalog-name> \
diff --git a/plugins/spark/v3.5/getting-started/README.md 
b/plugins/spark/v3.5/getting-started/README.md
index d5aa245ba..582bd177a 100644
--- a/plugins/spark/v3.5/getting-started/README.md
+++ b/plugins/spark/v3.5/getting-started/README.md
@@ -52,7 +52,7 @@ This will spin up 2 container services
 * The `polaris` service for running Apache Polaris using an in-memory metastore
 * The `jupyter` service for running Jupyter notebook with PySpark
 
-NOTE: Starting the container first time may take a couple of minutes, because 
it will need to download the Spark 3.5.5.
+NOTE: Starting the container first time may take a couple of minutes, because 
it will need to download the Spark 3.5.6.
 When working with Delta, the Polaris Spark Client requires delta-io >= 3.2.1, 
and it requires at least Spark 3.5.3, 
 but the current jupyter Spark image only support Spark 3.5.0.
 
diff --git a/plugins/spark/v3.5/getting-started/notebooks/Dockerfile 
b/plugins/spark/v3.5/getting-started/notebooks/Dockerfile
index 2236f57bb..3254ebb55 100644
--- a/plugins/spark/v3.5/getting-started/notebooks/Dockerfile
+++ b/plugins/spark/v3.5/getting-started/notebooks/Dockerfile
@@ -24,11 +24,11 @@ ENV LANGUAGE='en_US:en'
 USER root
 
 # Generic table support requires delta 3.2.1
-# Install Spark 3.5.5
-RUN wget -q 
https://www.apache.org/dyn/closer.lua/spark/spark-3.5.5/spark-3.5.5-bin-hadoop3.tgz?action=download
 \
-    && tar -xzf spark-3.5.5-bin-hadoop3.tgz \
-    && mv spark-3.5.5-bin-hadoop3 /opt/spark \
-    && rm spark-3.5.5-bin-hadoop3.tgz
+# Install Spark 3.5.6
+RUN wget -q 
https://www.apache.org/dyn/closer.lua/spark/spark-3.5.6/spark-3.5.6-bin-hadoop3.tgz?action=download
 \
+    && tar -xzf spark-3.5.6-bin-hadoop3.tgz \
+    && mv spark-3.5.6-bin-hadoop3 /opt/spark \
+    && rm spark-3.5.6-bin-hadoop3.tgz
 
 # Set environment variables
 ENV SPARK_HOME=/opt/spark
diff --git a/plugins/spark/v3.5/getting-started/notebooks/SparkPolaris.ipynb 
b/plugins/spark/v3.5/getting-started/notebooks/SparkPolaris.ipynb
index 8974a81e2..baa2e94ba 100644
--- a/plugins/spark/v3.5/getting-started/notebooks/SparkPolaris.ipynb
+++ b/plugins/spark/v3.5/getting-started/notebooks/SparkPolaris.ipynb
@@ -266,7 +266,7 @@
     "\n",
     "spark = (SparkSession.builder\n",
     "  .config(\"spark.jars\", 
\"../polaris_libs/polaris-spark-3.5_2.12-1.1.0-incubating-SNAPSHOT-bundle.jar\")
  # TODO: add a way to automatically discover the Jar\n",
-    "  .config(\"spark.jars.packages\", 
\"org.apache.iceberg:iceberg-aws-bundle:1.9.0,io.delta:delta-spark_2.12:3.2.1\")\n",
+    "  .config(\"spark.jars.packages\", 
\"org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.2.1\")\n",
     "  .config(\"spark.sql.catalog.spark_catalog\", 
\"org.apache.spark.sql.delta.catalog.DeltaCatalog\")\n",
     "  .config('spark.sql.iceberg.vectorization.enabled', 'false')\n",
     "\n",
diff --git a/plugins/spark/v3.5/regtests/run.sh 
b/plugins/spark/v3.5/regtests/run.sh
index 90c2dd755..cc84c0411 100755
--- a/plugins/spark/v3.5/regtests/run.sh
+++ b/plugins/spark/v3.5/regtests/run.sh
@@ -66,7 +66,7 @@ if [[ -n "$CURRENT_SCALA_VERSION" ]]; then
   SCALA_VERSIONS=("${CURRENT_SCALA_VERSION}")
 fi
 SPARK_MAJOR_VERSION="3.5"
-SPARK_VERSION="3.5.5"
+SPARK_VERSION="3.5.6"
 
 SPARK_SHELL_OPTIONS=("PACKAGE" "JAR")
 
diff --git a/plugins/spark/v3.5/regtests/setup.sh 
b/plugins/spark/v3.5/regtests/setup.sh
index 2db6e46ce..1a23d3b5a 100755
--- a/plugins/spark/v3.5/regtests/setup.sh
+++ b/plugins/spark/v3.5/regtests/setup.sh
@@ -36,7 +36,7 @@ set -x
 
 SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
 
-SPARK_VERSION=3.5.5
+SPARK_VERSION=3.5.6
 SCALA_VERSION=2.12
 POLARIS_CLIENT_JAR=""
 POLARIS_VERSION=""
diff --git a/regtests/run.sh b/regtests/run.sh
index 5c65cd1f1..d1472a23c 100755
--- a/regtests/run.sh
+++ b/regtests/run.sh
@@ -20,7 +20,7 @@
 # Run without args to run all tests, or single arg for single test.
 SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
 
-export SPARK_VERSION=spark-3.5.5
+export SPARK_VERSION=spark-3.5.6
 export SPARK_DISTRIBUTION=${SPARK_VERSION}-bin-hadoop3
 
 if [ -z "${SPARK_HOME}" ]; then
diff --git a/regtests/run_spark_sql.sh b/regtests/run_spark_sql.sh
index 8558c9493..4a8eaf904 100755
--- a/regtests/run_spark_sql.sh
+++ b/regtests/run_spark_sql.sh
@@ -46,7 +46,7 @@ fi
 REGTEST_HOME=$(dirname $(realpath $0))
 cd ${REGTEST_HOME}
 
-export SPARK_VERSION=spark-3.5.5
+export SPARK_VERSION=spark-3.5.6
 export SPARK_DISTRIBUTION=${SPARK_VERSION}-bin-hadoop3
 export SPARK_LOCAL_HOSTNAME=localhost # avoid VPN messing up driver local IP 
address binding
 
diff --git a/regtests/setup.sh b/regtests/setup.sh
index d5ca74d53..2c19a6d94 100755
--- a/regtests/setup.sh
+++ b/regtests/setup.sh
@@ -31,7 +31,7 @@ if [ -z "${SPARK_HOME}" ]; then
 fi
 SPARK_CONF="${SPARK_HOME}/conf/spark-defaults.conf"
 DERBY_HOME="/tmp/derby"
-ICEBERG_VERSION="1.9.0"
+ICEBERG_VERSION="1.9.1"
 export 
PYTHONPATH="${SPARK_HOME}/python/:${SPARK_HOME}/python/lib/py4j-0.10.9.7-src.zip:$PYTHONPATH"
 
 # Ensure binaries are downloaded locally
diff --git a/regtests/t_pyspark/src/iceberg_spark.py 
b/regtests/t_pyspark/src/iceberg_spark.py
index f1bc295a7..23a5465cf 100644
--- a/regtests/t_pyspark/src/iceberg_spark.py
+++ b/regtests/t_pyspark/src/iceberg_spark.py
@@ -73,8 +73,8 @@ class IcebergSparkSession:
     """Initial method for Iceberg Spark session. Creates a Spark session with 
specified configs.
     """
     packages = [
-      "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.0",
-      "org.apache.iceberg:iceberg-aws-bundle:1.9.0",
+      "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.1",
+      "org.apache.iceberg:iceberg-aws-bundle:1.9.1",
     ]
     excludes = ["org.checkerframework:checker-qual", 
"com.google.errorprone:error_prone_annotations"]
 
diff --git a/runtime/admin/distribution/LICENSE 
b/runtime/admin/distribution/LICENSE
index 56bdc703b..bafa2067a 100644
--- a/runtime/admin/distribution/LICENSE
+++ b/runtime/admin/distribution/LICENSE
@@ -1003,13 +1003,13 @@ License: Apache License 2.0 - 
https://www.apache.org/licenses/LICENSE-2.0.txt
 
 
--------------------------------------------------------------------------------
 
-Group: org.apache.iceberg Name: iceberg-api Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-aws Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-azure Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-bundled-guava Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-common Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-core Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-gcp Version: 1.9.0
+Group: org.apache.iceberg Name: iceberg-api Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-aws Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-azure Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-bundled-guava Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-common Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-core Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-gcp Version: 1.9.1
 Project URL: https://iceberg.apache.org/
 License: Apache License 2.0 - https://www.apache.org/licenses/LICENSE-2.0.txt
 
diff --git a/runtime/distribution/LICENSE b/runtime/distribution/LICENSE
index c627cda9b..f44115d50 100644
--- a/runtime/distribution/LICENSE
+++ b/runtime/distribution/LICENSE
@@ -1301,13 +1301,13 @@ License: Apache License 2.0 - 
https://www.apache.org/licenses/LICENSE-2.0.txt
 
 
--------------------------------------------------------------------------------
 
-Group: org.apache.iceberg Name: iceberg-api Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-aws Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-azure Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-bundled-guava Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-common Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-core Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-gcp Version: 1.9.0
+Group: org.apache.iceberg Name: iceberg-api Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-aws Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-azure Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-bundled-guava Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-common Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-core Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-gcp Version: 1.9.1
 Project URL: https://iceberg.apache.org/
 License: Apache License 2.0 - https://www.apache.org/licenses/LICENSE-2.0.txt
 
diff --git a/runtime/server/distribution/LICENSE 
b/runtime/server/distribution/LICENSE
index 940eb397a..b675a3246 100644
--- a/runtime/server/distribution/LICENSE
+++ b/runtime/server/distribution/LICENSE
@@ -1295,13 +1295,13 @@ License: Apache License 2.0 - 
https://www.apache.org/licenses/LICENSE-2.0.txt
 
 
--------------------------------------------------------------------------------
 
-Group: org.apache.iceberg Name: iceberg-api Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-aws Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-azure Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-bundled-guava Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-common Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-core Version: 1.9.0
-Group: org.apache.iceberg Name: iceberg-gcp Version: 1.9.0
+Group: org.apache.iceberg Name: iceberg-api Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-aws Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-azure Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-bundled-guava Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-common Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-core Version: 1.9.1
+Group: org.apache.iceberg Name: iceberg-gcp Version: 1.9.1
 Project URL: https://iceberg.apache.org/
 License: Apache License 2.0 - https://www.apache.org/licenses/LICENSE-2.0.txt
 
diff --git a/site/content/in-dev/unreleased/getting-started/using-polaris.md 
b/site/content/in-dev/unreleased/getting-started/using-polaris.md
index 1403e7b02..39a53ff97 100644
--- a/site/content/in-dev/unreleased/getting-started/using-polaris.md
+++ b/site/content/in-dev/unreleased/getting-started/using-polaris.md
@@ -158,7 +158,7 @@ _Note: the credentials provided here are those for our 
principal, not the root c
 
 ```shell
 bin/spark-sql \
---packages 
org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.0,org.apache.iceberg:iceberg-aws-bundle:1.9.0
 \
+--packages 
org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.9.1,org.apache.iceberg:iceberg-aws-bundle:1.9.1
 \
 --conf 
spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions
 \
 --conf spark.sql.catalog.quickstart_catalog.warehouse=quickstart_catalog \
 --conf 
spark.sql.catalog.quickstart_catalog.header.X-Iceberg-Access-Delegation=vended-credentials
 \
diff --git a/site/content/in-dev/unreleased/polaris-spark-client.md 
b/site/content/in-dev/unreleased/polaris-spark-client.md
index e59657d52..3d597f19f 100644
--- a/site/content/in-dev/unreleased/polaris-spark-client.md
+++ b/site/content/in-dev/unreleased/polaris-spark-client.md
@@ -44,12 +44,12 @@ git clone https://github.com/apache/polaris.git ~/polaris
 
 ## Start Spark against a deployed Polaris service
 Before starting, ensure that the deployed Polaris service supports Generic 
Tables, and that Spark 3.5(version 3.5.3 or later is installed).
-Spark 3.5.5 is recommended, and you can follow the instructions below to get a 
Spark 3.5.5 distribution.
+Spark 3.5.6 is recommended, and you can follow the instructions below to get a 
Spark 3.5.6 distribution.
 ```shell
 cd ~
-wget 
https://www.apache.org/dyn/closer.lua/spark/spark-3.5.5/spark-3.5.5-bin-hadoop3.tgz?action=download
+wget 
https://www.apache.org/dyn/closer.lua/spark/spark-3.5.6/spark-3.5.6-bin-hadoop3.tgz?action=download
 mkdir spark-3.5
-tar xzvf spark-3.5.5-bin-hadoop3.tgz  -C spark-3.5 --strip-components=1
+tar xzvf spark-3.5.6-bin-hadoop3.tgz -C spark-3.5 --strip-components=1
 cd spark-3.5
 ```
 
@@ -59,7 +59,7 @@ a released Polaris Spark client.
 
 ```shell
 bin/spark-shell \
---packages 
<polaris-spark-client-package>,org.apache.iceberg:iceberg-aws-bundle:1.9.0,io.delta:delta-spark_2.12:3.3.1
 \
+--packages 
<polaris-spark-client-package>,org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.3.1
 \
 --conf 
spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension
 \
 --conf 
spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog 
\
 --conf spark.sql.catalog.<spark-catalog-name>.warehouse=<polaris-catalog-name> 
\
@@ -87,7 +87,7 @@ You can also start the connection by programmatically 
initialize a SparkSession,
 from pyspark.sql import SparkSession
 
 spark = SparkSession.builder
-  .config("spark.jars.packages", 
"<polaris-spark-client-package>,org.apache.iceberg:iceberg-aws-bundle:1.9.0,io.delta:delta-spark_2.12:3.3.1")
+  .config("spark.jars.packages", 
"<polaris-spark-client-package>,org.apache.iceberg:iceberg-aws-bundle:1.9.1,io.delta:delta-spark_2.12:3.3.1")
   .config("spark.sql.catalog.spark_catalog", 
"org.apache.spark.sql.delta.catalog.DeltaCatalog")
   .config("spark.sql.extensions", 
"org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension")
   .config("spark.sql.catalog.<spark-catalog-name>", 
"org.apache.polaris.spark.SparkCatalog")

Reply via email to