This is an automated email from the ASF dual-hosted git repository.
yufei pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/polaris.git
The following commit(s) were added to refs/heads/main by this push:
new 7f502abc Update Spark version in doc (#1040)
7f502abc is described below
commit 7f502abcb147b9b8ffed3a4d619637bcf214737b
Author: Prashant Singh <[email protected]>
AuthorDate: Fri Feb 21 09:49:41 2025 -0800
Update Spark version in doc (#1040)
---
getting-started/spark/notebooks/Dockerfile | 2 +-
.../java/org/apache/polaris/service/it/env/PolarisApiEndpoints.java | 2 +-
regtests/run.sh | 2 +-
regtests/run_spark_sql.sh | 2 +-
4 files changed, 4 insertions(+), 4 deletions(-)
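For reference, a quick way to confirm that the Spark version pins touched above stay in sync is a grep along these lines (an illustrative check, not part of this commit; adjust paths as needed):

    grep -nE 'spark-3\.5\.[0-9]+' \
      getting-started/spark/notebooks/Dockerfile \
      regtests/run.sh \
      regtests/run_spark_sql.sh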
diff --git a/getting-started/spark/notebooks/Dockerfile b/getting-started/spark/notebooks/Dockerfile
index 2e3371ab..d0813222 100644
--- a/getting-started/spark/notebooks/Dockerfile
+++ b/getting-started/spark/notebooks/Dockerfile
@@ -17,7 +17,7 @@
# under the License.
#
-FROM jupyter/all-spark-notebook:spark-3.5.0
+FROM jupyter/all-spark-notebook:spark-3.5.4
COPY --chown=jovyan regtests/client /home/jovyan/client
COPY --chown=jovyan regtests/requirements.txt /tmp
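The new image tag can be spot-checked before relying on it; a minimal sketch, assuming spark-submit is on the image's PATH as in the upstream jupyter/all-spark-notebook images:

    docker run --rm jupyter/all-spark-notebook:spark-3.5.4 spark-submit --version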
diff --git a/integration-tests/src/main/java/org/apache/polaris/service/it/env/PolarisApiEndpoints.java b/integration-tests/src/main/java/org/apache/polaris/service/it/env/PolarisApiEndpoints.java
index 7212dd80..45fe7809 100644
--- a/integration-tests/src/main/java/org/apache/polaris/service/it/env/PolarisApiEndpoints.java
+++ b/integration-tests/src/main/java/org/apache/polaris/service/it/env/PolarisApiEndpoints.java
@@ -23,7 +23,7 @@ import java.net.URI;
/**
 * This class contains the most fundamental information for accessing Polaris APIs, such as the base
- * URI and realm ID and provides methods for obtaining Icenberg REST API and Polaris Management
+ * URI and realm ID and provides methods for obtaining Iceberg REST API and Polaris Management
* endpoints.
*/
public final class PolarisApiEndpoints implements Serializable {
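The Javadoc above describes a holder for the base URI and realm ID from which the two endpoint families are derived. As a rough illustration only (the host, port, token handling, and exact paths are assumptions, not taken from this class), resolving them by hand could look like:

    BASE_URI=http://localhost:8181   # assumed local Polaris instance
    TOKEN=...                        # bearer token obtained separately
    curl -H "Authorization: Bearer ${TOKEN}" "${BASE_URI}/api/catalog/v1/config"      # Iceberg REST API
    curl -H "Authorization: Bearer ${TOKEN}" "${BASE_URI}/api/management/v1/catalogs" # Polaris Management API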
diff --git a/regtests/run.sh b/regtests/run.sh
index 405e8171..a73caf70 100755
--- a/regtests/run.sh
+++ b/regtests/run.sh
@@ -20,7 +20,7 @@
# Run without args to run all tests, or single arg for single test.
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
-export SPARK_VERSION=spark-3.5.3
+export SPARK_VERSION=spark-3.5.4
export SPARK_DISTRIBUTION=${SPARK_VERSION}-bin-hadoop3
if [ -z "${SPARK_HOME}" ]; then
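SPARK_VERSION and SPARK_DISTRIBUTION typically combine into the Apache archive artifact name; a minimal sketch of fetching the matching distribution by hand, assuming run.sh is not already doing it for you:

    SPARK_VERSION=spark-3.5.4
    SPARK_DISTRIBUTION=${SPARK_VERSION}-bin-hadoop3
    wget -q "https://archive.apache.org/dist/spark/${SPARK_VERSION}/${SPARK_DISTRIBUTION}.tgz"
    tar -xzf "${SPARK_DISTRIBUTION}.tgz"
    export SPARK_HOME="$(pwd)/${SPARK_DISTRIBUTION}"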
diff --git a/regtests/run_spark_sql.sh b/regtests/run_spark_sql.sh
index 7ba7a325..af662a48 100755
--- a/regtests/run_spark_sql.sh
+++ b/regtests/run_spark_sql.sh
@@ -46,7 +46,7 @@ fi
REGTEST_HOME=$(dirname $(realpath $0))
cd ${REGTEST_HOME}
-export SPARK_VERSION=spark-3.5.3
+export SPARK_VERSION=spark-3.5.4
export SPARK_DISTRIBUTION=${SPARK_VERSION}-bin-hadoop3
export SPARK_LOCAL_HOSTNAME=localhost # avoid VPN messing up driver local IP address binding
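After bumping the pin, a quick sanity check that the distribution in use really is 3.5.4, assuming SPARK_HOME points at the unpacked build:

    export SPARK_LOCAL_HOSTNAME=localhost
    "${SPARK_HOME}/bin/spark-sql" --version   # should report Spark version 3.5.4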