This is an automated email from the ASF dual-hosted git repository.
chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 1d1e8a0a3b [KYUUBI #6842] Bump Spark 3.5.4
1d1e8a0a3b is described below
commit 1d1e8a0a3b122c1f4baf4af6e26a729ab23c9cdd
Author: Cheng Pan <[email protected]>
AuthorDate: Mon Dec 23 11:21:45 2024 +0800
[KYUUBI #6842] Bump Spark 3.5.4
### Why are the changes needed?
Spark 3.5.4 has been released:
https://spark.apache.org/releases/spark-release-3-5-4.html
### How was this patch tested?
Pass GHA
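Beyond CI, the bump can be sanity-checked locally. The commands below are an illustrative sketch based on the files touched by this patch; the `help:evaluate` Maven goal is an assumption about the local toolchain and is not part of this change.
```shell
# Check the effective spark.version resolved from pom.xml
# (assumes the Maven Help Plugin is available via the build/mvn wrapper)
build/mvn help:evaluate -Dexpression=spark.version -q -DforceStdout

# Build a Spark extension against the bumped version, as documented in
# docs/extensions/engines/spark/lineage.md
build/mvn clean package -pl :kyuubi-spark-lineage_2.12 -am -DskipTests -Dspark.version=3.5.4

# Pre-load the new Spark image into minikube before running the Kubernetes
# integration tests, mirroring the step in .github/workflows/master.yml
docker pull apache/spark:3.5.4
minikube image load apache/spark:3.5.4
```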
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #6842 from pan3793/spark-3.5.4.
Closes #6842
0fb7ad8a0 [Cheng Pan] ga
8eacc9c97 [Cheng Pan] Spark 3.5.4 RC2
0721fa401 [Cheng Pan] fix
49e98a201 [Cheng Pan] maven repo
951db0c82 [Cheng Pan] Spark 3.5.4
Authored-by: Cheng Pan <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
---
.github/workflows/master.yml | 4 ++--
bin/docker-image-tool.sh | 4 ++--
docs/deployment/kyuubi_on_kubernetes.md | 4 ++--
docs/extensions/engines/spark/lineage.md | 2 +-
.../kubernetes/test/deployment/KyuubiOnKubernetesTestsSuite.scala | 2 +-
.../kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala | 2 +-
pom.xml | 4 ++--
7 files changed, 11 insertions(+), 11 deletions(-)
diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 1997908093..5b347f2434 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -444,8 +444,8 @@ jobs:
# https://minikube.sigs.k8s.io/docs/handbook/pushing/#7-loading-directly-to-in-cluster-container-runtime
minikube image load apache/kyuubi:latest
# pre-install spark into minikube
- docker pull apache/spark:3.5.2
- minikube image load apache/spark:3.5.2
+ docker pull apache/spark:3.5.4
+ minikube image load apache/spark:3.5.4
- name: kubectl pre-check
run: |
kubectl get nodes
diff --git a/bin/docker-image-tool.sh b/bin/docker-image-tool.sh
index 7f6f1d6236..5a495c9348 100755
--- a/bin/docker-image-tool.sh
+++ b/bin/docker-image-tool.sh
@@ -181,8 +181,8 @@ Examples:
$0 -r docker.io/myrepo -t v1.8.1 build
$0 -r docker.io/myrepo -t v1.8.1 push
- - Build and push with tag "v1.8.1" and Spark-3.5.2 as base image to docker.io/myrepo
- $0 -r docker.io/myrepo -t v1.8.1 -b BASE_IMAGE=repo/spark:3.5.2 build
+ - Build and push with tag "v1.8.1" and Spark-3.5.4 as base image to docker.io/myrepo
+ $0 -r docker.io/myrepo -t v1.8.1 -b BASE_IMAGE=repo/spark:3.5.4 build
$0 -r docker.io/myrepo -t v1.8.1 push
- Build and push for multiple archs to docker.io/myrepo
diff --git a/docs/deployment/kyuubi_on_kubernetes.md b/docs/deployment/kyuubi_on_kubernetes.md
index 1d331b5676..80f3e7509b 100644
--- a/docs/deployment/kyuubi_on_kubernetes.md
+++ b/docs/deployment/kyuubi_on_kubernetes.md
@@ -42,8 +42,8 @@ Examples:
$0 -r docker.io/myrepo -t v1.8.1 build
$0 -r docker.io/myrepo -t v1.8.1 push
- - Build and push with tag "v1.8.1" and Spark-3.5.2 as base image to docker.io/myrepo
- $0 -r docker.io/myrepo -t v1.8.1 -b BASE_IMAGE=repo/spark:3.5.2 build
+ - Build and push with tag "v1.8.1" and Spark-3.5.4 as base image to docker.io/myrepo
+ $0 -r docker.io/myrepo -t v1.8.1 -b BASE_IMAGE=repo/spark:3.5.4 build
$0 -r docker.io/myrepo -t v1.8.1 push
- Build and push for multiple archs to docker.io/myrepo
diff --git a/docs/extensions/engines/spark/lineage.md b/docs/extensions/engines/spark/lineage.md
index 26e284df53..a2414269c7 100644
--- a/docs/extensions/engines/spark/lineage.md
+++ b/docs/extensions/engines/spark/lineage.md
@@ -117,7 +117,7 @@ Sometimes, it may be incompatible with other Spark distributions, then you may n
For example,
```shell
-build/mvn clean package -pl :kyuubi-spark-lineage_2.12 -am -DskipTests -Dspark.version=3.5.2
+build/mvn clean package -pl :kyuubi-spark-lineage_2.12 -am -DskipTests -Dspark.version=3.5.4
```
The available `spark.version`s are shown in the following table.
diff --git a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/deployment/KyuubiOnKubernetesTestsSuite.scala b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/deployment/KyuubiOnKubernetesTestsSuite.scala
index ead11b3357..10565087a6 100644
--- a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/deployment/KyuubiOnKubernetesTestsSuite.scala
+++ b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/deployment/KyuubiOnKubernetesTestsSuite.scala
@@ -56,7 +56,7 @@ class KyuubiOnKubernetesWithSparkTestsBase extends WithKyuubiServerOnKubernetes
Map(
"spark.master" -> s"k8s://$miniKubeApiMaster",
// We should update spark docker image in ./github/workflows/master.yml at the same time
- "spark.kubernetes.container.image" -> "apache/spark:3.5.2",
+ "spark.kubernetes.container.image" -> "apache/spark:3.5.4",
"spark.kubernetes.container.image.pullPolicy" -> "IfNotPresent",
"spark.executor.memory" -> "512M",
"spark.driver.memory" -> "1024M",
diff --git a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala
index efbe1e2e96..0aff5ca798 100644
--- a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala
+++ b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala
@@ -51,7 +51,7 @@ abstract class SparkOnKubernetesSuiteBase
// TODO Support more Spark version
// Spark official docker image: https://hub.docker.com/r/apache/spark/tags
KyuubiConf().set("spark.master", s"k8s://$apiServerAddress")
- .set("spark.kubernetes.container.image", "apache/spark:3.5.2")
+ .set("spark.kubernetes.container.image", "apache/spark:3.5.4")
.set("spark.kubernetes.container.image.pullPolicy", "IfNotPresent")
.set("spark.executor.instances", "1")
.set("spark.executor.memory", "512M")
diff --git a/pom.xml b/pom.xml
index 7314636324..f9ecb5ae30 100644
--- a/pom.xml
+++ b/pom.xml
@@ -200,7 +200,7 @@
DO NOT forget to change the following properties when change the minor version of Spark:
`delta.version`, `delta.artifact`, `maven.plugin.scalatest.exclude.tags`
-->
- <spark.version>3.5.2</spark.version>
+ <spark.version>3.5.4</spark.version>
<spark.binary.version>3.5</spark.binary.version>
<spark.archive.scala.suffix></spark.archive.scala.suffix>
<spark.archive.name>spark-${spark.version}-bin-hadoop3${spark.archive.scala.suffix}.tgz</spark.archive.name>
@@ -2030,7 +2030,7 @@
<module>extensions/spark/kyuubi-spark-connector-hive</module>
</modules>
<properties>
- <spark.version>3.5.2</spark.version>
+ <spark.version>3.5.4</spark.version>
<spark.binary.version>3.5</spark.binary.version>
<delta.version>3.2.0</delta.version>
<delta.artifact>delta-spark_${scala.binary.version}</delta.artifact>