This is an automated email from the ASF dual-hosted git repository.
chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 2d534bd2c0 [KYUUBI #7284] Upgrade Flink and Spark to latest patched version
2d534bd2c0 is described below
commit 2d534bd2c0ca337b0b84d56b46a5ca6753434761
Author: Cheng Pan <[email protected]>
AuthorDate: Thu Dec 25 20:52:05 2025 +0800
[KYUUBI #7284] Upgrade Flink and Spark to latest patched version
### Why are the changes needed?

Test with the latest patched versions:
Flink 1.19.1 => 1.19.3
Flink 1.20.0 => 1.20.3
Spark 3.4.3 => 3.4.4
Spark 3.5.5 => 3.5.7
### How was this patch tested?

Passed GHA.
### Was this patch authored or co-authored using generative AI tooling?

No.
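For context, the bumps are exercised through the Maven properties and profiles touched in the diff below. A minimal local sketch, assuming a standard Kyuubi checkout (the `spark.version`/`flink.version` properties and `flink-1.20` profile are the ones updated in pom.xml):

```shell
# Build and test against the bumped Spark patch release
# (spark.version is the property updated in pom.xml).
build/mvn clean install -DskipTests -Dspark.version=3.5.7

# Likewise for Flink, via the existing flink-1.20 profile.
build/mvn clean install -DskipTests -Pflink-1.20 -Dflink.version=1.20.3
```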
Closes #7284 from pan3793/version-bump.
Closes #7284
8a04411d2 [Cheng Pan] Upgrade Flink and Spark to latest patched version
Authored-by: Cheng Pan <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
---
.github/workflows/master.yml | 10 +++++-----
bin/docker-image-tool.sh | 4 ++--
docker/playground/.env | 2 +-
docs/deployment/kyuubi_on_kubernetes.md | 4 ++--
docs/extensions/engines/spark/lineage.md | 2 +-
.../kyuubi/spark/connector/hive/HiveConnectorUtils.scala | 2 +-
.../test/deployment/KyuubiOnKubernetesTestsSuite.scala | 2 +-
.../kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala | 2 +-
pom.xml | 12 ++++++------
9 files changed, 20 insertions(+), 20 deletions(-)
diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 242e54456f..2a9dce5f88 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -82,7 +82,7 @@ jobs:
- java: 8
python: '3.9'
spark: '3.5'
- spark-archive: '-Dspark.archive.mirror=https://www.apache.org/dyn/closer.lua/spark/spark-3.4.3 -Dspark.archive.name=spark-3.4.3-bin-hadoop3.tgz -Pzookeeper-3.6'
+ spark-archive: '-Dspark.archive.mirror=https://www.apache.org/dyn/closer.lua/spark/spark-3.4.4 -Dspark.archive.name=spark-3.4.4-bin-hadoop3.tgz -Pzookeeper-3.6'
exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
comment: 'verify-on-spark-3.4-binary'
- java: 17
@@ -277,7 +277,7 @@ jobs:
comment: 'verify-on-flink-1.18-binary'
- java: 8
flink: '1.20'
- flink-archive: '-Dflink.archive.mirror=https://www.apache.org/dyn/closer.lua/flink/flink-1.19.1 -Dflink.archive.name=flink-1.19.1-bin-scala_2.12.tgz'
+ flink-archive: '-Dflink.archive.mirror=https://www.apache.org/dyn/closer.lua/flink/flink-1.19.3 -Dflink.archive.name=flink-1.19.3-bin-scala_2.12.tgz'
comment: 'verify-on-flink-1.19-binary'
steps:
- uses: actions/checkout@v4
@@ -439,14 +439,14 @@ jobs:
cache-binary: false
- name: Pull Spark image
run: |
- docker pull apache/spark:3.5.5
+ docker pull apache/spark:3.5.7
- name: Build Kyuubi Docker Image
uses: docker/build-push-action@v6
with:
# passthrough CI into build container
build-args: |
CI=${CI}
- BASE_IMAGE=apache/spark:3.5.5
+ BASE_IMAGE=apache/spark:3.5.7
MVN_ARG=--spark-provided --flink-provided --hive-provided
context: .
file: build/Dockerfile.CI
@@ -463,7 +463,7 @@ jobs:
# https://minikube.sigs.k8s.io/docs/handbook/pushing/#7-loading-directly-to-in-cluster-container-runtime
minikube image load apache/kyuubi:ci
# pre-install spark into minikube
- minikube image load apache/spark:3.5.5
+ minikube image load apache/spark:3.5.7
- name: kubectl pre-check
run: |
kubectl get nodes
diff --git a/bin/docker-image-tool.sh b/bin/docker-image-tool.sh
index ba7f0eb79a..f4684ab09e 100755
--- a/bin/docker-image-tool.sh
+++ b/bin/docker-image-tool.sh
@@ -181,8 +181,8 @@ Examples:
$0 -r docker.io/myrepo -t v1.8.1 build
$0 -r docker.io/myrepo -t v1.8.1 push
- - Build and push with tag "v1.8.1" and Spark-3.5.5 as base image to docker.io/myrepo
- $0 -r docker.io/myrepo -t v1.8.1 -b BASE_IMAGE=repo/spark:3.5.5 build
+ - Build and push with tag "v1.8.1" and Spark-3.5.7 as base image to docker.io/myrepo
+ $0 -r docker.io/myrepo -t v1.8.1 -b BASE_IMAGE=repo/spark:3.5.7 build
$0 -r docker.io/myrepo -t v1.8.1 push
- Build and push for multiple archs to docker.io/myrepo
diff --git a/docker/playground/.env b/docker/playground/.env
index 89f1ac5bfa..28a97a0d24 100644
--- a/docker/playground/.env
+++ b/docker/playground/.env
@@ -24,7 +24,7 @@ KYUUBI_HADOOP_VERSION=3.3.6
POSTGRES_VERSION=12
POSTGRES_JDBC_VERSION=42.3.4
SCALA_BINARY_VERSION=2.12
-SPARK_VERSION=3.4.3
+SPARK_VERSION=3.4.4
SPARK_BINARY_VERSION=3.4
SPARK_HADOOP_VERSION=3.3.4
ZOOKEEPER_VERSION=3.6.3
diff --git a/docs/deployment/kyuubi_on_kubernetes.md b/docs/deployment/kyuubi_on_kubernetes.md
index 0a81ab9229..a853a922c9 100644
--- a/docs/deployment/kyuubi_on_kubernetes.md
+++ b/docs/deployment/kyuubi_on_kubernetes.md
@@ -42,8 +42,8 @@ Examples:
$0 -r docker.io/myrepo -t v1.8.1 build
$0 -r docker.io/myrepo -t v1.8.1 push
- - Build and push with tag "v1.8.1" and Spark-3.5.5 as base image to docker.io/myrepo
- $0 -r docker.io/myrepo -t v1.8.1 -b BASE_IMAGE=repo/spark:3.5.5 build
+ - Build and push with tag "v1.8.1" and Spark-3.5.7 as base image to docker.io/myrepo
+ $0 -r docker.io/myrepo -t v1.8.1 -b BASE_IMAGE=repo/spark:3.5.7 build
$0 -r docker.io/myrepo -t v1.8.1 push
- Build and push for multiple archs to docker.io/myrepo
diff --git a/docs/extensions/engines/spark/lineage.md b/docs/extensions/engines/spark/lineage.md
index a5d445863e..083ca37111 100644
--- a/docs/extensions/engines/spark/lineage.md
+++ b/docs/extensions/engines/spark/lineage.md
@@ -117,7 +117,7 @@ Sometimes, it may be incompatible with other Spark distributions, then you may n
For example,
```shell
-build/mvn clean package -pl :kyuubi-spark-lineage_2.12 -am -DskipTests -Dspark.version=3.5.5
+build/mvn clean package -pl :kyuubi-spark-lineage_2.12 -am -DskipTests -Dspark.version=3.5.7
```
The available `spark.version`s are shown in the following table.
diff --git a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/HiveConnectorUtils.scala b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/HiveConnectorUtils.scala
index 371d79abe7..2a86f31fd6 100644
--- a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/HiveConnectorUtils.scala
+++ b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/HiveConnectorUtils.scala
@@ -122,7 +122,7 @@ object HiveConnectorUtils extends Logging {
isSplitable,
maxSplitBytes,
partitionValues)
- }.recover { case _: Exception => // SPARK-51185: Spark 3.5.5
+ }.recover { case _: Exception => // SPARK-51185: Spark 3.5.7
val fileStatusWithMetadataClz = DynClasses.builder()
.impl("org.apache.spark.sql.execution.datasources.FileStatusWithMetadata")
.buildChecked()
diff --git a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/deployment/KyuubiOnKubernetesTestsSuite.scala b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/deployment/KyuubiOnKubernetesTestsSuite.scala
index 2a17d182ef..9313494c6a 100644
--- a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/deployment/KyuubiOnKubernetesTestsSuite.scala
+++ b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/deployment/KyuubiOnKubernetesTestsSuite.scala
@@ -56,7 +56,7 @@ class KyuubiOnKubernetesWithSparkTestsBase extends WithKyuubiServerOnKubernetes
Map(
"spark.master" -> s"k8s://$miniKubeApiMaster",
// We should update spark docker image in ./github/workflows/master.yml at the same time
- "spark.kubernetes.container.image" -> "apache/spark:3.5.5",
+ "spark.kubernetes.container.image" -> "apache/spark:3.5.7",
"spark.kubernetes.container.image.pullPolicy" -> "IfNotPresent",
"spark.executor.memory" -> "512M",
"spark.driver.memory" -> "1024M",
diff --git a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala
index 562ee63799..092e799617 100644
--- a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala
+++ b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala
@@ -51,7 +51,7 @@ abstract class SparkOnKubernetesSuiteBase
// TODO Support more Spark version
// Spark official docker image: https://hub.docker.com/r/apache/spark/tags
KyuubiConf().set("spark.master", s"k8s://$apiServerAddress")
- .set("spark.kubernetes.container.image", "apache/spark:3.5.5")
+ .set("spark.kubernetes.container.image", "apache/spark:3.5.7")
.set("spark.kubernetes.container.image.pullPolicy", "IfNotPresent")
.set("spark.executor.instances", "1")
.set("spark.executor.memory", "512M")
diff --git a/pom.xml b/pom.xml
index e01fa22497..6f3a81deb4 100644
--- a/pom.xml
+++ b/pom.xml
@@ -141,7 +141,7 @@
<failsafe.verion>3.3.2</failsafe.verion>
<fb303.version>0.9.3</fb303.version>
<flexmark.version>0.62.2</flexmark.version>
- <flink.version>1.20.0</flink.version>
+ <flink.version>1.20.3</flink.version>
<flink.archive.name>flink-${flink.version}-bin-scala_2.12.tgz</flink.archive.name>
<flink.archive.mirror>${apache.archive.dist}/flink/flink-${flink.version}</flink.archive.mirror>
<flink.archive.query>?action=download</flink.archive.query>
@@ -207,7 +207,7 @@
DO NOT forget to change the following properties when change the minor version of Spark:
`delta.version`, `delta.artifact`, `maven.plugin.scalatest.exclude.tags`
-->
- <spark.version>3.5.5</spark.version>
+ <spark.version>3.5.7</spark.version>
<spark.binary.version>3.5</spark.binary.version>
<spark.archive.scala.suffix></spark.archive.scala.suffix>
<spark.archive.name>spark-${spark.version}-bin-hadoop3${spark.archive.scala.suffix}.tgz</spark.archive.name>
@@ -2012,7 +2012,7 @@
<module>extensions/spark/kyuubi-spark-connector-hive</module>
</modules>
<properties>
- <spark.version>3.4.3</spark.version>
+ <spark.version>3.4.4</spark.version>
<spark.binary.version>3.4</spark.binary.version>
<delta.version>2.4.0</delta.version>
<delta.artifact>delta-core_${scala.binary.version}</delta.artifact>
@@ -2027,7 +2027,7 @@
<module>extensions/spark/kyuubi-spark-connector-hive</module>
</modules>
<properties>
- <spark.version>3.5.5</spark.version>
+ <spark.version>3.5.7</spark.version>
<spark.binary.version>3.5</spark.binary.version>
<delta.version>3.3.1</delta.version>
<delta.artifact>delta-spark_${scala.binary.version}</delta.artifact>
@@ -2125,14 +2125,14 @@
<profile>
<id>flink-1.19</id>
<properties>
- <flink.version>1.19.1</flink.version>
+ <flink.version>1.19.3</flink.version>
</properties>
</profile>
<profile>
<id>flink-1.20</id>
<properties>
- <flink.version>1.20.0</flink.version>
+ <flink.version>1.20.3</flink.version>
</properties>
</profile>
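The profile changes above take effect whenever a `flink-1.19` or `flink-1.20` profile build runs; as a sketch, a local invocation might look like:

```shell
# Activate the flink-1.19 profile; flink.version now resolves to 1.19.3.
build/mvn clean package -DskipTests -Pflink-1.19
```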