This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git
The following commit(s) were added to refs/heads/main by this push:
new b4ad042 [SPARK-53702] Update `tests` to use `4.0.1` and `3.5.7`
b4ad042 is described below
commit b4ad04296c000ee48dadb419aeea38edf6c100b8
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Wed Sep 24 14:51:25 2025 -0700
[SPARK-53702] Update `tests` to use `4.0.1` and `3.5.7`
### What changes were proposed in this pull request?
This PR aims to update the `tests` directory to use the latest Spark releases
(4.0.1 and 3.5.7).
### Why are the changes needed?
To use the latest Spark images in the E2E tests and the benchmark script.
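For reference, a minimal sketch (not part of the patch) of pulling the image tags that the updated tests reference, to confirm the new releases are available on Docker Hub; the tag names are taken from the diff below:
```sh
# Illustration only: pull the image tags referenced by the updated tests.
docker pull apache/spark:4.0.1-java21-scala
docker pull apache/spark:4.0.1-scala2.13-java17-ubuntu
docker pull apache/spark:3.5.7-scala2.12-java17-ubuntu
docker pull apache/spark:3.5.7-scala2.12-java17-python3-ubuntu
```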
### Does this PR introduce _any_ user-facing change?
No, this is a test-only change.
### How was this patch tested?
Pass the CIs.
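For local verification, a rough sketch assuming the Kyverno Chainsaw CLI is installed and the operator is running in a test cluster; the exact invocation used by CI may differ:
```sh
# Illustration only: run the Chainsaw E2E suites touched by this change.
chainsaw test tests/e2e/spark-versions
chainsaw test tests/e2e/python
```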
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #337 from dongjoon-hyun/SPARK-53702.
Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
tests/benchmark/sparkapps.sh | 4 ++--
tests/e2e/python/chainsaw-test.yaml | 4 ++--
.../spark-example-retain-duration.yaml | 4 ++--
tests/e2e/spark-versions/chainsaw-test.yaml | 12 ++++++------
.../state-transition/spark-cluster-example-succeeded.yaml | 4 ++--
tests/e2e/state-transition/spark-example-succeeded.yaml | 4 ++--
tests/e2e/watched-namespaces/spark-example.yaml | 4 ++--
7 files changed, 18 insertions(+), 18 deletions(-)
diff --git a/tests/benchmark/sparkapps.sh b/tests/benchmark/sparkapps.sh
index 3b3d986..a8fed59 100755
--- a/tests/benchmark/sparkapps.sh
+++ b/tests/benchmark/sparkapps.sh
@@ -43,9 +43,9 @@ spec:
spark.kubernetes.driver.request.cores: "100m"
spark.kubernetes.driver.master: "local[1]"
spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
- spark.kubernetes.container.image: "apache/spark:4.0.0-java21-scala"
+ spark.kubernetes.container.image: "apache/spark:4.0.1-java21-scala"
runtimeVersions:
- sparkVersion: "4.0.0"
+ sparkVersion: "4.0.1"
---
EOF
done
diff --git a/tests/e2e/python/chainsaw-test.yaml b/tests/e2e/python/chainsaw-test.yaml
index 88cd07a..6ccd7b5 100644
--- a/tests/e2e/python/chainsaw-test.yaml
+++ b/tests/e2e/python/chainsaw-test.yaml
@@ -23,11 +23,11 @@ spec:
scenarios:
- bindings:
- name: "SPARK_VERSION"
- value: "3.5.6"
+ value: "3.5.7"
- name: "SCALA_VERSION"
value: "2.12"
- name: "IMAGE"
- value: "apache/spark:3.5.6-scala2.12-java17-python3-ubuntu"
+ value: "apache/spark:3.5.7-scala2.12-java17-python3-ubuntu"
steps:
- name: install-spark-application
try:
diff --git a/tests/e2e/resource-retain-duration/spark-example-retain-duration.yaml b/tests/e2e/resource-retain-duration/spark-example-retain-duration.yaml
index 952bfff..1a410f5 100644
--- a/tests/e2e/resource-retain-duration/spark-example-retain-duration.yaml
+++ b/tests/e2e/resource-retain-duration/spark-example-retain-duration.yaml
@@ -29,7 +29,7 @@ spec:
ttlAfterStopMillis: 30000
sparkConf:
spark.executor.instances: "1"
- spark.kubernetes.container.image: "apache/spark:4.0.0-java21-scala"
+ spark.kubernetes.container.image: "apache/spark:4.0.1-java21-scala"
spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
runtimeVersions:
- sparkVersion: 4.0.0
+ sparkVersion: 4.0.1
diff --git a/tests/e2e/spark-versions/chainsaw-test.yaml b/tests/e2e/spark-versions/chainsaw-test.yaml
index 325d2ad..36e8279 100644
--- a/tests/e2e/spark-versions/chainsaw-test.yaml
+++ b/tests/e2e/spark-versions/chainsaw-test.yaml
@@ -23,31 +23,31 @@ spec:
scenarios:
- bindings:
- name: "SPARK_VERSION"
- value: "4.0.0"
+ value: "4.0.1"
- name: "SCALA_VERSION"
value: "2.13"
- name: "JAVA_VERSION"
value: "17"
- name: "IMAGE"
- value: "apache/spark:4.0.0-scala2.13-java17-ubuntu"
+ value: "apache/spark:4.0.1-scala2.13-java17-ubuntu"
- bindings:
- name: "SPARK_VERSION"
- value: "3.5.6"
+ value: "3.5.7"
- name: "SCALA_VERSION"
value: "2.12"
- name: "JAVA_VERSION"
value: "17"
- name: "IMAGE"
- value: 'apache/spark:3.5.6-scala2.12-java17-ubuntu'
+ value: 'apache/spark:3.5.7-scala2.12-java17-ubuntu'
- bindings:
- name: "SPARK_VERSION"
- value: "4.0.0"
+ value: "4.0.1"
- name: "SCALA_VERSION"
value: "2.13"
- name: "JAVA_VERSION"
value: "21"
- name: "IMAGE"
- value: 'apache/spark:4.0.0-java21-scala'
+ value: 'apache/spark:4.0.1-java21-scala'
steps:
- name: install-spark-application
try:
diff --git a/tests/e2e/state-transition/spark-cluster-example-succeeded.yaml b/tests/e2e/state-transition/spark-cluster-example-succeeded.yaml
index 9992591..92d2145 100644
--- a/tests/e2e/state-transition/spark-cluster-example-succeeded.yaml
+++ b/tests/e2e/state-transition/spark-cluster-example-succeeded.yaml
@@ -19,14 +19,14 @@ metadata:
namespace: default
spec:
runtimeVersions:
- sparkVersion: "4.0.0"
+ sparkVersion: "4.0.1"
clusterTolerations:
instanceConfig:
initWorkers: 1
minWorkers: 1
maxWorkers: 1
sparkConf:
- spark.kubernetes.container.image: "apache/spark:4.0.0"
+ spark.kubernetes.container.image: "apache/spark:4.0.1"
spark.master.ui.title: "Spark Cluster E2E Test"
spark.master.rest.enabled: "true"
spark.master.rest.host: "0.0.0.0"
diff --git a/tests/e2e/state-transition/spark-example-succeeded.yaml b/tests/e2e/state-transition/spark-example-succeeded.yaml
index cfd8c21..9fe1d56 100644
--- a/tests/e2e/state-transition/spark-example-succeeded.yaml
+++ b/tests/e2e/state-transition/spark-example-succeeded.yaml
@@ -25,8 +25,8 @@ spec:
jars: "local:///opt/spark/examples/jars/spark-examples.jar"
sparkConf:
spark.executor.instances: "1"
- spark.kubernetes.container.image: "apache/spark:4.0.0-scala2.13-java17-ubuntu"
+ spark.kubernetes.container.image: "apache/spark:4.0.1-scala2.13-java17-ubuntu"
spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
runtimeVersions:
- sparkVersion: 4.0.0
+ sparkVersion: 4.0.1
scalaVersion: "2.13"
diff --git a/tests/e2e/watched-namespaces/spark-example.yaml b/tests/e2e/watched-namespaces/spark-example.yaml
index a359527..edaefe6 100644
--- a/tests/e2e/watched-namespaces/spark-example.yaml
+++ b/tests/e2e/watched-namespaces/spark-example.yaml
@@ -25,10 +25,10 @@ spec:
jars: "local:///opt/spark/examples/jars/spark-examples.jar"
sparkConf:
spark.executor.instances: "1"
- spark.kubernetes.container.image: "apache/spark:4.0.0-scala2.13-java17-ubuntu"
+ spark.kubernetes.container.image: "apache/spark:4.0.1-scala2.13-java17-ubuntu"
spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
spark.kubernetes.driver.request.cores: "0.5"
spark.kubernetes.executor.request.cores: "0.5"
runtimeVersions:
- sparkVersion: 4.0.0
+ sparkVersion: 4.0.1
scalaVersion: "2.13"
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]