This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git
The following commit(s) were added to refs/heads/main by this push:
new 41e7c3b [SPARK-49706] Use `apache/spark` images instead of `spark`
41e7c3b is described below
commit 41e7c3bba5857f6a678570c64e1c360375395611
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Wed Sep 18 18:16:35 2024 -0700
[SPARK-49706] Use `apache/spark` images instead of `spark`
### What changes were proposed in this pull request?
This PR aims to propose to use `apache/spark` images instead of `spark`
because `apache/spark` images are published first. For example, the following
are only available in `apache/spark` as of now.
- https://github.com/apache/spark-docker/pull/66
- https://github.com/apache/spark-docker/pull/67
- https://github.com/apache/spark-docker/pull/68
### Why are the changes needed?
To apply the latest bits earlier.
### Does this PR introduce _any_ user-facing change?
There is no change from `Apache Spark K8s Operator`.
Only the underlying images are changed.
### How was this patch tested?
Pass the CIs.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #128 from dongjoon-hyun/SPARK-49706.
Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
README.md | 2 +-
examples/cluster-on-yunikorn.yaml | 2 +-
examples/cluster-with-template.yaml | 2 +-
examples/pi-on-yunikorn.yaml | 2 +-
examples/pi-scala.yaml | 2 +-
examples/pi-with-one-pod.yaml | 2 +-
examples/pi.yaml | 2 +-
examples/prod-cluster-with-three-workers.yaml | 2 +-
examples/pyspark-pi.yaml | 2 +-
examples/qa-cluster-with-one-worker.yaml | 2 +-
examples/sql.yaml | 2 +-
tests/e2e/python/chainsaw-test.yaml | 4 ++--
tests/e2e/spark-versions/chainsaw-test.yaml | 2 +-
tests/e2e/state-transition/spark-cluster-example-succeeded.yaml | 2 +-
tests/e2e/state-transition/spark-example-succeeded.yaml | 2 +-
tests/e2e/watched-namespaces/spark-example.yaml | 2 +-
16 files changed, 17 insertions(+), 17 deletions(-)
diff --git a/README.md b/README.md
index e9cdee7..e306889 100644
--- a/README.md
+++ b/README.md
@@ -100,7 +100,7 @@ Events:
Normal Scheduled 14s yunikorn Successfully assigned
default/pi-on-yunikorn-0-driver to node docker-desktop
Normal PodBindSuccessful 14s yunikorn Pod
default/pi-on-yunikorn-0-driver is successfully bound to node docker-desktop
Normal TaskCompleted 6s yunikorn Task
default/pi-on-yunikorn-0-driver is completed
- Normal Pulled 13s kubelet Container image "spark:4.0.0-preview1" already present on machine
+ Normal Pulled 13s kubelet Container image "apache/spark:4.0.0-preview1" already present on machine
Normal Created 13s kubelet Created container
spark-kubernetes-driver
Normal Started 13s kubelet Started container
spark-kubernetes-driver
diff --git a/examples/cluster-on-yunikorn.yaml
b/examples/cluster-on-yunikorn.yaml
index 0032c84..4c1d142 100644
--- a/examples/cluster-on-yunikorn.yaml
+++ b/examples/cluster-on-yunikorn.yaml
@@ -25,7 +25,7 @@ spec:
minWorkers: 1
maxWorkers: 1
sparkConf:
- spark.kubernetes.container.image: "spark:4.0.0-preview1"
+ spark.kubernetes.container.image: "apache/spark:4.0.0-preview1"
spark.kubernetes.scheduler.name: "yunikorn"
spark.master.ui.title: "Spark Cluster on YuniKorn Scheduler"
spark.master.rest.enabled: "true"
diff --git a/examples/cluster-with-template.yaml
b/examples/cluster-with-template.yaml
index c0d17b8..69add4d 100644
--- a/examples/cluster-with-template.yaml
+++ b/examples/cluster-with-template.yaml
@@ -87,7 +87,7 @@ spec:
annotations:
customAnnotation: "annotation"
sparkConf:
- spark.kubernetes.container.image: "spark:4.0.0-preview1"
+ spark.kubernetes.container.image: "apache/spark:4.0.0-preview1"
spark.master.ui.title: "Spark Cluster with Template"
spark.master.rest.enabled: "true"
spark.master.rest.host: "0.0.0.0"
diff --git a/examples/pi-on-yunikorn.yaml b/examples/pi-on-yunikorn.yaml
index 9e115b4..029c9f3 100644
--- a/examples/pi-on-yunikorn.yaml
+++ b/examples/pi-on-yunikorn.yaml
@@ -26,7 +26,7 @@ spec:
spark.dynamicAllocation.maxExecutors: "3"
spark.log.structuredLogging.enabled: "false"
spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
- spark.kubernetes.container.image: "spark:4.0.0-preview1"
+ spark.kubernetes.container.image: "apache/spark:4.0.0-preview1"
spark.kubernetes.scheduler.name: "yunikorn"
spark.kubernetes.driver.label.queue: "root.default"
spark.kubernetes.executor.label.queue: "root.default"
diff --git a/examples/pi-scala.yaml b/examples/pi-scala.yaml
index 2c2f010..3744ae1 100644
--- a/examples/pi-scala.yaml
+++ b/examples/pi-scala.yaml
@@ -25,7 +25,7 @@ spec:
spark.dynamicAllocation.maxExecutors: "3"
spark.log.structuredLogging.enabled: "false"
spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
- spark.kubernetes.container.image: "spark:4.0.0-preview1-scala"
+ spark.kubernetes.container.image: "apache/spark:4.0.0-preview1-scala"
applicationTolerations:
resourceRetainPolicy: OnFailure
runtimeVersions:
diff --git a/examples/pi-with-one-pod.yaml b/examples/pi-with-one-pod.yaml
index 3cff551..f46d977 100644
--- a/examples/pi-with-one-pod.yaml
+++ b/examples/pi-with-one-pod.yaml
@@ -25,6 +25,6 @@ spec:
spark.kubernetes.driver.limit.cores: "5"
spark.log.structuredLogging.enabled: "false"
spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
- spark.kubernetes.container.image: "spark:4.0.0-preview1"
+ spark.kubernetes.container.image: "apache/spark:4.0.0-preview1"
runtimeVersions:
sparkVersion: "4.0.0-preview1"
diff --git a/examples/pi.yaml b/examples/pi.yaml
index 78e3b2e..f99499d 100644
--- a/examples/pi.yaml
+++ b/examples/pi.yaml
@@ -25,7 +25,7 @@ spec:
spark.dynamicAllocation.maxExecutors: "3"
spark.log.structuredLogging.enabled: "false"
spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
- spark.kubernetes.container.image: "spark:4.0.0-preview1"
+ spark.kubernetes.container.image: "apache/spark:4.0.0-preview1"
applicationTolerations:
resourceRetainPolicy: OnFailure
runtimeVersions:
diff --git a/examples/prod-cluster-with-three-workers.yaml
b/examples/prod-cluster-with-three-workers.yaml
index d685f43..e42249a 100644
--- a/examples/prod-cluster-with-three-workers.yaml
+++ b/examples/prod-cluster-with-three-workers.yaml
@@ -25,7 +25,7 @@ spec:
minWorkers: 3
maxWorkers: 3
sparkConf:
- spark.kubernetes.container.image: "spark:4.0.0-preview1"
+ spark.kubernetes.container.image: "apache/spark:4.0.0-preview1"
spark.master.ui.title: "Prod Spark Cluster"
spark.master.rest.enabled: "true"
spark.master.rest.host: "0.0.0.0"
diff --git a/examples/pyspark-pi.yaml b/examples/pyspark-pi.yaml
index ad61f62..b6ec540 100644
--- a/examples/pyspark-pi.yaml
+++ b/examples/pyspark-pi.yaml
@@ -24,7 +24,7 @@ spec:
spark.dynamicAllocation.maxExecutors: "3"
spark.log.structuredLogging.enabled: "false"
spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
- spark.kubernetes.container.image: "spark:4.0.0-preview1"
+ spark.kubernetes.container.image: "apache/spark:4.0.0-preview1"
applicationTolerations:
resourceRetainPolicy: OnFailure
runtimeVersions:
diff --git a/examples/qa-cluster-with-one-worker.yaml
b/examples/qa-cluster-with-one-worker.yaml
index 131808a..46d3a6f 100644
--- a/examples/qa-cluster-with-one-worker.yaml
+++ b/examples/qa-cluster-with-one-worker.yaml
@@ -25,7 +25,7 @@ spec:
minWorkers: 1
maxWorkers: 1
sparkConf:
- spark.kubernetes.container.image: "spark:4.0.0-preview1"
+ spark.kubernetes.container.image: "apache/spark:4.0.0-preview1"
spark.master.ui.title: "QA Spark Cluster"
spark.master.rest.enabled: "true"
spark.master.rest.host: "0.0.0.0"
diff --git a/examples/sql.yaml b/examples/sql.yaml
index f355902..9639723 100644
--- a/examples/sql.yaml
+++ b/examples/sql.yaml
@@ -26,6 +26,6 @@ spec:
spark.dynamicAllocation.maxExecutors: "3"
spark.log.structuredLogging.enabled: "false"
spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
- spark.kubernetes.container.image: "spark:4.0.0-preview1"
+ spark.kubernetes.container.image: "apache/spark:4.0.0-preview1"
runtimeVersions:
sparkVersion: "4.0.0-preview1"
diff --git a/tests/e2e/python/chainsaw-test.yaml
b/tests/e2e/python/chainsaw-test.yaml
index 4147d2f..4f94394 100644
--- a/tests/e2e/python/chainsaw-test.yaml
+++ b/tests/e2e/python/chainsaw-test.yaml
@@ -27,7 +27,7 @@ spec:
- name: "SCALA_VERSION"
value: "2.12"
- name: "IMAGE"
- value: 'spark:3.5.2-scala2.12-java17-python3-ubuntu'
+ value: "apache/spark:3.5.2-scala2.12-java17-python3-ubuntu"
steps:
- name: install-spark-application
try:
@@ -66,4 +66,4 @@ spec:
namespace: default
- podLogs:
selector: spark-app-name=spark
- namespace: default
\ No newline at end of file
+ namespace: default
diff --git a/tests/e2e/spark-versions/chainsaw-test.yaml
b/tests/e2e/spark-versions/chainsaw-test.yaml
index 71a4c00..f4a07f1 100644
--- a/tests/e2e/spark-versions/chainsaw-test.yaml
+++ b/tests/e2e/spark-versions/chainsaw-test.yaml
@@ -29,7 +29,7 @@ spec:
- name: "JAVA_VERSION"
value: "17"
- name: "IMAGE"
- value: 'spark:4.0.0-preview1-scala2.13-java17-ubuntu'
+ value: "apache/spark:4.0.0-preview1-scala2.13-java17-ubuntu"
- bindings:
- name: "SPARK_VERSION"
value: "3.5.2"
diff --git a/tests/e2e/state-transition/spark-cluster-example-succeeded.yaml
b/tests/e2e/state-transition/spark-cluster-example-succeeded.yaml
index f36b286..85fd21c 100644
--- a/tests/e2e/state-transition/spark-cluster-example-succeeded.yaml
+++ b/tests/e2e/state-transition/spark-cluster-example-succeeded.yaml
@@ -26,7 +26,7 @@ spec:
minWorkers: 1
maxWorkers: 1
sparkConf:
- spark.kubernetes.container.image: "spark:4.0.0-preview1"
+ spark.kubernetes.container.image: "apache/spark:4.0.0-preview1"
spark.master.ui.title: "Spark Cluster E2E Test"
spark.master.rest.enabled: "true"
spark.master.rest.host: "0.0.0.0"
diff --git a/tests/e2e/state-transition/spark-example-succeeded.yaml
b/tests/e2e/state-transition/spark-example-succeeded.yaml
index dcec4a3..a3e7b76 100644
--- a/tests/e2e/state-transition/spark-example-succeeded.yaml
+++ b/tests/e2e/state-transition/spark-example-succeeded.yaml
@@ -25,7 +25,7 @@ spec:
jars:
"local:///opt/spark/examples/jars/spark-examples_2.13-4.0.0-preview1.jar"
sparkConf:
spark.executor.instances: "1"
- spark.kubernetes.container.image: "spark:4.0.0-preview1-scala2.13-java17-ubuntu"
+ spark.kubernetes.container.image: "apache/spark:4.0.0-preview1-scala2.13-java17-ubuntu"
spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
runtimeVersions:
sparkVersion: 4.0.0-preview1
diff --git a/tests/e2e/watched-namespaces/spark-example.yaml
b/tests/e2e/watched-namespaces/spark-example.yaml
index 031ea08..a36a0a2 100644
--- a/tests/e2e/watched-namespaces/spark-example.yaml
+++ b/tests/e2e/watched-namespaces/spark-example.yaml
@@ -25,7 +25,7 @@ spec:
jars:
"local:///opt/spark/examples/jars/spark-examples_2.13-4.0.0-preview1.jar"
sparkConf:
spark.executor.instances: "1"
- spark.kubernetes.container.image: "spark:4.0.0-preview1-scala2.13-java17-ubuntu"
+ spark.kubernetes.container.image: "apache/spark:4.0.0-preview1-scala2.13-java17-ubuntu"
spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
spark.kubernetes.driver.request.cores: "0.5"
spark.kubernetes.executor.request.cores: "0.5"
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]