This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git


The following commit(s) were added to refs/heads/main by this push:
     new 2df97c8  [SPARK-53511] Make `examples` directory up-to-date with Spark 
4.0.1
2df97c8 is described below

commit 2df97c883a846b3ddca70bc01a030ebbb264cb3b
Author: Dongjoon Hyun <dongj...@apache.org>
AuthorDate: Sun Sep 7 19:24:13 2025 -0700

    [SPARK-53511] Make `examples` directory up-to-date with Spark 4.0.1
    
    ### What changes were proposed in this pull request?
    
    This PR aims to make `examples` directory up-to-date with Spark 4.0.1.
    
    ### Why are the changes needed?
    
    To provide newly updated examples. Two places per file are updated.
    
    ```yaml
     spec:
       runtimeVersions:
    -    sparkVersion: "4.0.0"
    +    sparkVersion: "4.0.1"
    ```
    
    ```yaml
     spec:
       sparkConf:
    -    spark.kubernetes.container.image: "apache/spark:4.0.0"
    +    spark.kubernetes.container.image: "apache/spark:4.0.1"
    ```
    
    ### Does this PR introduce _any_ user-facing change?
    
    Only the examples are updated. There is no behavior change on the Spark K8s 
Operator side.
    
    ### How was this patch tested?
    
    Manual review.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #312 from dongjoon-hyun/SPARK-53511.
    
    Authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 examples/cluster-java21.yaml                          | 4 ++--
 examples/cluster-on-yunikorn.yaml                     | 4 ++--
 examples/cluster-with-hpa-template.yaml               | 4 ++--
 examples/cluster-with-hpa.yaml                        | 4 ++--
 examples/cluster-with-template.yaml                   | 4 ++--
 examples/dfs-read-write.yaml                          | 4 ++--
 examples/pi-java21.yaml                               | 4 ++--
 examples/pi-on-yunikorn.yaml                          | 4 ++--
 examples/pi-scala.yaml                                | 4 ++--
 examples/pi-v1alpha1.yaml                             | 4 ++--
 examples/pi-v1beta1.yaml                              | 4 ++--
 examples/pi-with-driver-timeout.yaml                  | 4 ++--
 examples/pi-with-eventlog.yaml                        | 4 ++--
 examples/pi-with-one-pod.yaml                         | 4 ++--
 examples/pi-with-spark-connect-plugin.yaml            | 4 ++--
 examples/pi-with-template.yaml                        | 4 ++--
 examples/pi.yaml                                      | 4 ++--
 examples/prod-cluster-with-three-workers.yaml         | 4 ++--
 examples/pyspark-pi.yaml                              | 4 ++--
 examples/qa-cluster-with-one-worker.yaml              | 4 ++--
 examples/spark-connect-server-with-spark-cluster.yaml | 4 ++--
 examples/spark-connect-server.yaml                    | 4 ++--
 examples/spark-history-server.yaml                    | 4 ++--
 examples/spark-thrift-server.yaml                     | 4 ++--
 examples/sql.yaml                                     | 4 ++--
 examples/stream-word-count.yaml                       | 4 ++--
 26 files changed, 52 insertions(+), 52 deletions(-)

diff --git a/examples/cluster-java21.yaml b/examples/cluster-java21.yaml
index b6e2c0f..b81fbc3 100644
--- a/examples/cluster-java21.yaml
+++ b/examples/cluster-java21.yaml
@@ -18,14 +18,14 @@ metadata:
   name: cluster-java21
 spec:
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
   clusterTolerations:
     instanceConfig:
       initWorkers: 3
       minWorkers: 3
       maxWorkers: 3
   sparkConf:
-    spark.kubernetes.container.image: "apache/spark:4.0.0-java21"
+    spark.kubernetes.container.image: "apache/spark:4.0.1-java21"
     spark.master.ui.title: "Prod Spark Cluster (Java 21)"
     spark.master.rest.enabled: "true"
     spark.master.rest.host: "0.0.0.0"
diff --git a/examples/cluster-on-yunikorn.yaml 
b/examples/cluster-on-yunikorn.yaml
index 1304e21..fa5c882 100644
--- a/examples/cluster-on-yunikorn.yaml
+++ b/examples/cluster-on-yunikorn.yaml
@@ -18,14 +18,14 @@ metadata:
   name: cluster-on-yunikorn
 spec:
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
   clusterTolerations:
     instanceConfig:
       initWorkers: 1
       minWorkers: 1
       maxWorkers: 1
   sparkConf:
-    spark.kubernetes.container.image: "apache/spark:4.0.0"
+    spark.kubernetes.container.image: "apache/spark:4.0.1"
     spark.kubernetes.scheduler.name: "yunikorn"
     spark.master.ui.title: "Spark Cluster on YuniKorn Scheduler"
     spark.master.rest.enabled: "true"
diff --git a/examples/cluster-with-hpa-template.yaml 
b/examples/cluster-with-hpa-template.yaml
index 0216506..73f7a20 100644
--- a/examples/cluster-with-hpa-template.yaml
+++ b/examples/cluster-with-hpa-template.yaml
@@ -18,7 +18,7 @@ metadata:
   name: cluster-with-hpa-template
 spec:
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
   clusterTolerations:
     instanceConfig:
       initWorkers: 1
@@ -58,7 +58,7 @@ spec:
             value: 1
             periodSeconds: 1200
   sparkConf:
-    spark.kubernetes.container.image: "apache/spark:4.0.0-java21"
+    spark.kubernetes.container.image: "apache/spark:4.0.1-java21"
     spark.master.ui.title: "Cluster with HorizontalPodAutoscaler Template"
     spark.master.rest.enabled: "true"
     spark.master.rest.host: "0.0.0.0"
diff --git a/examples/cluster-with-hpa.yaml b/examples/cluster-with-hpa.yaml
index 07c6cec..8f5c164 100644
--- a/examples/cluster-with-hpa.yaml
+++ b/examples/cluster-with-hpa.yaml
@@ -18,7 +18,7 @@ metadata:
   name: cluster-with-hpa
 spec:
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
   clusterTolerations:
     instanceConfig:
       initWorkers: 3
@@ -38,7 +38,7 @@ spec:
                 cpu: "3"
                 memory: "3Gi"
   sparkConf:
-    spark.kubernetes.container.image: "apache/spark:4.0.0-java21"
+    spark.kubernetes.container.image: "apache/spark:4.0.1-java21"
     spark.master.ui.title: "Cluster with HorizontalPodAutoscaler"
     spark.master.rest.enabled: "true"
     spark.master.rest.host: "0.0.0.0"
diff --git a/examples/cluster-with-template.yaml 
b/examples/cluster-with-template.yaml
index 51fc496..4627171 100644
--- a/examples/cluster-with-template.yaml
+++ b/examples/cluster-with-template.yaml
@@ -18,7 +18,7 @@ metadata:
   name: cluster-with-template
 spec:
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
   clusterTolerations:
     instanceConfig:
       initWorkers: 1
@@ -93,7 +93,7 @@ spec:
       annotations:
         customAnnotation: "annotation"
   sparkConf:
-    spark.kubernetes.container.image: "apache/spark:4.0.0"
+    spark.kubernetes.container.image: "apache/spark:4.0.1"
     spark.master.ui.title: "Spark Cluster with Template"
     spark.master.rest.enabled: "true"
     spark.master.rest.host: "0.0.0.0"
diff --git a/examples/dfs-read-write.yaml b/examples/dfs-read-write.yaml
index ff0d1a6..5a6b47d 100644
--- a/examples/dfs-read-write.yaml
+++ b/examples/dfs-read-write.yaml
@@ -32,7 +32,7 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0-java21-scala"
+    spark.kubernetes.container.image: "apache/spark:4.0.1-java21-scala"
     spark.hadoop.fs.defaultFS: "s3a://data"
     spark.hadoop.fs.s3a.endpoint: "http://localstack:4566"
     spark.hadoop.fs.s3a.path.style.access: "true"
@@ -41,4 +41,4 @@ spec:
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
diff --git a/examples/pi-java21.yaml b/examples/pi-java21.yaml
index 4106bf1..14f9301 100644
--- a/examples/pi-java21.yaml
+++ b/examples/pi-java21.yaml
@@ -24,8 +24,8 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0-java21-scala"
+    spark.kubernetes.container.image: "apache/spark:4.0.1-java21-scala"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
diff --git a/examples/pi-on-yunikorn.yaml b/examples/pi-on-yunikorn.yaml
index 3952a52..d0c35c4 100644
--- a/examples/pi-on-yunikorn.yaml
+++ b/examples/pi-on-yunikorn.yaml
@@ -25,7 +25,7 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0"
+    spark.kubernetes.container.image: "apache/spark:4.0.1"
     spark.kubernetes.scheduler.name: "yunikorn"
     spark.kubernetes.driver.label.queue: "root.default"
     spark.kubernetes.executor.label.queue: "root.default"
@@ -34,4 +34,4 @@ spec:
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
diff --git a/examples/pi-scala.yaml b/examples/pi-scala.yaml
index 7abefc6..d6e1b2d 100644
--- a/examples/pi-scala.yaml
+++ b/examples/pi-scala.yaml
@@ -24,8 +24,8 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0-scala"
+    spark.kubernetes.container.image: "apache/spark:4.0.1-scala"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
diff --git a/examples/pi-v1alpha1.yaml b/examples/pi-v1alpha1.yaml
index d97fb6c..49d1690 100644
--- a/examples/pi-v1alpha1.yaml
+++ b/examples/pi-v1alpha1.yaml
@@ -24,8 +24,8 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0"
+    spark.kubernetes.container.image: "apache/spark:4.0.1"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
diff --git a/examples/pi-v1beta1.yaml b/examples/pi-v1beta1.yaml
index 81d0dd8..5cb2c2c 100644
--- a/examples/pi-v1beta1.yaml
+++ b/examples/pi-v1beta1.yaml
@@ -24,8 +24,8 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0"
+    spark.kubernetes.container.image: "apache/spark:4.0.1"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
diff --git a/examples/pi-with-driver-timeout.yaml 
b/examples/pi-with-driver-timeout.yaml
index ae23fba..a36e94d 100644
--- a/examples/pi-with-driver-timeout.yaml
+++ b/examples/pi-with-driver-timeout.yaml
@@ -27,8 +27,8 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0"
+    spark.kubernetes.container.image: "apache/spark:4.0.1"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
diff --git a/examples/pi-with-eventlog.yaml b/examples/pi-with-eventlog.yaml
index 6bca6ba..8c0995c 100644
--- a/examples/pi-with-eventlog.yaml
+++ b/examples/pi-with-eventlog.yaml
@@ -26,7 +26,7 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0-java21-scala"
+    spark.kubernetes.container.image: "apache/spark:4.0.1-java21-scala"
     spark.eventLog.enabled: "true"
     spark.eventLog.dir: "s3a://spark-events/"
     spark.hadoop.fs.s3a.endpoint: "http://localstack:4566"
@@ -36,4 +36,4 @@ spec:
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
diff --git a/examples/pi-with-one-pod.yaml b/examples/pi-with-one-pod.yaml
index 82eaf5b..4a9cc00 100644
--- a/examples/pi-with-one-pod.yaml
+++ b/examples/pi-with-one-pod.yaml
@@ -24,6 +24,6 @@ spec:
     spark.kubernetes.driver.request.cores: "5"
     spark.kubernetes.driver.limit.cores: "5"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0"
+    spark.kubernetes.container.image: "apache/spark:4.0.1"
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
diff --git a/examples/pi-with-spark-connect-plugin.yaml 
b/examples/pi-with-spark-connect-plugin.yaml
index 3478983..c18e65a 100644
--- a/examples/pi-with-spark-connect-plugin.yaml
+++ b/examples/pi-with-spark-connect-plugin.yaml
@@ -26,8 +26,8 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0"
+    spark.kubernetes.container.image: "apache/spark:4.0.1"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
diff --git a/examples/pi-with-template.yaml b/examples/pi-with-template.yaml
index 5570fad..8845d6f 100644
--- a/examples/pi-with-template.yaml
+++ b/examples/pi-with-template.yaml
@@ -24,7 +24,7 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0-scala"
+    spark.kubernetes.container.image: "apache/spark:4.0.1-scala"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   driverSpec:
@@ -38,4 +38,4 @@ spec:
         priorityClassName: system-cluster-critical
         terminationGracePeriodSeconds: 0
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
diff --git a/examples/pi.yaml b/examples/pi.yaml
index 58fa8a1..1549622 100644
--- a/examples/pi.yaml
+++ b/examples/pi.yaml
@@ -24,8 +24,8 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0"
+    spark.kubernetes.container.image: "apache/spark:4.0.1"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
diff --git a/examples/prod-cluster-with-three-workers.yaml 
b/examples/prod-cluster-with-three-workers.yaml
index 128b9df..1957a6e 100644
--- a/examples/prod-cluster-with-three-workers.yaml
+++ b/examples/prod-cluster-with-three-workers.yaml
@@ -18,14 +18,14 @@ metadata:
   name: prod
 spec:
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
   clusterTolerations:
     instanceConfig:
       initWorkers: 3
       minWorkers: 3
       maxWorkers: 3
   sparkConf:
-    spark.kubernetes.container.image: "apache/spark:4.0.0"
+    spark.kubernetes.container.image: "apache/spark:4.0.1"
     spark.master.ui.title: "Prod Spark Cluster"
     spark.master.rest.enabled: "true"
     spark.master.rest.host: "0.0.0.0"
diff --git a/examples/pyspark-pi.yaml b/examples/pyspark-pi.yaml
index 347183e..0eafc93 100644
--- a/examples/pyspark-pi.yaml
+++ b/examples/pyspark-pi.yaml
@@ -23,8 +23,8 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0"
+    spark.kubernetes.container.image: "apache/spark:4.0.1"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
diff --git a/examples/qa-cluster-with-one-worker.yaml 
b/examples/qa-cluster-with-one-worker.yaml
index 47dc71b..e9a4cb8 100644
--- a/examples/qa-cluster-with-one-worker.yaml
+++ b/examples/qa-cluster-with-one-worker.yaml
@@ -18,14 +18,14 @@ metadata:
   name: qa
 spec:
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
   clusterTolerations:
     instanceConfig:
       initWorkers: 1
       minWorkers: 1
       maxWorkers: 1
   sparkConf:
-    spark.kubernetes.container.image: "apache/spark:4.0.0"
+    spark.kubernetes.container.image: "apache/spark:4.0.1"
     spark.master.ui.title: "QA Spark Cluster"
     spark.master.rest.enabled: "true"
     spark.master.rest.host: "0.0.0.0"
diff --git a/examples/spark-connect-server-with-spark-cluster.yaml 
b/examples/spark-connect-server-with-spark-cluster.yaml
index 1e220aa..00fca54 100644
--- a/examples/spark-connect-server-with-spark-cluster.yaml
+++ b/examples/spark-connect-server-with-spark-cluster.yaml
@@ -24,7 +24,7 @@ spec:
     spark.executor.cores: "1"
     spark.cores.max: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0"
+    spark.kubernetes.container.image: "apache/spark:4.0.1"
     spark.ui.reverseProxy: "true"
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
diff --git a/examples/spark-connect-server.yaml 
b/examples/spark-connect-server.yaml
index 69ea777..e9613f2 100644
--- a/examples/spark-connect-server.yaml
+++ b/examples/spark-connect-server.yaml
@@ -24,8 +24,8 @@ spec:
     spark.dynamicAllocation.minExecutors: "3"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0"
+    spark.kubernetes.container.image: "apache/spark:4.0.1"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
diff --git a/examples/spark-history-server.yaml 
b/examples/spark-history-server.yaml
index 00f05fb..77a051f 100644
--- a/examples/spark-history-server.yaml
+++ b/examples/spark-history-server.yaml
@@ -23,7 +23,7 @@ spec:
     spark.jars.ivy: "/tmp/.ivy2.5.2"
     spark.driver.memory: "2g"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0-java21-scala"
+    spark.kubernetes.container.image: "apache/spark:4.0.1-java21-scala"
     spark.ui.port: "18080"
     spark.history.fs.logDirectory: "s3a://spark-events"
     spark.history.fs.cleaner.enabled: "true"
@@ -36,7 +36,7 @@ spec:
     spark.hadoop.fs.s3a.access.key: "test"
     spark.hadoop.fs.s3a.secret.key: "test"
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
   applicationTolerations:
     restartConfig:
       restartPolicy: Always
diff --git a/examples/spark-thrift-server.yaml 
b/examples/spark-thrift-server.yaml
index 5c31140..c8450aa 100644
--- a/examples/spark-thrift-server.yaml
+++ b/examples/spark-thrift-server.yaml
@@ -24,9 +24,9 @@ spec:
     spark.dynamicAllocation.minExecutors: "3"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0-java21-scala"
+    spark.kubernetes.container.image: "apache/spark:4.0.1-java21-scala"
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
   applicationTolerations:
     restartConfig:
       restartPolicy: Always
diff --git a/examples/sql.yaml b/examples/sql.yaml
index a14ee29..57bcd24 100644
--- a/examples/sql.yaml
+++ b/examples/sql.yaml
@@ -25,6 +25,6 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0"
+    spark.kubernetes.container.image: "apache/spark:4.0.1"
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"
diff --git a/examples/stream-word-count.yaml b/examples/stream-word-count.yaml
index 6b501db..9d2cd06 100644
--- a/examples/stream-word-count.yaml
+++ b/examples/stream-word-count.yaml
@@ -27,7 +27,7 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.0-java21-scala"
+    spark.kubernetes.container.image: "apache/spark:4.0.1-java21-scala"
     spark.log.level: "WARN"
     spark.eventLog.enabled: "true"
     spark.eventLog.dir: "s3a://spark-events/"
@@ -37,4 +37,4 @@ spec:
     spark.hadoop.fs.s3a.access.key: "test"
     spark.hadoop.fs.s3a.secret.key: "test"
   runtimeVersions:
-    sparkVersion: "4.0.0"
+    sparkVersion: "4.0.1"


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to