This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git
The following commit(s) were added to refs/heads/main by this push:
new 6629764 [SPARK-49388] Remove `SPARK_NO_DAEMONIZE` in favor of live log UIs
6629764 is described below
commit 66297647380faaafeb0893d52ca0326c5132fc2f
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Sun Aug 25 22:52:46 2024 -0700
[SPARK-49388] Remove `SPARK_NO_DAEMONIZE` in favor of live log UIs
### What changes were proposed in this pull request?
This PR aims to remove `SPARK_NO_DAEMONIZE` in favor of live log UIs.
- https://github.com/apache/spark/pull/44890
- https://github.com/apache/spark/pull/44888
### Why are the changes needed?
Spark Master/Worker supports live log UIs already. A user doesn't need to
use `kubectl` to access K8s log.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Manual review.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #103 from dongjoon-hyun/SPARK-49388.
Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../spark/k8s/operator/SparkClusterResourceSpec.java | 20 ++++++++++++--------
1 file changed, 12 insertions(+), 8 deletions(-)
diff --git a/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java b/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java
index 390af3d..8eed53b 100644
--- a/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java
+++ b/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java
@@ -176,15 +176,14 @@ public class SparkClusterResourceSpec {
.withName("master")
.withImage(image)
.addNewEnv()
- .withName("SPARK_NO_DAEMONIZE")
- .withValue("1")
- .endEnv()
- .addNewEnv()
.withName("SPARK_MASTER_OPTS")
.withValue(options)
.endEnv()
.addToCommand("bash")
- .addToArgs("/opt/spark/sbin/start-master.sh")
+ .addToArgs(
+ "-c",
+ "/opt/spark/sbin/start-master.sh && while /opt/spark/sbin/spark-daemon.sh status "
+ + "org.apache.spark.deploy.master.Master 1; do sleep 1; done")
.addNewPort()
.withName("web")
.withContainerPort(8080)
@@ -240,15 +239,20 @@ public class SparkClusterResourceSpec {
.withName("worker")
.withImage(image)
.addNewEnv()
- .withName("SPARK_NO_DAEMONIZE")
- .withValue("1")
+ .withName("SPARK_LOG_DIR")
+ .withValue("/opt/spark/work/logs")
.endEnv()
.addNewEnv()
.withName("SPARK_WORKER_OPTS")
.withValue(options)
.endEnv()
.addToCommand("bash")
- .addToArgs("/opt/spark/sbin/start-worker.sh", "spark://" + name + "-master-svc:7077")
+ .addToArgs(
+ "-c",
+ "/opt/spark/sbin/start-worker.sh spark://"
+ + name
+ + "-master-svc:7077 && while /opt/spark/sbin/spark-daemon.sh status "
+ + "org.apache.spark.deploy.worker.Worker 1; do sleep 1; done")
.endContainer()
.endSpec()
.endTemplate()
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]