This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 069c46c3fd49 [SPARK-50179][CORE] Make `spark.app.name` property
optional in REST API
069c46c3fd49 is described below
commit 069c46c3fd49270918d0955bb2ad8600363d887c
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Wed Oct 30 20:23:02 2024 -0700
[SPARK-50179][CORE] Make `spark.app.name` property optional in REST API
### What changes were proposed in this pull request?
This PR aims to make `spark.app.name` property optional in REST API.
- When `spark.app.name` is given, Spark 4 will work in the same way without
a behavior change like `app-20241030130510-0000` in the following.
- When `spark.app.name` is omitted, Spark 4 will work without any problem
like `app-20241030130520-0001` in the following.

### Why are the changes needed?
Like `SparkSession` and `SparkContext`, `spark.app.name` is optional.
- `SparkSession`
https://github.com/apache/spark/blob/bb8b691b0f66cf50937f24d0b63342ca0da07e9c/python/pyspark/sql/session.py#L396-L399
- `SparkContext`
https://github.com/apache/spark/blob/bb8b691b0f66cf50937f24d0b63342ca0da07e9c/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala#L890-L892
However, `CreateSubmissionRequest` enforces `spark.app.name` existence like
the following.
```
{
"action" : "ErrorResponse",
"message" : "Malformed request:
org.apache.spark.deploy.rest.SubmitRestProtocolException: Validation of message
CreateSubmissionRequest failed!...",
"serverSparkVersion" : "4.0.0-preview2"
}
```
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Pass the CIs.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #48711 from dongjoon-hyun/SPARK-50179.
Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../scala/org/apache/spark/deploy/rest/SubmitRestProtocolRequest.scala | 1 -
.../scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala | 2 --
examples/src/main/scripts/submit_pi.sh | 1 -
3 files changed, 4 deletions(-)
diff --git
a/core/src/main/scala/org/apache/spark/deploy/rest/SubmitRestProtocolRequest.scala
b/core/src/main/scala/org/apache/spark/deploy/rest/SubmitRestProtocolRequest.scala
index 63882259adcb..c6ff3dbb33cb 100644
---
a/core/src/main/scala/org/apache/spark/deploy/rest/SubmitRestProtocolRequest.scala
+++
b/core/src/main/scala/org/apache/spark/deploy/rest/SubmitRestProtocolRequest.scala
@@ -47,7 +47,6 @@ private[rest] class CreateSubmissionRequest extends
SubmitRestProtocolRequest {
super.doValidate()
assert(sparkProperties != null, "No Spark properties set!")
assertFieldIsSet(appResource, "appResource")
- assertPropertyIsSet("spark.app.name")
assertPropertyIsBoolean(config.DRIVER_SUPERVISE.key)
assertPropertyIsNumeric(config.DRIVER_CORES.key)
assertPropertyIsNumeric(config.CORES_MAX.key)
diff --git
a/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
b/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
index db8b9cc182a7..9303d97de330 100644
---
a/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
+++
b/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
@@ -94,7 +94,6 @@ class StandaloneRestSubmitSuite extends SparkFunSuite {
val RANDOM_PORT = 9000
val allMasters = s"$masterUrl,${Utils.localHostName()}:$RANDOM_PORT"
conf.set("spark.master", allMasters)
- conf.set("spark.app.name", "dreamer")
val appArgs = Array("one", "two", "six")
// main method calls this
val response = new RestSubmissionClientApp().run("app-resource",
"main-class", appArgs, conf)
@@ -112,7 +111,6 @@ class StandaloneRestSubmitSuite extends SparkFunSuite {
val masterUrl = startDummyServer(submitId = submittedDriverId,
submitMessage = submitMessage)
val conf = new SparkConf(loadDefaults = false)
conf.set("spark.master", masterUrl)
- conf.set("spark.app.name", "dreamer")
val appArgs = Array("one", "two", "six")
// main method calls this
val response = new RestSubmissionClientApp().run("app-resource",
"main-class", appArgs, conf)
diff --git a/examples/src/main/scripts/submit_pi.sh
b/examples/src/main/scripts/submit_pi.sh
index e1a84b5b942a..490b15a1dae7 100755
--- a/examples/src/main/scripts/submit_pi.sh
+++ b/examples/src/main/scripts/submit_pi.sh
@@ -34,7 +34,6 @@ curl -XPOST http://$SPARK_MASTER:6066/v1/submissions/create \
"appResource": "",
"sparkProperties": {
"spark.submit.deployMode": "cluster",
- "spark.app.name": "SparkPi",
"spark.driver.cores": "1",
"spark.driver.memory": "1g",
"spark.executor.cores": "1",
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]