This is an automated email from the ASF dual-hosted git repository.
yumwang pushed a commit to branch branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.5 by this push:
new 19acdc906ca [SPARK-44466][SQL] Exclude configs starting with
`SPARK_DRIVER_PREFIX` and `SPARK_EXECUTOR_PREFIX` from modifiedConfigs
19acdc906ca is described below
commit 19acdc906cadd82d9d21a1e798aba773bfe383c3
Author: Yuming Wang <[email protected]>
AuthorDate: Tue Jul 25 10:45:32 2023 +0800
[SPARK-44466][SQL] Exclude configs starting with `SPARK_DRIVER_PREFIX` and
`SPARK_EXECUTOR_PREFIX` from modifiedConfigs
### What changes were proposed in this pull request?
This PR excludes configs starting with `SPARK_DRIVER_PREFIX` and
`SPARK_EXECUTOR_PREFIX` from modifiedConfigs.
### Why are the changes needed?
To make `SQL / DataFrame Properties` exclude these properties, because
some configs [might be
supplemented](https://github.com/apache/spark/blob/caa3df48d94ff2e7c824a87acf51ab4978e18098/core/src/main/scala/org/apache/spark/SparkContext.scala#L422-L423):
<img width="1640" alt="image"
src="https://github.com/apache/spark/assets/5399861/9c1ad853-5883-4c66-8a32-573604ebb7ea">
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Unit test.
Closes #42049 from wangyum/SPARK-44466.
Authored-by: Yuming Wang <[email protected]>
Signed-off-by: Yuming Wang <[email protected]>
(cherry picked from commit 7f730f65b65f4bed8edcf466f7599385ec8d408f)
Signed-off-by: Yuming Wang <[email protected]>
---
.../main/scala/org/apache/spark/sql/execution/SQLExecution.scala | 7 ++++++-
.../scala/org/apache/spark/sql/execution/SQLExecutionSuite.scala | 3 +++
2 files changed, 9 insertions(+), 1 deletion(-)
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/SQLExecution.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/SQLExecution.scala
index 68b29e9e216..7c157a74996 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SQLExecution.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SQLExecution.scala
@@ -21,6 +21,7 @@ import java.util.concurrent.{ConcurrentHashMap,
ExecutorService, Future => JFutu
import java.util.concurrent.atomic.AtomicLong
import org.apache.spark.{ErrorMessageFormat, SparkContext, SparkThrowable,
SparkThrowableHelper}
+import org.apache.spark.internal.config.{SPARK_DRIVER_PREFIX,
SPARK_EXECUTOR_PREFIX}
import org.apache.spark.internal.config.Tests.IS_TESTING
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.ui.{SparkListenerSQLExecutionEnd,
SparkListenerSQLExecutionStart}
@@ -97,7 +98,11 @@ object SQLExecution {
val globalConfigs = sparkSession.sharedState.conf.getAll.toMap
val modifiedConfigs = sparkSession.sessionState.conf.getAllConfs
- .filterNot(kv => globalConfigs.get(kv._1).contains(kv._2))
+ .filterNot { case (key, value) =>
+ key.startsWith(SPARK_DRIVER_PREFIX) ||
+ key.startsWith(SPARK_EXECUTOR_PREFIX) ||
+ globalConfigs.get(key).contains(value)
+ }
val redactedConfigs =
sparkSession.sessionState.conf.redactOptions(modifiedConfigs)
withSQLConfPropagated(sparkSession) {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLExecutionSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLExecutionSuite.scala
index 740c10f17b2..1a062d8d4e2 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLExecutionSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLExecutionSuite.scala
@@ -26,6 +26,7 @@ import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.duration._
import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
+import org.apache.spark.launcher.SparkLauncher
import org.apache.spark.scheduler.{SparkListener, SparkListenerEvent,
SparkListenerJobStart}
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionStart
@@ -167,6 +168,7 @@ class SQLExecutionSuite extends SparkFunSuite {
.master("local[*]")
.appName("test")
.config("k1", "v1")
+ .config(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, "-Dkey=value")
.getOrCreate()
try {
@@ -182,6 +184,7 @@ class SQLExecutionSuite extends SparkFunSuite {
assert(start.modifiedConfigs("k2") == "v2")
assert(start.modifiedConfigs.contains("redaction.password"))
assert(start.modifiedConfigs("redaction.password") ==
REDACTION_REPLACEMENT_TEXT)
+
assert(!start.modifiedConfigs.contains(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS))
index.incrementAndGet()
}
case _ =>
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]