This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 98f7182122e [SPARK-41719][CORE] Skip SSLOptions sub-settings if `ssl` is disabled
98f7182122e is described below
commit 98f7182122e151cbf7ea83303e39c44d9acb1a72
Author: Shrikant Prasad <[email protected]>
AuthorDate: Tue Jan 3 17:40:40 2023 -0800
[SPARK-41719][CORE] Skip SSLOptions sub-settings if `ssl` is disabled
### What changes were proposed in this pull request?
In SSLOptions, the rest of the settings should be parsed and set only when ssl is enabled.
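As a rough illustration of the shape of the change (the real modification is to `SSLOptions.parse`, shown in the diff below; `SimpleSslOptions` and `parseSslOptions` here are hypothetical stand-ins, not Spark API):

```scala
import org.apache.spark.SparkConf

// Hypothetical, simplified stand-in for SSLOptions, used only to illustrate the guard.
case class SimpleSslOptions(enabled: Boolean = false, keyStore: Option[String] = None)

def parseSslOptions(conf: SparkConf, ns: String): SimpleSslOptions = {
  val enabled = conf.getBoolean(s"$ns.enabled", defaultValue = false)
  if (!enabled) {
    // With ssl disabled, return defaults immediately; no other $ns.* key is read or validated.
    return SimpleSslOptions()
  }
  // Only when ssl is enabled are the remaining sub-settings resolved.
  SimpleSslOptions(enabled = true, keyStore = conf.getOption(s"$ns.keyStore"))
}
```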
### Why are the changes needed?
If spark.ssl.enabled is false, there is no point in resolving the rest of the spark.ssl.* settings in SSLOptions; doing so performs unnecessary work just to set properties that will never be used.

A further implication of resolving the remaining settings is that any error encountered while setting these properties causes a job failure, even though the job should have worked since ssl is disabled. For example, if the user does not have access to the keystore path configured in hadoop.security.credential.provider.path of hive-site.xml, launching spark-shell can fail because SSLOptions cannot be initialized due to the error while accessing the keystore.
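To make that scenario concrete, here is a hypothetical, test-style sketch (not part of this patch): ssl is off but keystore keys and a credential provider path are still present. The suite name and paths are illustrative; it assumes Spark's core test classpath and must live under the `org.apache.spark` package because `SSLOptions` and `SparkFunSuite` are `private[spark]`.

```scala
package org.apache.spark

import org.apache.hadoop.conf.Configuration

// Hypothetical companion sketch; "/restricted/..." paths are placeholders.
class SslDisabledScenarioSuite extends SparkFunSuite {
  test("ssl disabled: sub-settings are not resolved") {
    val conf = new SparkConf()
      .set("spark.ssl.enabled", "false")
      .set("spark.ssl.keyStore", "/restricted/keystore")   // a path the user cannot read
      .set("spark.ssl.keyStorePassword", "password")
    val hadoopConf = new Configuration()
    // The report describes this value coming from hive-site.xml; set directly here for brevity.
    hadoopConf.set("hadoop.security.credential.provider.path",
      "jceks://file/restricted/creds.jceks")

    // With this change, parse returns plain defaults without reading the keystore or the
    // credential provider, so an inaccessible path can no longer fail shell/job startup.
    val opts = SSLOptions.parse(conf, hadoopConf, "spark.ssl", defaults = None)
    assert(opts.enabled === false)
    assert(opts.keyStore === None)
    assert(opts.keyStorePassword === None)
  }
}
```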
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Added new test.
Closes #39221 from shrprasa/ssl_options_fix.
Authored-by: Shrikant Prasad <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
core/src/main/scala/org/apache/spark/SSLOptions.scala | 4 +++-
.../test/scala/org/apache/spark/SSLOptionsSuite.scala | 18 ++++++++++++++++--
2 files changed, 19 insertions(+), 3 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SSLOptions.scala b/core/src/main/scala/org/apache/spark/SSLOptions.scala
index f1668966d8e..d159f5717b0 100644
--- a/core/src/main/scala/org/apache/spark/SSLOptions.scala
+++ b/core/src/main/scala/org/apache/spark/SSLOptions.scala
@@ -181,7 +181,9 @@ private[spark] object SSLOptions extends Logging {
       ns: String,
       defaults: Option[SSLOptions] = None): SSLOptions = {
     val enabled = conf.getBoolean(s"$ns.enabled", defaultValue = defaults.exists(_.enabled))
-
+    if (!enabled) {
+      return new SSLOptions()
+    }
     val port = conf.getWithSubstitution(s"$ns.port").map(_.toInt)
     port.foreach { p =>
       require(p >= 0, "Port number must be a non-negative value.")
diff --git a/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala b/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
index c990d81de2e..81bc4ae9da0 100644
--- a/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
@@ -109,7 +109,7 @@ class SSLOptionsSuite extends SparkFunSuite {
     val conf = new SparkConf
     val hadoopConf = new Configuration()
     conf.set("spark.ssl.enabled", "true")
-    conf.set("spark.ssl.ui.enabled", "false")
+    conf.set("spark.ssl.ui.enabled", "true")
     conf.set("spark.ssl.ui.port", "4242")
     conf.set("spark.ssl.keyStore", keyStorePath)
     conf.set("spark.ssl.keyStorePassword", "password")
@@ -125,7 +125,7 @@ class SSLOptionsSuite extends SparkFunSuite {
     val defaultOpts = SSLOptions.parse(conf, hadoopConf, "spark.ssl", defaults = None)
     val opts = SSLOptions.parse(conf, hadoopConf, "spark.ssl.ui", defaults = Some(defaultOpts))
 
-    assert(opts.enabled === false)
+    assert(opts.enabled === true)
     assert(opts.port === Some(4242))
     assert(opts.trustStore.isDefined)
     assert(opts.trustStore.get.getName === "truststore")
@@ -140,6 +140,20 @@
     assert(opts.enabledAlgorithms === Set("ABC", "DEF"))
   }
 
+  test("SPARK-41719: Skip ssl sub-settings if ssl is disabled") {
+    val keyStorePath = new File(this.getClass.getResource("/keystore").toURI).getAbsolutePath
+    val conf = new SparkConf
+    val hadoopConf = new Configuration()
+    conf.set("spark.ssl.enabled", "false")
+    conf.set("spark.ssl.keyStorePassword", "password")
+    conf.set("spark.ssl.keyStore", keyStorePath)
+    val sslOpts = SSLOptions.parse(conf, hadoopConf, "spark.ssl", defaults = None)
+
+    assert(sslOpts.enabled === false)
+    assert(sslOpts.keyStorePassword === None)
+    assert(sslOpts.keyStore === None)
+  }
+
   test("variable substitution") {
     val conf = new SparkConfWithEnv(Map(
       "ENV1" -> "val1",
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]