Repository: spark
Updated Branches:
  refs/heads/branch-2.0 a21ecc996 -> 750f88045


[SPARK-16966][SQL][CORE] App Name is a randomUUID even when "spark.app.name" 
exists

## What changes were proposed in this pull request?

Don't override app name specified in `SparkConf` with a random app name. Only 
set it if the conf has no app name even after options have been applied.

See also https://github.com/apache/spark/pull/14602
This is similar to Sherry302's original proposal in 
https://github.com/apache/spark/pull/14556

## How was this patch tested?

Jenkins test, with new case reproducing the bug

Author: Sean Owen <so...@cloudera.com>

Closes #14630 from srowen/SPARK-16966.2.

(cherry picked from commit cdaa562c9a09e2e83e6df4e84d911ce1428a7a7c)
Signed-off-by: Reynold Xin <r...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/750f8804
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/750f8804
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/750f8804

Branch: refs/heads/branch-2.0
Commit: 750f8804540df5ad68a732f68598c4a2dbbc4761
Parents: a21ecc9
Author: Sean Owen <so...@cloudera.com>
Authored: Sat Aug 13 15:40:43 2016 -0700
Committer: Reynold Xin <r...@databricks.com>
Committed: Sat Aug 13 15:40:59 2016 -0700

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/sql/SparkSession.scala   | 11 +++++++----
 .../org/apache/spark/sql/SparkSessionBuilderSuite.scala  |  1 +
 2 files changed, 8 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/750f8804/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 946d8cb..c88206c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -822,16 +822,19 @@ object SparkSession {
         // No active nor global default session. Create a new one.
         val sparkContext = userSuppliedContext.getOrElse {
           // set app name if not given
-          if (!options.contains("spark.app.name")) {
-            options += "spark.app.name" -> java.util.UUID.randomUUID().toString
-          }
-
+          val randomAppName = java.util.UUID.randomUUID().toString
           val sparkConf = new SparkConf()
           options.foreach { case (k, v) => sparkConf.set(k, v) }
+          if (!sparkConf.contains("spark.app.name")) {
+            sparkConf.setAppName(randomAppName)
+          }
           val sc = SparkContext.getOrCreate(sparkConf)
           // maybe this is an existing SparkContext, update its SparkConf 
which maybe used
           // by SparkSession
           options.foreach { case (k, v) => sc.conf.set(k, v) }
+          if (!sc.conf.contains("spark.app.name")) {
+            sc.conf.setAppName(randomAppName)
+          }
           sc
         }
         session = new SparkSession(sparkContext)

http://git-wip-us.apache.org/repos/asf/spark/blob/750f8804/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
index 418345b..386d13d 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
@@ -100,6 +100,7 @@ class SparkSessionBuilderSuite extends SparkFunSuite {
     assert(session.conf.get("key2") == "value2")
     assert(session.sparkContext.conf.get("key1") == "value1")
     assert(session.sparkContext.conf.get("key2") == "value2")
+    assert(session.sparkContext.conf.get("spark.app.name") == "test")
     session.stop()
   }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to