Repository: spark
Updated Branches:
  refs/heads/branch-0.9 7d007d352 -> 3fba7b7bc


[Spark-3490] Disable SparkUI for tests (backport into 0.9)

Branch-1.2 #2363 (original)
Branch-1.1 #2415
Branch-1.0 #3959
Branch-0.9 #3961 (this PR)

Author: Andrew Or <[email protected]>

Closes #3961 from andrewor14/ui-ports-0.9 and squashes the following commits:

8644997 [Andrew Or] Disable UI for tests


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/3fba7b7b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/3fba7b7b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/3fba7b7b

Branch: refs/heads/branch-0.9
Commit: 3fba7b7bc054648742ffbb76042aeb58ae9139c2
Parents: 7d007d3
Author: Andrew Or <[email protected]>
Authored: Fri Jan 9 10:23:18 2015 -0800
Committer: Andrew Or <[email protected]>
Committed: Fri Jan 9 10:23:18 2015 -0800

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/SparkContext.scala    | 14 ++++++++++----
 .../scheduler/cluster/SimrSchedulerBackend.scala      |  5 +++--
 .../cluster/SparkDeploySchedulerBackend.scala         |  5 +++--
 pom.xml                                               |  4 ++++
 project/SparkBuild.scala                              |  1 +
 .../apache/spark/deploy/yarn/ApplicationMaster.scala  |  2 +-
 .../apache/spark/deploy/yarn/ApplicationMaster.scala  |  2 +-
 7 files changed, 23 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/3fba7b7b/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala 
b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 566472e..1cd3703 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -155,8 +155,14 @@ class SparkContext(
     new MetadataCleaner(MetadataCleanerType.SPARK_CONTEXT, this.cleanup, conf)
 
   // Initialize the Spark UI
-  private[spark] val ui = new SparkUI(this)
-  ui.bind()
+  private[spark] val ui: Option[SparkUI] =
+    if (conf.getBoolean("spark.ui.enabled", true)) {
+      Some(new SparkUI(this))
+    } else {
+      // For tests, do not enable the UI
+      None
+    }
+  ui.foreach(_.bind())
 
   val startTime = System.currentTimeMillis()
 
@@ -202,7 +208,7 @@ class SparkContext(
   @volatile private[spark] var dagScheduler = new DAGScheduler(taskScheduler)
   dagScheduler.start()
 
-  ui.start()
+  ui.foreach(_.start())
 
   /** A default Hadoop Configuration for the Hadoop code (e.g. file systems) 
that we reuse. */
   val hadoopConfiguration = {
@@ -777,7 +783,7 @@ class SparkContext(
 
   /** Shut down the SparkContext. */
   def stop() {
-    ui.stop()
+    ui.foreach(_.stop())
     // Do this only if not stopped already - best case effort.
     // prevent NPE if stopped more than once.
     val dagSchedulerCopy = dagScheduler

http://git-wip-us.apache.org/repos/asf/spark/blob/3fba7b7b/core/src/main/scala/org/apache/spark/scheduler/cluster/SimrSchedulerBackend.scala
----------------------------------------------------------------------
diff --git 
a/core/src/main/scala/org/apache/spark/scheduler/cluster/SimrSchedulerBackend.scala
 
b/core/src/main/scala/org/apache/spark/scheduler/cluster/SimrSchedulerBackend.scala
index d99c761..149c9fd 100644
--- 
a/core/src/main/scala/org/apache/spark/scheduler/cluster/SimrSchedulerBackend.scala
+++ 
b/core/src/main/scala/org/apache/spark/scheduler/cluster/SimrSchedulerBackend.scala
@@ -44,16 +44,17 @@ private[spark] class SimrSchedulerBackend(
 
     val conf = new Configuration()
     val fs = FileSystem.get(conf)
+    val appUIAddress = sc.ui.map(_.appUIAddress).getOrElse("")
 
     logInfo("Writing to HDFS file: "  + driverFilePath)
     logInfo("Writing Akka address: "  + driverUrl)
-    logInfo("Writing Spark UI Address: " + sc.ui.appUIAddress)
+    logInfo("Writing Spark UI Address: " + appUIAddress)
 
     // Create temporary file to prevent race condition where executors get 
empty driverUrl file
     val temp = fs.create(tmpPath, true)
     temp.writeUTF(driverUrl)
     temp.writeInt(maxCores)
-    temp.writeUTF(sc.ui.appUIAddress)
+    temp.writeUTF(appUIAddress)
     temp.close()
 
     // "Atomic" rename

http://git-wip-us.apache.org/repos/asf/spark/blob/3fba7b7b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
----------------------------------------------------------------------
diff --git 
a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
 
b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
index faa6e1e..dfcb22a 100644
--- 
a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
+++ 
b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
@@ -51,8 +51,9 @@ private[spark] class SparkDeploySchedulerBackend(
     val command = Command(
       "org.apache.spark.executor.CoarseGrainedExecutorBackend", args, 
sc.executorEnvs)
     val sparkHome = sc.getSparkHome().getOrElse(null)
-    val appDesc = new ApplicationDescription(appName, maxCores, 
sc.executorMemory, command, sparkHome,
-        "http://" + sc.ui.appUIAddress
+    val appUIAddress = sc.ui.map { x => "http://" + x.appUIAddress 
}.getOrElse("")
+    val appDesc = new ApplicationDescription(
+      appName, maxCores, sc.executorMemory, command, sparkHome, appUIAddress)
 
     client = new AppClient(sc.env.actorSystem, masters, appDesc, this, conf)
     client.start()

http://git-wip-us.apache.org/repos/asf/spark/blob/3fba7b7b/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index e3f4893..6a12a4e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -636,6 +636,10 @@
             
<filereports>${project.build.directory}/SparkTestSuite.txt</filereports>
             <argLine>-Xms64m -Xmx3g</argLine>
             <stderr />
+            <systemProperties>
+              <spark.testing>true</spark.testing>
+              <spark.ui.enabled>false</spark.ui.enabled>
+            </systemProperties>
           </configuration>
           <executions>
             <execution>

http://git-wip-us.apache.org/repos/asf/spark/blob/3fba7b7b/project/SparkBuild.scala
----------------------------------------------------------------------
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 05b430a..2c8ee00 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -149,6 +149,7 @@ object SparkBuild extends Build {
     fork := true,
     javaOptions in Test += "-Dspark.home=" + sparkHome,
     javaOptions in Test += "-Dspark.testing=1",
+    javaOptions in Test += "-Dspark.ui.enabled=false",
     javaOptions += "-Xmx3g",
     // Show full stack trace and duration in test cases.
     testOptions in Test += Tests.Argument("-oDF"),

http://git-wip-us.apache.org/repos/asf/spark/blob/3fba7b7b/yarn/alpha/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
----------------------------------------------------------------------
diff --git 
a/yarn/alpha/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
 
b/yarn/alpha/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
index 66bcaf1..b4c06fa 100644
--- 
a/yarn/alpha/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
+++ 
b/yarn/alpha/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
@@ -227,7 +227,7 @@ class ApplicationMaster(args: ApplicationMasterArguments, 
conf: Configuration,
         assert(sparkContext != null || count >= numTries)
 
         if (null != sparkContext) {
-          uiAddress = sparkContext.ui.appUIAddress
+          uiAddress = sparkContext.ui.map(_.appUIAddress).getOrElse("")
           this.yarnAllocator = YarnAllocationHandler.newAllocator(
             yarnConf,
             resourceManager,

http://git-wip-us.apache.org/repos/asf/spark/blob/3fba7b7b/yarn/stable/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
----------------------------------------------------------------------
diff --git 
a/yarn/stable/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
 
b/yarn/stable/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
index 83f8234..0cb3474 100644
--- 
a/yarn/stable/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
+++ 
b/yarn/stable/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
@@ -199,7 +199,7 @@ class ApplicationMaster(args: ApplicationMasterArguments, 
conf: Configuration,
         assert(sparkContext != null || numTries >= maxNumTries)
 
         if (sparkContext != null) {
-          uiAddress = sparkContext.ui.appUIAddress
+          uiAddress = sparkContext.ui.map(_.appUIAddress).getOrElse("")
           this.yarnAllocator = YarnAllocationHandler.newAllocator(
             yarnConf,
             amClient,


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to