Repository: spark
Updated Branches:
  refs/heads/master c7aeecd08 -> 51afde9d8


[SPARK-4010][Web UI] Spark UI returns 500 in yarn-client mode

The problem was caused by #1966.
CC YanTangZhai andrewor14

Author: GuoQiang Li <[email protected]>

Closes #2858 from witgo/SPARK-4010 and squashes the following commits:

9866fbf [GuoQiang Li] Spark UI returns 500 in yarn-client mode


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/51afde9d
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/51afde9d
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/51afde9d

Branch: refs/heads/master
Commit: 51afde9d8b8a67958c4632a13af143d7c7fd1f04
Parents: c7aeecd
Author: GuoQiang Li <[email protected]>
Authored: Mon Oct 20 11:01:26 2014 -0700
Committer: Andrew Or <[email protected]>
Committed: Mon Oct 20 11:03:53 2014 -0700

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/SparkContext.scala      | 8 ++++----
 .../scala/org/apache/spark/ui/jobs/JobProgressPage.scala     | 2 +-
 2 files changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/51afde9d/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala 
b/core/src/main/scala/org/apache/spark/SparkContext.scala
index dd31579..ac7935b 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -239,6 +239,10 @@ class SparkContext(config: SparkConf) extends Logging {
       None
     }
 
+  // Bind the UI before starting the task scheduler to communicate
+  // the bound port to the cluster manager properly
+  ui.foreach(_.bind())
+
   /** A default Hadoop Configuration for the Hadoop code (e.g. file systems) 
that we reuse. */
   val hadoopConfiguration = SparkHadoopUtil.get.newConfiguration(conf)
 
@@ -341,10 +345,6 @@ class SparkContext(config: SparkConf) extends Logging {
   postEnvironmentUpdate()
   postApplicationStart()
 
-  // Bind the SparkUI after starting the task scheduler
-  // because certain pages and listeners depend on it
-  ui.foreach(_.bind())
-
   private[spark] var checkpointDir: Option[String] = None
 
   // Thread Local variable that can be used by users to pass information down 
the stack

http://git-wip-us.apache.org/repos/asf/spark/blob/51afde9d/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressPage.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressPage.scala 
b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressPage.scala
index a82f71e..1e02f12 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressPage.scala
@@ -29,7 +29,7 @@ private[ui] class JobProgressPage(parent: JobProgressTab) 
extends WebUIPage("")
   private val live = parent.live
   private val sc = parent.sc
   private val listener = parent.listener
-  private lazy val isFairScheduler = parent.isFairScheduler
+  private def isFairScheduler = parent.isFairScheduler
 
   def render(request: HttpServletRequest): Seq[Node] = {
     listener.synchronized {


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to