Repository: spark
Updated Branches:
  refs/heads/master b75954015 -> bf589fc71


[SPARK-3616] Add basic Selenium tests to WebUISuite

This patch adds Selenium tests for Spark's web UI.  To avoid requiring a
browser installation in the test environment, the tests use Selenium's
HtmlUnitDriver, which is pure Java, instead of a browser-backed driver such
as ChromeDriver.
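
For reference, this is the shape of such a test: a minimal sketch with a
hypothetical suite name and URL (the real tests live in UISeleniumSuite,
included in this diff):

    import org.openqa.selenium.WebDriver
    import org.openqa.selenium.htmlunit.HtmlUnitDriver
    import org.scalatest.{FunSuite, Matchers}
    import org.scalatest.selenium.WebBrowser

    class ExampleUISuite extends FunSuite with WebBrowser with Matchers {
      // HtmlUnitDriver runs in-process on the JVM, so no browser binary is needed.
      implicit val webDriver: WebDriver = new HtmlUnitDriver

      test("storage page renders") {
        go to "http://localhost:4040/storage"  // assumes a locally running Spark UI
        pageTitle should include ("Storage")
      }
    }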

I added new tests to try to reproduce a few UI bugs reported on JIRA, namely
SPARK-3021, SPARK-2105, and SPARK-2527.  I wasn't able to reproduce these bugs;
I suspect that the older ones might have been fixed by other patches.

To make HtmlUnitDriver work, I added an explicit dependency on the
org.apache.httpcomponents version of httpclient so that jets3t's older
version does not take precedence on the classpath.
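
For the sbt side of the build, an equivalent pin would look roughly like
this (a sketch only; the version number is illustrative, not the one
actually used):

    // Force the newer org.apache.httpcomponents client over jets3t's
    // older transitive httpclient.
    dependencyOverrides += "org.apache.httpcomponents" % "httpclient" % "4.3.2"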

I also upgraded ScalaTest to 2.2.1.

Author: Josh Rosen <[email protected]>

Closes #2474 from JoshRosen/webui-selenium-tests and squashes the following commits:

fcc9e83 [Josh Rosen] scalautils -> scalactic package rename
510e54a [Josh Rosen] [SPARK-3616] Add basic Selenium tests to WebUISuite.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/bf589fc7
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/bf589fc7
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/bf589fc7

Branch: refs/heads/master
Commit: bf589fc717c842d1998e3c3a523bc8775cb30269
Parents: b759540
Author: Josh Rosen <[email protected]>
Authored: Sun Oct 26 11:29:27 2014 -0700
Committer: Josh Rosen <[email protected]>
Committed: Sun Oct 26 11:29:27 2014 -0700

----------------------------------------------------------------------
 core/pom.xml                                    |   5 +
 .../scala/org/apache/spark/ui/UIUtils.scala     |   7 +-
 .../main/scala/org/apache/spark/ui/WebUI.scala  |   2 +-
 .../org/apache/spark/ui/storage/RDDPage.scala   |   6 +-
 .../apache/spark/ui/storage/StoragePage.scala   |   2 +-
 .../org/apache/spark/ui/UISeleniumSuite.scala   | 112 +++++++++++++++++++
 pom.xml                                         |   8 +-
 .../expressions/ExpressionEvaluationSuite.scala |   2 +-
 8 files changed, 135 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/bf589fc7/core/pom.xml
----------------------------------------------------------------------
diff --git a/core/pom.xml b/core/pom.xml
index 7b68dba..320d107 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -248,6 +248,11 @@
       </exclusions>
     </dependency>
     <dependency>
+      <groupId>org.seleniumhq.selenium</groupId>
+      <artifactId>selenium-java</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>org.scalatest</groupId>
       <artifactId>scalatest_${scala.binary.version}</artifactId>
       <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/spark/blob/bf589fc7/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/UIUtils.scala b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
index 32e6b15..76714b1 100644
--- a/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
@@ -20,7 +20,7 @@ package org.apache.spark.ui
 import java.text.SimpleDateFormat
 import java.util.{Locale, Date}
 
-import scala.xml.Node
+import scala.xml.{Text, Node}
 
 import org.apache.spark.Logging
 
@@ -239,7 +239,8 @@ private[spark] object UIUtils extends Logging {
       headers: Seq[String],
       generateDataRow: T => Seq[Node],
       data: Iterable[T],
-      fixedWidth: Boolean = false): Seq[Node] = {
+      fixedWidth: Boolean = false,
+      id: Option[String] = None): Seq[Node] = {
 
     var listingTableClass = TABLE_CLASS
     if (fixedWidth) {
@@ -263,7 +264,7 @@ private[spark] object UIUtils extends Logging {
         }
       }
     }
-    <table class={listingTableClass}>
+    <table class={listingTableClass} id={id.map(Text.apply)}>
       <thead>{headerRow}</thead>
       <tbody>
         {data.map(r => generateDataRow(r))}

http://git-wip-us.apache.org/repos/asf/spark/blob/bf589fc7/core/src/main/scala/org/apache/spark/ui/WebUI.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/WebUI.scala b/core/src/main/scala/org/apache/spark/ui/WebUI.scala
index 5d88ca4..9be65a4 100644
--- a/core/src/main/scala/org/apache/spark/ui/WebUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/WebUI.scala
@@ -82,7 +82,7 @@ private[spark] abstract class WebUI(
   }
 
   /** Detach a handler from this UI. */
-  def detachHandler(handler: ServletContextHandler) {
+  protected def detachHandler(handler: ServletContextHandler) {
     handlers -= handler
     serverInfo.foreach { info =>
       info.rootHandler.removeHandler(handler)

http://git-wip-us.apache.org/repos/asf/spark/blob/bf589fc7/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
index 8a0075a..12d23a9 100644
--- a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
@@ -39,7 +39,8 @@ private[ui] class RDDPage(parent: StorageTab) extends WebUIPage("rdd") {
 
     // Worker table
     val workers = storageStatusList.map((rddId, _))
-    val workerTable = UIUtils.listingTable(workerHeader, workerRow, workers)
+    val workerTable = UIUtils.listingTable(workerHeader, workerRow, workers,
+      id = Some("rdd-storage-by-worker-table"))
 
     // Block table
     val blockLocations = StorageUtils.getRddBlockLocations(rddId, storageStatusList)
@@ -49,7 +50,8 @@ private[ui] class RDDPage(parent: StorageTab) extends WebUIPage("rdd") {
       .map { case (blockId, status) =>
         (blockId, status, blockLocations.get(blockId).getOrElse(Seq[String]("Unknown")))
       }
-    val blockTable = UIUtils.listingTable(blockHeader, blockRow, blocks)
+    val blockTable = UIUtils.listingTable(blockHeader, blockRow, blocks,
+      id = Some("rdd-storage-by-block-table"))
 
     val content =
       <div class="row-fluid">

http://git-wip-us.apache.org/repos/asf/spark/blob/bf589fc7/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala b/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala
index 83489ca..6ced605 100644
--- a/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala
@@ -31,7 +31,7 @@ private[ui] class StoragePage(parent: StorageTab) extends WebUIPage("") {
 
   def render(request: HttpServletRequest): Seq[Node] = {
     val rdds = listener.rddInfoList
-    val content = UIUtils.listingTable(rddHeader, rddRow, rdds)
+    val content = UIUtils.listingTable(rddHeader, rddRow, rdds, id = Some("storage-by-rdd-table"))
     UIUtils.headerSparkPage("Storage", content, parent)
   }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/bf589fc7/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
new file mode 100644
index 0000000..bacf6a1
--- /dev/null
+++ b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.ui
+
+import org.apache.spark.api.java.StorageLevels
+import org.apache.spark.{SparkException, SparkConf, SparkContext}
+import org.openqa.selenium.WebDriver
+import org.openqa.selenium.htmlunit.HtmlUnitDriver
+import org.scalatest._
+import org.scalatest.concurrent.Eventually._
+import org.scalatest.selenium.WebBrowser
+import org.scalatest.time.SpanSugar._
+
+import org.apache.spark.LocalSparkContext._
+
+/**
+ * Selenium tests for the Spark Web UI.  These tests are not run by default
+ * because they're slow.
+ */
+@DoNotDiscover
+class UISeleniumSuite extends FunSuite with WebBrowser with Matchers {
+  implicit val webDriver: WebDriver = new HtmlUnitDriver
+
+  /**
+   * Create a test SparkContext with the SparkUI enabled.
+   * It is safe to `get` the SparkUI directly from the SparkContext returned here.
+   */
+  private def newSparkContext(): SparkContext = {
+    val conf = new SparkConf()
+      .setMaster("local")
+      .setAppName("test")
+      .set("spark.ui.enabled", "true")
+    val sc = new SparkContext(conf)
+    assert(sc.ui.isDefined)
+    sc
+  }
+
+  test("effects of unpersist() / persist() should be reflected") {
+    // Regression test for SPARK-2527
+    withSpark(newSparkContext()) { sc =>
+      val ui = sc.ui.get
+      val rdd = sc.parallelize(Seq(1, 2, 3))
+      rdd.persist(StorageLevels.DISK_ONLY).count()
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to (ui.appUIAddress.stripSuffix("/") + "/storage")
+        val tableRowText = findAll(cssSelector("#storage-by-rdd-table td")).map(_.text).toSeq
+        tableRowText should contain (StorageLevels.DISK_ONLY.description)
+      }
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to (ui.appUIAddress.stripSuffix("/") + "/storage/rdd/?id=0")
+        val tableRowText = findAll(cssSelector("#rdd-storage-by-block-table td")).map(_.text).toSeq
+        tableRowText should contain (StorageLevels.DISK_ONLY.description)
+      }
+
+      rdd.unpersist()
+      rdd.persist(StorageLevels.MEMORY_ONLY).count()
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to (ui.appUIAddress.stripSuffix("/") + "/storage")
+        val tableRowText = findAll(cssSelector("#storage-by-rdd-table td")).map(_.text).toSeq
+        tableRowText should contain (StorageLevels.MEMORY_ONLY.description)
+      }
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to (ui.appUIAddress.stripSuffix("/") + "/storage/rdd/?id=0")
+        val tableRowText = findAll(cssSelector("#rdd-storage-by-block-table td")).map(_.text).toSeq
+        tableRowText should contain (StorageLevels.MEMORY_ONLY.description)
+      }
+    }
+  }
+
+  test("failed stages should not appear to be active") {
+    withSpark(newSparkContext()) { sc =>
+      // Regression test for SPARK-3021
+      intercept[SparkException] {
+        sc.parallelize(1 to 10).map { x => throw new Exception()}.collect()
+      }
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to sc.ui.get.appUIAddress
+        find(id("active")).get.text should be("Active Stages (0)")
+        find(id("failed")).get.text should be("Failed Stages (1)")
+      }
+
+      // Regression test for SPARK-2105
+      class NotSerializable
+      val unserializableObject = new NotSerializable
+      intercept[SparkException] {
+        sc.parallelize(1 to 10).map { x => unserializableObject}.collect()
+      }
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to sc.ui.get.appUIAddress
+        find(id("active")).get.text should be("Active Stages (0)")
+        // The failure occurs before the stage becomes active, hence we should still show only one
+        // failed stage, not two:
+        find(id("failed")).get.text should be("Failed Stages (1)")
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/bf589fc7/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 2faf0c7..2ebe1b8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -313,6 +313,12 @@
         <version>1.3.9</version>
       </dependency>
       <dependency>
+        <groupId>org.seleniumhq.selenium</groupId>
+        <artifactId>selenium-java</artifactId>
+        <version>2.42.2</version>
+        <scope>test</scope>
+      </dependency>
+      <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-api</artifactId>
         <version>${slf4j.version}</version>
@@ -520,7 +526,7 @@
       <dependency>
         <groupId>org.scalatest</groupId>
         <artifactId>scalatest_${scala.binary.version}</artifactId>
-        <version>2.1.5</version>
+        <version>2.2.1</version>
         <scope>test</scope>
       </dependency>
       <dependency>

http://git-wip-us.apache.org/repos/asf/spark/blob/bf589fc7/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
index 6dc5942..f134d73 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
@@ -23,7 +23,7 @@ import scala.collection.immutable.HashSet
 
 import org.scalatest.FunSuite
 import org.scalatest.Matchers._
-import org.scalautils.TripleEqualsSupport.Spread
+import org.scalactic.TripleEqualsSupport.Spread
 
 import org.apache.spark.sql.catalyst.types._
 

