Repository: spark
Updated Branches:
  refs/heads/master f4346f612 -> 0370abdfd


[MINOR] Hide the error logs for 'SQLListenerMemoryLeakSuite'

Hide the error logs for 'SQLListenerMemoryLeakSuite' to avoid noise. Most of
the changes are whitespace-only re-indentation.

Author: Shixiong Zhu <shixi...@databricks.com>

Closes #10363 from zsxwing/hide-log.
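
For context, the fix wraps the whole test body in a save/restore of the root
log4j level. Below is a minimal standalone sketch of that pattern; the helper
name `withLogLevel` and its body are illustrative, not part of this commit,
and only the log4j 1.x API visible in the diff is assumed:

import org.apache.log4j.{Level, Logger}

def withLogLevel[T](level: Level)(body: => T): T = {
  val rootLogger = Logger.getRootLogger
  // May be null if the root level is only inherited from configuration.
  val oldLevel = rootLogger.getLevel
  rootLogger.setLevel(level)
  try {
    body
  } finally {
    // Restore the previous level even if the body throws, so later tests
    // still log at the original level.
    rootLogger.setLevel(oldLevel)
  }
}

// Usage: silence everything below FATAL while expected job failures run.
withLogLevel(Level.FATAL) {
  // ... test code that deliberately triggers ERROR-level logs ...
}

The commit inlines this try/finally directly in the suite rather than
introducing a helper, which is why most of the diff below is re-indentation.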


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/0370abdf
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/0370abdf
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/0370abdf

Branch: refs/heads/master
Commit: 0370abdfd636566cd8df954c6f9ea5a794d275ef
Parents: f4346f6
Author: Shixiong Zhu <shixi...@databricks.com>
Authored: Thu Dec 17 18:18:12 2015 -0800
Committer: Shixiong Zhu <shixi...@databricks.com>
Committed: Thu Dec 17 18:18:12 2015 -0800

----------------------------------------------------------------------
 .../sql/execution/ui/SQLListenerSuite.scala     | 64 +++++++++++---------
 1 file changed, 35 insertions(+), 29 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/0370abdf/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLListenerSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLListenerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLListenerSuite.scala
index 12a4e13..11a6ce9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLListenerSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLListenerSuite.scala
@@ -336,39 +336,45 @@ class SQLListenerSuite extends SparkFunSuite with SharedSQLContext {
 class SQLListenerMemoryLeakSuite extends SparkFunSuite {
 
   test("no memory leak") {
-    val conf = new SparkConf()
-      .setMaster("local")
-      .setAppName("test")
-      .set("spark.task.maxFailures", "1") // Don't retry the tasks to run this 
test quickly
-      .set("spark.sql.ui.retainedExecutions", "50") // Set it to 50 to run 
this test quickly
-    val sc = new SparkContext(conf)
+    val oldLogLevel = org.apache.log4j.Logger.getRootLogger().getLevel()
     try {
-      SQLContext.clearSqlListener()
-      val sqlContext = new SQLContext(sc)
-      import sqlContext.implicits._
-      // Run 100 successful executions and 100 failed executions.
-      // Each execution only has one job and one stage.
-      for (i <- 0 until 100) {
-        val df = Seq(
-          (1, 1),
-          (2, 2)
-        ).toDF()
-        df.collect()
-        try {
-          df.foreach(_ => throw new RuntimeException("Oops"))
-        } catch {
-          case e: SparkException => // This is expected for a failed job
+      org.apache.log4j.Logger.getRootLogger().setLevel(org.apache.log4j.Level.FATAL)
+      val conf = new SparkConf()
+        .setMaster("local")
+        .setAppName("test")
+        .set("spark.task.maxFailures", "1") // Don't retry the tasks to run 
this test quickly
+        .set("spark.sql.ui.retainedExecutions", "50") // Set it to 50 to run 
this test quickly
+      val sc = new SparkContext(conf)
+      try {
+        SQLContext.clearSqlListener()
+        val sqlContext = new SQLContext(sc)
+        import sqlContext.implicits._
+        // Run 100 successful executions and 100 failed executions.
+        // Each execution only has one job and one stage.
+        for (i <- 0 until 100) {
+          val df = Seq(
+            (1, 1),
+            (2, 2)
+          ).toDF()
+          df.collect()
+          try {
+            df.foreach(_ => throw new RuntimeException("Oops"))
+          } catch {
+            case e: SparkException => // This is expected for a failed job
+          }
         }
+        sc.listenerBus.waitUntilEmpty(10000)
+        assert(sqlContext.listener.getCompletedExecutions.size <= 50)
+        assert(sqlContext.listener.getFailedExecutions.size <= 50)
+        // 50 for successful executions and 50 for failed executions
+        assert(sqlContext.listener.executionIdToData.size <= 100)
+        assert(sqlContext.listener.jobIdToExecutionId.size <= 100)
+        assert(sqlContext.listener.stageIdToStageMetrics.size <= 100)
+      } finally {
+        sc.stop()
       }
-      sc.listenerBus.waitUntilEmpty(10000)
-      assert(sqlContext.listener.getCompletedExecutions.size <= 50)
-      assert(sqlContext.listener.getFailedExecutions.size <= 50)
-      // 50 for successful executions and 50 for failed executions
-      assert(sqlContext.listener.executionIdToData.size <= 100)
-      assert(sqlContext.listener.jobIdToExecutionId.size <= 100)
-      assert(sqlContext.listener.stageIdToStageMetrics.size <= 100)
     } finally {
-      sc.stop()
+      org.apache.log4j.Logger.getRootLogger().setLevel(oldLogLevel)
     }
   }
 }

