After unit tests, clear port properties unconditionally

In MapOutputTrackerSuite, the "remote fetch" test sets spark.driver.port
and spark.hostPort, assuming that they will be cleared by
LocalSparkContext. However, the test never sets sc, so it remains null,
causing LocalSparkContext to skip clearing these properties. Subsequent
tests therefore fail with java.net.BindException: "Address already in
use".

This commit makes LocalSparkContext clear the properties even if sc is
null.


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/026dba6a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/026dba6a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/026dba6a

Branch: refs/heads/scala-2.10
Commit: 026dba6abaaf6314a79ce873bb38b73a9b7fd1a7
Parents: cd7222c
Author: Ankur Dave <ankurd...@gmail.com>
Authored: Thu Sep 19 22:05:23 2013 -0700
Committer: Ankur Dave <ankurd...@gmail.com>
Committed: Thu Sep 19 22:05:23 2013 -0700

----------------------------------------------------------------------
 .../test/scala/org/apache/spark/LocalSparkContext.scala   | 10 +++++-----
 .../test/scala/org/apache/spark/SharedSparkContext.scala  |  6 ++----
 2 files changed, 7 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/026dba6a/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/LocalSparkContext.scala b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
index 6ec124d..459e257 100644
--- a/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
+++ b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
@@ -40,17 +40,17 @@ trait LocalSparkContext extends BeforeAndAfterEach with BeforeAndAfterAll { self
   }
 
   def resetSparkContext() = {
-    if (sc != null) {
-      LocalSparkContext.stop(sc)
-      sc = null
-    }
+    LocalSparkContext.stop(sc)
+    sc = null
   }
 
 }
 
 object LocalSparkContext {
   def stop(sc: SparkContext) {
-    sc.stop()
+    if (sc != null) {
+      sc.stop()
+    }
     // To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
     System.clearProperty("spark.driver.port")
     System.clearProperty("spark.hostPort")

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/026dba6a/core/src/test/scala/org/apache/spark/SharedSparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SharedSparkContext.scala b/core/src/test/scala/org/apache/spark/SharedSparkContext.scala
index 97cbca0..288aa14 100644
--- a/core/src/test/scala/org/apache/spark/SharedSparkContext.scala
+++ b/core/src/test/scala/org/apache/spark/SharedSparkContext.scala
@@ -33,10 +33,8 @@ trait SharedSparkContext extends BeforeAndAfterAll { self: Suite =>
   }
 
   override def afterAll() {
-    if (_sc != null) {
-      LocalSparkContext.stop(_sc)
-      _sc = null
-    }
+    LocalSparkContext.stop(_sc)
+    _sc = null
     super.afterAll()
   }
 }

Reply via email to