spark git commit: [SPARK-11672][ML] flaky spark.ml read/write tests

2015-11-12 Thread meng
Repository: spark
Updated Branches:
  refs/heads/master e4e46b20f -> e71c07557


[SPARK-11672][ML] flaky spark.ml read/write tests

We set `sqlContext = null` in `afterAll`. However, this doesn't clear
`SQLContext.activeContext`, so `SQLContext.getOrCreate` might return a context
that still uses the `SparkContext` from a previous test suite and hence cause
the error. This PR calls `clearActive` in both `beforeAll` and `afterAll` to
avoid picking up a stale context from other test suites.
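
For reference, a minimal sketch of the resulting pattern (the trait and app
names here are illustrative, not the exact committed code):

```scala
import org.scalatest.{BeforeAndAfterAll, Suite}

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

// Sketch: clear the thread-local active SQLContext on both sides of a suite
// so SQLContext.getOrCreate cannot hand back a context bound to an
// already-stopped SparkContext left over from an earlier suite.
trait SharedTestSparkContext extends BeforeAndAfterAll { self: Suite =>
  @transient var sc: SparkContext = _
  @transient var sqlContext: SQLContext = _

  override def beforeAll(): Unit = {
    super.beforeAll()
    val conf = new SparkConf().setMaster("local[2]").setAppName("UnitTest")
    sc = new SparkContext(conf)
    SQLContext.clearActive() // drop whatever a previous suite left behind
    sqlContext = new SQLContext(sc)
  }

  override def afterAll(): Unit = {
    try {
      sqlContext = null
      SQLContext.clearActive() // nulling our own field alone is not enough
      if (sc != null) {
        sc.stop()
      }
      sc = null
    } finally {
      super.afterAll()
    }
  }
}
```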

cc: yhuai

Author: Xiangrui Meng 

Closes #9677 from mengxr/SPARK-11672.2.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e71c0755
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e71c0755
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e71c0755

Branch: refs/heads/master
Commit: e71c07557c39e2f74bd20d2ab3a2fca88aa5dfbb
Parents: e4e46b2
Author: Xiangrui Meng 
Authored: Thu Nov 12 20:01:13 2015 -0800
Committer: Xiangrui Meng 
Committed: Thu Nov 12 20:01:13 2015 -0800

----------------------------------------------------------------------
 .../java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java | 4 ++--
 .../apache/spark/ml/classification/LogisticRegressionSuite.scala | 2 +-
 .../test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala  | 2 +-
 .../scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala    | 2 +-
 .../org/apache/spark/mllib/util/MLlibTestSparkContext.scala      | 2 ++
 5 files changed, 7 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/e71c0755/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
----------------------------------------------------------------------
diff --git a/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java b/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
index 4f7aeac..c395380 100644
--- a/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.Ignore;
+import org.junit.Test;
 
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SQLContext;
@@ -50,7 +50,7 @@ public class JavaDefaultReadWriteSuite {
 Utils.deleteRecursively(tempDir);
   }
 
-  @Ignore // SPARK-11672
+  @Test
   public void testDefaultReadWrite() throws IOException {
 String uid = "my_params";
 MyParams instance = new MyParams(uid);

http://git-wip-us.apache.org/repos/asf/spark/blob/e71c0755/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
----------------------------------------------------------------------
diff --git a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
index e4c2f1b..51b06b7 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
@@ -872,7 +872,7 @@ class LogisticRegressionSuite
 assert(model1a0.intercept ~== model1b.intercept absTol 1E-3)
   }
 
-  ignore("read/write") { // SPARK-11672
+  test("read/write") {
 // Set some Params to make sure set Params are serialized.
 val lr = new LogisticRegression()
   .setElasticNetParam(0.1)

http://git-wip-us.apache.org/repos/asf/spark/blob/e71c0755/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
----------------------------------------------------------------------
diff --git a/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
index a66fe03..9dfa143 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
@@ -68,7 +68,7 @@ class BinarizerSuite extends SparkFunSuite with MLlibTestSparkContext with Defau
 }
   }
 
-  ignore("read/write") { // SPARK-11672
+  test("read/write") {
 val binarizer = new Binarizer()
   .setInputCol("feature")
   .setOutputCol("binarized_feature")

http://git-wip-us.apache.org/repos/asf/spark/blob/e71c0755/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
----------------------------------------------------------------------
diff --git a/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala b/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
index 44e09c3..cac4bd9 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala

spark git commit: [SPARK-11672][ML] flaky spark.ml read/write tests

2015-11-12 Thread meng
Repository: spark
Updated Branches:
  refs/heads/branch-1.6 46a536e45 -> 874cd29f2


[SPARK-11672][ML] flaky spark.ml read/write tests

We set `sqlContext = null` in `afterAll`. However, this doesn't clear
`SQLContext.activeContext`, so `SQLContext.getOrCreate` might return a context
that still uses the `SparkContext` from a previous test suite and hence cause
the error. This PR calls `clearActive` in both `beforeAll` and `afterAll` to
avoid picking up a stale context from other test suites.
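
To see why the old teardown was flaky, an illustrative-only sketch of the
failure sequence (not code from this patch; it assumes, per the description
above, that `getOrCreate` prefers the thread's active context):

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

// Suite 1 runs and registers its SQLContext as the thread's active one.
val sc1 = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("suite1"))
val ctx1 = new SQLContext(sc1)
SQLContext.setActive(ctx1)

// Suite 1 tears down: it nulls its own field and stops sc1, but
// SQLContext.activeContext still points at ctx1.
sc1.stop()

// Suite 2 starts a fresh SparkContext, yet getOrCreate may hand back ctx1,
// whose underlying SparkContext is stopped, so the next job fails.
val sc2 = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("suite2"))
val ctx2 = SQLContext.getOrCreate(sc2) // stale unless clearActive() ran first
```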

cc: yhuai

Author: Xiangrui Meng 

Closes #9677 from mengxr/SPARK-11672.2.

(cherry picked from commit e71c07557c39e2f74bd20d2ab3a2fca88aa5dfbb)
Signed-off-by: Xiangrui Meng 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/874cd29f
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/874cd29f
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/874cd29f

Branch: refs/heads/branch-1.6
Commit: 874cd29f20aa155cad04e41674ce9e98c20eb575
Parents: 46a536e
Author: Xiangrui Meng 
Authored: Thu Nov 12 20:01:13 2015 -0800
Committer: Xiangrui Meng 
Committed: Thu Nov 12 20:01:21 2015 -0800

----------------------------------------------------------------------
 .../java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java | 4 ++--
 .../apache/spark/ml/classification/LogisticRegressionSuite.scala | 2 +-
 .../test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala  | 2 +-
 .../scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala    | 2 +-
 .../org/apache/spark/mllib/util/MLlibTestSparkContext.scala      | 2 ++
 5 files changed, 7 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/874cd29f/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
----------------------------------------------------------------------
diff --git a/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java b/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
index 4f7aeac..c395380 100644
--- a/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.Ignore;
+import org.junit.Test;
 
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SQLContext;
@@ -50,7 +50,7 @@ public class JavaDefaultReadWriteSuite {
 Utils.deleteRecursively(tempDir);
   }
 
-  @Ignore // SPARK-11672
+  @Test
   public void testDefaultReadWrite() throws IOException {
 String uid = "my_params";
 MyParams instance = new MyParams(uid);

http://git-wip-us.apache.org/repos/asf/spark/blob/874cd29f/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
----------------------------------------------------------------------
diff --git a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
index e4c2f1b..51b06b7 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
@@ -872,7 +872,7 @@ class LogisticRegressionSuite
 assert(model1a0.intercept ~== model1b.intercept absTol 1E-3)
   }
 
-  ignore("read/write") { // SPARK-11672
+  test("read/write") {
 // Set some Params to make sure set Params are serialized.
 val lr = new LogisticRegression()
   .setElasticNetParam(0.1)

http://git-wip-us.apache.org/repos/asf/spark/blob/874cd29f/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
----------------------------------------------------------------------
diff --git a/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
index a66fe03..9dfa143 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
@@ -68,7 +68,7 @@ class BinarizerSuite extends SparkFunSuite with MLlibTestSparkContext with Defau
 }
   }
 
-  ignore("read/write") { // SPARK-11672
+  test("read/write") {
 val binarizer = new Binarizer()
   .setInputCol("feature")
   .setOutputCol("binarized_feature")

http://git-wip-us.apache.org/repos/asf/spark/blob/874cd29f/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
----------------------------------------------------------------------
diff --git a/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala b/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
index 44e09c3..cac4bd9 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala