Repository: spark
Updated Branches:
  refs/heads/master 94f7e046a -> 10f45b3c8


[SPARK-22047][FLAKY TEST] HiveExternalCatalogVersionsSuite

## What changes were proposed in this pull request?

This PR tries to download Spark for each test run, to make sure each test run 
is absolutely isolated.

## How was this patch tested?

N/A

Author: Wenchen Fan <wenc...@databricks.com>

Closes #19265 from cloud-fan/test.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/10f45b3c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/10f45b3c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/10f45b3c

Branch: refs/heads/master
Commit: 10f45b3c84ff7b3f1765dc6384a563c33d26548b
Parents: 94f7e04
Author: Wenchen Fan <wenc...@databricks.com>
Authored: Tue Sep 19 11:53:50 2017 +0800
Committer: Wenchen Fan <wenc...@databricks.com>
Committed: Tue Sep 19 11:53:50 2017 +0800

----------------------------------------------------------------------
 .../spark/sql/hive/HiveExternalCatalogVersionsSuite.scala    | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/10f45b3c/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
index 01db9eb..305f5b5 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
@@ -35,16 +35,18 @@ import org.apache.spark.util.Utils
  * expected version under this local directory, e.g. 
`/tmp/spark-test/spark-2.0.3`, we will skip the
  * downloading for this spark version.
  */
-@org.scalatest.Ignore
 class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
   private val wareHousePath = Utils.createTempDir(namePrefix = "warehouse")
   private val tmpDataDir = Utils.createTempDir(namePrefix = "test-data")
-  private val sparkTestingDir = "/tmp/spark-test"
+  // For local test, you can set `sparkTestingDir` to a static value like `/tmp/test-spark`, to
+  // avoid downloading Spark of different versions in each run.
+  private val sparkTestingDir = Utils.createTempDir(namePrefix = "test-spark")
   private val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
 
   override def afterAll(): Unit = {
     Utils.deleteRecursively(wareHousePath)
     Utils.deleteRecursively(tmpDataDir)
+    Utils.deleteRecursively(sparkTestingDir)
     super.afterAll()
   }
 
@@ -53,7 +55,7 @@ class HiveExternalCatalogVersionsSuite extends 
SparkSubmitTestUtils {
 
    val url = s"https://d3kbcqa49mib13.cloudfront.net/spark-$version-bin-hadoop2.7.tgz"
 
-    Seq("wget", url, "-q", "-P", sparkTestingDir).!
+    Seq("wget", url, "-q", "-P", sparkTestingDir.getCanonicalPath).!
 
    val downloaded = new File(sparkTestingDir, s"spark-$version-bin-hadoop2.7.tgz").getCanonicalPath
    val targetDir = new File(sparkTestingDir, s"spark-$version").getCanonicalPath


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to