Repository: spark
Updated Branches:
  refs/heads/branch-2.2 38a0532cf -> d7b14746d


[SPARK-22654][TESTS] Retry Spark tarball download if failed in HiveExternalCatalogVersionsSuite

## What changes were proposed in this pull request?

Adds a simple loop that retries the download of the Spark tarball from different mirrors if a download fails.
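
A minimal, self-contained sketch of the retry pattern (adapted from the patch below; `logInfo`/`logWarning`/`fail` from the test harness are replaced here with `println` and an exception so it compiles standalone, and the mirror URL and attempt count follow the diff):

```scala
import scala.sys.process._

object RetryDownloadSketch {
  // Sketch of the retry loop: ask Apache's closer.lua for a preferred mirror,
  // attempt the download with wget, and try again (up to 3 times) if wget
  // exits non-zero. Re-querying the resolver each time means a failing mirror
  // is not necessarily reused on the next attempt.
  def tryDownloadSpark(version: String, path: String): Unit = {
    for (_ <- 0 until 3) {
      val preferredMirror =
        Seq("wget", "https://www.apache.org/dyn/closer.lua?preferred=true", "-q", "-O", "-").!!.trim
      val url = s"$preferredMirror/spark/spark-$version/spark-$version-bin-hadoop2.7.tgz"
      println(s"Downloading Spark $version from $url")
      if (Seq("wget", url, "-q", "-P", path).! == 0) {
        return
      }
      println(s"Failed to download Spark $version from $url")
    }
    throw new RuntimeException(s"Unable to download Spark $version after 3 attempts")
  }
}
```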

## How was this patch tested?

Existing tests

Author: Sean Owen <so...@cloudera.com>

Closes #19851 from srowen/SPARK-22654.

(cherry picked from commit 6eb203fae7bbc9940710da40f314b89ffb4dd324)
Signed-off-by: hyukjinkwon <gurwls...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/d7b14746
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/d7b14746
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/d7b14746

Branch: refs/heads/branch-2.2
Commit: d7b14746dd9bd488240174446bd158be1e30c250
Parents: 38a0532
Author: Sean Owen <so...@cloudera.com>
Authored: Fri Dec 1 01:21:52 2017 +0900
Committer: hyukjinkwon <gurwls...@gmail.com>
Committed: Fri Dec 1 01:22:06 2017 +0900

----------------------------------------------------------------------
 .../hive/HiveExternalCatalogVersionsSuite.scala | 24 +++++++++++++++-----
 1 file changed, 18 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/d7b14746/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
index 6859432..a3d5b94 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
@@ -20,6 +20,8 @@ package org.apache.spark.sql.hive
 import java.io.File
 import java.nio.file.Files
 
+import scala.sys.process._
+
 import org.apache.spark.TestUtils
 import org.apache.spark.sql.{QueryTest, Row, SparkSession}
 import org.apache.spark.sql.catalyst.TableIdentifier
@@ -50,14 +52,24 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
     super.afterAll()
   }
 
-  private def downloadSpark(version: String): Unit = {
-    import scala.sys.process._
+  private def tryDownloadSpark(version: String, path: String): Unit = {
+    // Try mirrors a few times until one succeeds
+    for (i <- 0 until 3) {
+      val preferredMirror =
+        Seq("wget", "https://www.apache.org/dyn/closer.lua?preferred=true", "-q", "-O", "-").!!.trim
+      val url = s"$preferredMirror/spark/spark-$version/spark-$version-bin-hadoop2.7.tgz"
+      logInfo(s"Downloading Spark $version from $url")
+      if (Seq("wget", url, "-q", "-P", path).! == 0) {
+        return
+      }
+      logWarning(s"Failed to download Spark $version from $url")
+    }
+    fail(s"Unable to download Spark $version")
+  }
 
-    val preferredMirror =
-      Seq("wget", "https://www.apache.org/dyn/closer.lua?preferred=true", "-q", "-O", "-").!!.trim
-    val url = s"$preferredMirror/spark/spark-$version/spark-$version-bin-hadoop2.7.tgz"
 
-    Seq("wget", url, "-q", "-P", sparkTestingDir.getCanonicalPath).!
+  private def downloadSpark(version: String): Unit = {
+    tryDownloadSpark(version, sparkTestingDir.getCanonicalPath)
 
     val downloaded = new File(sparkTestingDir, s"spark-$version-bin-hadoop2.7.tgz").getCanonicalPath
     val targetDir = new File(sparkTestingDir, s"spark-$version").getCanonicalPath

