This is an automated email from the ASF dual-hosted git repository.
wenchen pushed a commit to branch branch-2.4
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-2.4 by this push:
new 29a4e04 [SPARK-27563][SQL][TEST] automatically get the latest Spark
versions in HiveExternalCatalogVersionsSuite
29a4e04 is described below
commit 29a4e048feb459e5121b6d21c741a81f48991f64
Author: Wenchen Fan <[email protected]>
AuthorDate: Fri Apr 26 16:37:43 2019 +0900
[SPARK-27563][SQL][TEST] automatically get the latest Spark versions in
HiveExternalCatalogVersionsSuite
## What changes were proposed in this pull request?
We can get the latest downloadable Spark versions from
https://dist.apache.org/repos/dist/release/spark/
## How was this patch tested?
manually.
Closes #24454 from cloud-fan/test.
Authored-by: Wenchen Fan <[email protected]>
Signed-off-by: HyukjinKwon <[email protected]>
---
.../sql/hive/HiveExternalCatalogVersionsSuite.scala | 19 ++++++++++++++++++-
1 file changed, 18 insertions(+), 1 deletion(-)
diff --git
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
index a4d6a69..8828471 100644
---
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
+++
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
@@ -22,6 +22,7 @@ import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}
import scala.sys.process._
+import scala.util.control.NonFatal
import org.apache.hadoop.conf.Configuration
@@ -166,6 +167,10 @@ class HiveExternalCatalogVersionsSuite extends
SparkSubmitTestUtils {
""".stripMargin.getBytes("utf8"))
// scalastyle:on line.size.limit
+ if (PROCESS_TABLES.testingVersions.isEmpty) {
+ fail("Failed to get the latest Spark versions to test.")
+ }
+
PROCESS_TABLES.testingVersions.zipWithIndex.foreach { case (version,
index) =>
val sparkHome = new File(sparkTestingDir, s"spark-$version")
if (!sparkHome.exists()) {
@@ -203,7 +208,19 @@ class HiveExternalCatalogVersionsSuite extends
SparkSubmitTestUtils {
object PROCESS_TABLES extends QueryTest with SQLTestUtils {
// Tests the latest version of every release line.
- val testingVersions = Seq("2.3.3", "2.4.2")
+ val testingVersions: Seq[String] = {
+ import scala.io.Source
+ try {
+
Source.fromURL("https://dist.apache.org/repos/dist/release/spark/").mkString
+ .split("\n")
+ .filter(_.contains("""<li><a href="spark-"""))
+ .map("""<a
href="spark-(\d.\d.\d)/">""".r.findFirstMatchIn(_).get.group(1))
+ .filter(_ < org.apache.spark.SPARK_VERSION)
+ } catch {
+ // do not throw exception during object initialization.
+ case NonFatal(_) => Nil
+ }
+ }
protected var spark: SparkSession = _
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]