This is an automated email from the ASF dual-hosted git repository. wenchen pushed a commit to branch branch-2.3 in repository https://gitbox.apache.org/repos/asf/spark.git
commit a956e9c765026de0009da4a5867bb768375c22ed Author: Wenchen Fan <[email protected]> AuthorDate: Fri Apr 26 16:37:43 2019 +0900 [SPARK-27563][SQL][TEST] automatically get the latest Spark versions in HiveExternalCatalogVersionsSuite We can get the latest downloadable Spark versions from https://dist.apache.org/repos/dist/release/spark/ manually. Closes #24454 from cloud-fan/test. Authored-by: Wenchen Fan <[email protected]> Signed-off-by: HyukjinKwon <[email protected]> --- .../sql/hive/HiveExternalCatalogVersionsSuite.scala | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala index 680abb6..916f73f 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala @@ -22,6 +22,7 @@ import java.nio.charset.StandardCharsets import java.nio.file.{Files, Paths} import scala.sys.process._ +import scala.util.control.NonFatal import org.apache.hadoop.conf.Configuration @@ -166,6 +167,10 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils { """.stripMargin.getBytes("utf8")) // scalastyle:on line.size.limit + if (PROCESS_TABLES.testingVersions.isEmpty) { + fail("Fail to get the latest Spark versions to test.") + } + PROCESS_TABLES.testingVersions.zipWithIndex.foreach { case (version, index) => val sparkHome = new File(sparkTestingDir, s"spark-$version") if (!sparkHome.exists()) { @@ -203,7 +208,19 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils { object PROCESS_TABLES extends QueryTest with SQLTestUtils { // Tests the latest version of every release line. 
- val testingVersions = Seq("2.3.3") + val testingVersions: Seq[String] = { + import scala.io.Source + try { + Source.fromURL("https://dist.apache.org/repos/dist/release/spark/").mkString + .split("\n") + .filter(_.contains("""<li><a href="spark-""")) + .map("""<a href="spark-(\d.\d.\d)/">""".r.findFirstMatchIn(_).get.group(1)) + .filter(_ < org.apache.spark.SPARK_VERSION) + } catch { + // do not throw exception during object initialization. + case NonFatal(_) => Nil + } + } protected var spark: SparkSession = _ --------------------------------------------------------------------- To unsubscribe, e-mail: [email protected] For additional commands, e-mail: [email protected]
