GitHub user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21568#discussion_r201737045
  
    --- Diff: sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala ---
    @@ -138,18 +139,58 @@ class SQLQueryTestSuite extends QueryTest with SharedSQLContext {
       private def runTest(testCase: TestCase): Unit = {
         val input = fileToString(new File(testCase.inputFile))
     
    +    val (comments, code) = input.split("\n").partition(_.startsWith("--"))
    +    val configSets = {
    +      val configLines = comments.filter(_.startsWith("--SET")).map(_.substring(5))
    +      val configs = configLines.map(_.split(",").map { confAndValue =>
    +        val (conf, value) = confAndValue.span(_ != '=')
    +        conf.trim -> value.substring(1).trim
    +      })
    +      // When we are regenerating the golden files we don't need to run all
    +      // the configs, as they all need to return the same result.
    +      if (regenerateGoldenFiles && configs.nonEmpty) {
    +        configs.take(1)
    +      } else {
    +        configs
    +      }
    +    }
         // List of SQL queries to run
    -    val queries: Seq[String] = {
    -      val cleaned = input.split("\n").filterNot(_.startsWith("--")).mkString("\n")
    -      // note: this is not a robust way to split queries using semicolon, but works for now.
    -      cleaned.split("(?<=[^\\\\]);").map(_.trim).filter(_ != "").toSeq
    +    // note: this is not a robust way to split queries using semicolon, but works for now.
    +    val queries = code.mkString("\n").split("(?<=[^\\\\]);").map(_.trim).filter(_ != "").toSeq
    +
    +    if (configSets.isEmpty) {
    +      runQueries(queries, testCase.resultFile, None)
    +    } else {
    +      configSets.foreach { configSet =>
    +        try {
    --- End diff --
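
    For context, the `--SET` lines parsed above look like the sketch below. The parsing mechanics are copied from the diff; the two sample confs are made up:

    // A hypothetical "--SET" line from a .sql test file:
    val line = "--SET spark.sql.codegen.wholeStage=true, spark.sql.shuffle.partitions=5"
    // As in the diff: drop the "--SET" prefix, split on commas, then split each
    // conf=value pair at the first '='.
    val configs = line.substring(5).split(",").map { confAndValue =>
      val (conf, value) = confAndValue.span(_ != '=')
      conf.trim -> value.substring(1).trim
    }
    // configs: Array((spark.sql.codegen.wholeStage, true), (spark.sql.shuffle.partitions, 5))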
    
    I think it's better to do the try-catch inside `runQueries`, so that we can know which config set caused the failure.
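
    A minimal sketch of what I mean, assuming the third argument is the optional config set (matching the `runQueries(queries, testCase.resultFile, None)` call in the diff); the error wrapping is only illustrative, not a final implementation:

    private def runQueries(
        queries: Seq[String],
        resultFile: String,
        configSet: Option[Seq[(String, String)]]): Unit = {
      try {
        // ... existing logic: set the configs, run the queries, check the golden file ...
      } catch {
        case e: Throwable =>
          // Re-throw with the active configs attached, so a failure names its config set.
          val configs = configSet
            .map(_.map { case (conf, value) => s"$conf=$value" }.mkString(", "))
            .getOrElse("<default>")
          throw new Exception(s"${e.getMessage}\nError using configs: $configs", e)
      }
    }

    The caller's loop then stays a plain `configSets.foreach`, with no try-catch of its own.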

