This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 5d8aee5  [SPARK-27445][SQL][TEST] Update SQLQueryTestSuite to process files ending with `.sql`
5d8aee5 is described below

commit 5d8aee588696b6ddd0a6b60545c952184ab0e2b1
Author: Dilip Biswal <[email protected]>
AuthorDate: Thu Apr 11 14:50:46 2019 -0700

    [SPARK-27445][SQL][TEST] Update SQLQueryTestSuite to process files ending with `.sql`
    
    ## What changes were proposed in this pull request?
    While using vi or vim to edit the test files, .swp or .swo files get created, and attempting to run the test suite while these files are present causes errors like the one below:
    ```
    [info] - subquery/exists-subquery/.exists-basic.sql.swp *** FAILED *** (117 milliseconds)
    [info]   java.io.FileNotFoundException: /Users/dbiswal/mygit/apache/spark/sql/core/target/scala-2.12/test-classes/sql-tests/results/subquery/exists-subquery/.exists-basic.sql.swp.out (No such file or directory)
    [info]   at java.io.FileInputStream.open0(Native Method)
    [info]   at java.io.FileInputStream.open(FileInputStream.java:195)
    [info]   at java.io.FileInputStream.<init>(FileInputStream.java:138)
    [info]   at org.apache.spark.sql.catalyst.util.package$.fileToString(package.scala:49)
    [info]   at org.apache.spark.sql.SQLQueryTestSuite.runQueries(SQLQueryTestSuite.scala:247)
    [info]   at org.apache.spark.sql.SQLQueryTestSuite.$anonfun$runTest$11(SQLQueryTestSuite.scala:192)
    ```
    ~~This minor PR adds these temp files to the ignore list.~~
    While computing the list of test files to process, only consider files with the `.sql` extension. This ensures that unwanted temp files created by various editors are excluded from processing.
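    
    For illustration only, the filtering idea can be sketched as a small standalone helper (a rough sketch, not the exact code in the suite; `listSqlFiles` is a hypothetical name):
    ```scala
    import java.io.File

    // Recursively collect files, keeping only names that end in ".sql" so that
    // editor temp files (.swp, .swo) and Finder metadata (.DS_Store) are skipped.
    def listSqlFiles(dir: File): Seq[File] = {
      val (dirs, files) = dir.listFiles().partition(_.isDirectory)
      files.filter(_.getName.endsWith(".sql")) ++ dirs.flatMap(listSqlFiles)
    }
    ```
    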
    ## How was this patch tested?
    Verified manually.
    
    Closes #24333 from dilipbiswal/dkb_sqlquerytest.
    
    Authored-by: Dilip Biswal <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala   | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
index d45ceca..def99c8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
@@ -105,11 +105,11 @@ class SQLQueryTestSuite extends QueryTest with SharedSQLContext {
   private val inputFilePath = new File(baseResourcePath, "inputs").getAbsolutePath
   private val goldenFilePath = new File(baseResourcePath, "results").getAbsolutePath
 
+  private val validFileExtensions = ".sql"
+
   /** List of test cases to ignore, in lower cases. */
   private val blackList = Set(
-    "blacklist.sql",  // Do NOT remove this one. It is here to test the 
blacklist functionality.
-    ".DS_Store"       // A meta-file that may be created on Mac by Finder App.
-                      // We should ignore this file from processing.
+    "blacklist.sql"   // Do NOT remove this one. It is here to test the 
blacklist functionality.
   )
 
   // Create all the test cases.
@@ -329,7 +329,10 @@ class SQLQueryTestSuite extends QueryTest with SharedSQLContext {
   /** Returns all the files (not directories) in a directory, recursively. */
   private def listFilesRecursively(path: File): Seq[File] = {
     val (dirs, files) = path.listFiles().partition(_.isDirectory)
-    files ++ dirs.flatMap(listFilesRecursively)
+    // Filter out test files with invalid extensions such as temp files created
+    // by vi (.swp), Mac (.DS_Store) etc.
+    val filteredFiles = files.filter(_.getName.endsWith(validFileExtensions))
+    filteredFiles ++ dirs.flatMap(listFilesRecursively)
   }
 
   /** Load built-in test tables into the SparkSession. */

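As a quick sanity check of the new listing behavior, the filter can be exercised outside the suite (a standalone sketch under assumptions: the directory and file names below are made up, and `listFilesRecursively` is re-declared locally rather than taken from `SQLQueryTestSuite`):

```scala
import java.io.File
import java.nio.file.Files

object ListingDemo extends App {
  // Scratch directory with one real test file plus typical editor/Finder junk.
  val dir = Files.createTempDirectory("sql-tests-demo").toFile
  Seq("basic.sql", ".basic.sql.swp", ".DS_Store").foreach { name =>
    new File(dir, name).createNewFile()
  }

  // Same filtering logic as the patched method: keep only `.sql` files.
  def listFilesRecursively(path: File): Seq[File] = {
    val (dirs, files) = path.listFiles().partition(_.isDirectory)
    files.filter(_.getName.endsWith(".sql")) ++ dirs.flatMap(listFilesRecursively)
  }

  // Prints something like List(basic.sql); the .swp and .DS_Store entries are ignored.
  println(listFilesRecursively(dir).map(_.getName).toList)
}
```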

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
