This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f745561  Revert "[SPARK-26339][SQL] Throws better exception when 
reading files that start with underscore"
f745561 is described below

commit f7455618ce6de8d2e70f10722dc112fcc6ee3cee
Author: Hyukjin Kwon <gurwls...@apache.org>
AuthorDate: Tue Jan 1 09:29:28 2019 +0800

    Revert "[SPARK-26339][SQL] Throws better exception when reading files that 
start with underscore"
    
    This reverts commit c0b9db120d4c2ad0b5b99b9152549e94ef8f5a2d.
---
 .../spark/sql/execution/datasources/DataSource.scala | 17 +----------------
 sql/core/src/test/resources/test-data/_cars.csv      |  7 -------
 .../sql/execution/datasources/csv/CSVSuite.scala     | 20 --------------------
 3 files changed, 1 insertion(+), 43 deletions(-)

diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
index 517e043..fefff68 100644
--- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
+++ 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
@@ -543,7 +543,7 @@ case class DataSource(
       checkFilesExist: Boolean): Seq[Path] = {
     val allPaths = caseInsensitiveOptions.get("path") ++ paths
     val hadoopConf = sparkSession.sessionState.newHadoopConf()
-    val allGlobPath = allPaths.flatMap { path =>
+    allPaths.flatMap { path =>
       val hdfsPath = new Path(path)
       val fs = hdfsPath.getFileSystem(hadoopConf)
       val qualified = hdfsPath.makeQualified(fs.getUri, fs.getWorkingDirectory)
@@ -560,21 +560,6 @@ case class DataSource(
       }
       globPath
     }.toSeq
-
-    val (filteredOut, filteredIn) = allGlobPath.partition { path =>
-      InMemoryFileIndex.shouldFilterOut(path.getName)
-    }
-    if (filteredOut.nonEmpty) {
-      if (filteredIn.isEmpty) {
-        throw new AnalysisException(
-          s"All paths were ignored:\n${filteredOut.mkString("\n  ")}")
-      } else {
-        logDebug(
-          s"Some paths were ignored:\n${filteredOut.mkString("\n  ")}")
-      }
-    }
-
-    allGlobPath
   }
 }
 
diff --git a/sql/core/src/test/resources/test-data/_cars.csv 
b/sql/core/src/test/resources/test-data/_cars.csv
deleted file mode 100644
index 40ded57..0000000
--- a/sql/core/src/test/resources/test-data/_cars.csv
+++ /dev/null
@@ -1,7 +0,0 @@
-
-year,make,model,comment,blank
-"2012","Tesla","S","No comment",
-
-1997,Ford,E350,"Go get one now they are going fast",
-2015,Chevy,Volt
-
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
index fb1bedf..d9e5d7a 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
@@ -53,7 +53,6 @@ class CSVSuite extends QueryTest with SharedSQLContext with 
SQLTestUtils with Te
   private val carsEmptyValueFile = "test-data/cars-empty-value.csv"
   private val carsBlankColName = "test-data/cars-blank-column-name.csv"
   private val carsCrlf = "test-data/cars-crlf.csv"
-  private val carsFilteredOutFile = "test-data/_cars.csv"
   private val emptyFile = "test-data/empty.csv"
   private val commentsFile = "test-data/comments.csv"
   private val disableCommentsFile = "test-data/disable_comments.csv"
@@ -347,25 +346,6 @@ class CSVSuite extends QueryTest with SharedSQLContext 
with SQLTestUtils with Te
     assert(result.schema.fieldNames.size === 1)
   }
 
-  test("SPARK-26339 Not throw an exception if some of specified paths are 
filtered in") {
-    val cars = spark
-      .read
-      .option("header", "false")
-      .csv(testFile(carsFile), testFile(carsFilteredOutFile))
-
-    verifyCars(cars, withHeader = false, checkTypes = false)
-  }
-
-  test("SPARK-26339 Throw an exception only if all of the specified paths are 
filtered out") {
-    val e = intercept[AnalysisException] {
-      val cars = spark
-        .read
-        .option("header", "false")
-        .csv(testFile(carsFilteredOutFile))
-    }.getMessage
-    assert(e.contains("All paths were ignored:"))
-  }
-
   test("DDL test with empty file") {
     withView("carsTable") {
       spark.sql(


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to the sender.