dongjoon-hyun commented on a change in pull request #24598: [SPARK-27699][SQL] 
Partially push down disjunctive predicates in Parquet/ORC
URL: https://github.com/apache/spark/pull/24598#discussion_r284777681
 
 

 ##########
 File path: sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
 ##########
 @@ -2978,6 +2979,31 @@ class SQLQuerySuite extends QueryTest with 
SharedSQLContext {
     }
   }
 
+  test("SPARK-27699 Validate pushed down filters") {
+    def checkPushedFilters(df: DataFrame, filters: Array[sources.Filter]): 
Unit = {
+      val scan = df.queryExecution.sparkPlan
+        .find(_.isInstanceOf[BatchScanExec]).get.asInstanceOf[BatchScanExec]
+        .scan
+      assert(scan.isInstanceOf[OrcScan])
+      assert(scan.asInstanceOf[OrcScan].pushedFilters === filters)
+    }
+    withSQLConf(SQLConf.USE_V1_SOURCE_READER_LIST.key -> "") {
+      withTempPath { dir =>
+        spark.range(10).map(i => (i, i.toString)).toDF("id", 
"s").write.orc(dir.getCanonicalPath)
+        val df = spark.read.orc(dir.getCanonicalPath)
 
 Review comment:
   It would be better to include Parquet as well. Yes, I see — you want to use a DSv2 
approach for testing.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Service

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to