cloud-fan commented on code in PR #36328:
URL: https://github.com/apache/spark/pull/36328#discussion_r857294807


##########
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala:
##########
@@ -1934,6 +1949,43 @@ abstract class ParquetFilterSuite extends QueryTest with ParquetTest with Shared
       checkAnswer(notIn, Seq())
     }
   }
+
+  private def testStringPredicateWithDictionaryFilter(
+      dataFrame: DataFrame, filter: String): Unit = {
+    Seq(true, false).foreach { enableDictionary =>
+      withTempPath { dir =>
+        val path = dir.getCanonicalPath
+        dataFrame.write
+          .option(ParquetOutputFormat.ENABLE_DICTIONARY, enableDictionary)
+          .parquet(path)
+        Seq(true, false).foreach { pushDown =>
+          withSQLConf(
+            SQLConf.PARQUET_FILTER_PUSHDOWN_STRING_PREDICATE_ENABLED.key -> pushDown.toString) {
+            val accu = new NumRowGroupsAcc
+            sparkContext.register(accu)
+
+            val df = spark.read.parquet(path).filter(filter)
+            df.foreachPartition((it: Iterator[Row]) => it.foreach(v => accu.add(0)))
+            if (enableDictionary && pushDown) {
+              assert(accu.value == 0)
+            } else {
+              assert(accu.value > 0)
+            }
+
+            AccumulatorContext.remove(accu.id)
+          }
+        }
+      }
+    }
+  }
+
+  test("filter pushdown - StringEndsWith/Contains") {

Review Comment:
   Can we merge the existing `startsWith` test into the new one?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to