beliefer commented on code in PR #47666:
URL: https://github.com/apache/spark/pull/47666#discussion_r1753669071


##########
connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/PostgresIntegrationSuite.scala:
##########
@@ -123,4 +132,49 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCT
       )
     }
   }
+
+  test("SPARK-49162: Push down date_trunc function") {
+    def testDateTruncPushdown(format: String, expectedResult: Set[Row]): Unit = {
+      val df = sql(
+        s"""
+            SELECT DATE_TRUNC('$format', time), COUNT(*)
+            | FROM $catalogName.datetime_table
+            | GROUP BY 1

Review Comment:
   Why introduce an aggregate here? We just need to test predicates.
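
   For illustration only (not code from the PR), a predicate-based variant of this check could look roughly like the sketch below. The helper name `checkFilterPushed`, the timestamp literal, and the expected-row handling are assumptions about the `V2JDBCTest` mixin and the `datetime_table` fixture, not values taken from the PR.

   ```scala
   // Illustrative sketch only: a predicate-only variant of the check, assuming
   // the datetime_table fixture and a checkFilterPushed helper are available
   // in this suite. The timestamp literal is a placeholder.
   def testDateTruncFilterPushdown(format: String, expected: Seq[Row]): Unit = {
     val df = sql(
       s"""
          |SELECT time FROM $catalogName.datetime_table
          |WHERE DATE_TRUNC('$format', time) = TIMESTAMP '2022-05-19 00:00:00'
          |""".stripMargin)
     checkFilterPushed(df)     // the DATE_TRUNC predicate should reach Postgres
     checkAnswer(df, expected) // result must match the unpushed evaluation
   }
   ```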



##########
sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala:
##########
@@ -1599,6 +1599,14 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHel
      "PushedTopN: ORDER BY [EXTRACT(DAY_OF_YEAR FROM DATE1) ASC NULLS FIRST] LIMIT 1,"
     checkPushedInfo(df9, expectedPlanFragment9)
     checkAnswer(df9, Seq(Row("alex")))
+
+    val df10 = sql("SELECT name FROM h2.test.datetime WHERE " +
+      "DATE_TRUNC('DAY', date1) = date'2022-05-19'")

Review Comment:
   Shall we add more test cases for the other formats supported by Spark, such as `YEAR`, `MM`, and so on?
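
   As a rough illustration only, the extra coverage could reuse the df10 pattern via a small helper. The truncated-date literals and expected rows below are placeholders for whatever the `h2.test.datetime` fixture actually contains, and `checkFiltersRemoved` is assumed to be the helper already used elsewhere in this suite.

   ```scala
   // Illustrative sketch only: repeat the df10-style check for other
   // truncation units Spark supports. Literals and expected rows are
   // placeholders that depend on the h2.test.datetime fixture data.
   def checkDateTruncPushdown(fmt: String, truncated: String, expected: Seq[Row]): Unit = {
     val df = sql("SELECT name FROM h2.test.datetime WHERE " +
       s"DATE_TRUNC('$fmt', date1) = $truncated")
     checkFiltersRemoved(df)   // the filter should be pushed down to H2
     checkAnswer(df, expected)
   }

   checkDateTruncPushdown("YEAR", "date'2022-01-01'", Seq(Row("alex")))
   checkDateTruncPushdown("MONTH", "date'2022-05-01'", Seq(Row("alex")))
   ```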



##########
sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala:
##########
@@ -1599,6 +1599,14 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHel
      "PushedTopN: ORDER BY [EXTRACT(DAY_OF_YEAR FROM DATE1) ASC NULLS FIRST] LIMIT 1,"
     checkPushedInfo(df9, expectedPlanFragment9)
     checkAnswer(df9, Seq(Row("alex")))
+
+    val df10 = sql("SELECT name FROM h2.test.datetime WHERE " +
+      "DATE_TRUNC('DAY', date1) = date'2022-05-19'")

Review Comment:
   Let's create a separate test case.
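
   For illustration of the suggestion (not code from the PR), the df10 check could move into its own test block. The test title and the expected rows are placeholders; `checkFiltersRemoved` is assumed to be the same helper used elsewhere in this suite.

   ```scala
   // Illustrative sketch only: the df10 query and its assertions pulled into a
   // dedicated test. The title and expected rows are placeholders.
   test("SPARK-49162: push down DATE_TRUNC in filters") {
     val df = sql("SELECT name FROM h2.test.datetime WHERE " +
       "DATE_TRUNC('DAY', date1) = date'2022-05-19'")
     checkFiltersRemoved(df)            // filter should be fully pushed to H2
     checkAnswer(df, Seq(Row("alex")))  // placeholder; depends on fixture rows
   }
   ```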


