IvanK-db commented on code in PR #47666:
URL: https://github.com/apache/spark/pull/47666#discussion_r1757070034
##########
connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/PostgresIntegrationSuite.scala:
##########
@@ -123,4 +132,81 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCT
)
}
}
+
+ test("SPARK-49162: Push down aggregate date_trunc function") {
+ def testAggregatePushdown(format: String, expectedResult: Set[Row]): Unit
= {
+ val df = sql(
+ s"""
+ SELECT DATE_TRUNC('$format', time), COUNT(*)
+ | FROM $catalogName.datetime_table
+ | GROUP BY 1
+ """.stripMargin
+ )
+ val aggregates = df.queryExecution.optimizedPlan.collect {
+ case agg: Aggregate => agg
+ }
+ assert(aggregates.isEmpty)
+ assert(df.collect().toSet === expectedResult)
+ }
+
+ testAggregatePushdown("YEAR",
+ Set(Row(Timestamp.valueOf("2024-01-01 00:00:00.0"), 2)))
+ testAggregatePushdown("MONTH",
+ Set(
+ Row(Timestamp.valueOf("2024-02-01 00:00:00.0"), 1),
+ Row(Timestamp.valueOf("2024-01-01 00:00:00.0"), 1)
+ ))
+ testAggregatePushdown("DAY",
+ Set(
+ Row(Timestamp.valueOf("2024-02-02 00:00:00.0"), 1),
+ Row(Timestamp.valueOf("2024-01-01 00:00:00.0"), 1)
+ ))
+ testAggregatePushdown("HOUR",
+ Set(
+ Row(Timestamp.valueOf("2024-02-02 02:00:00.0"), 1),
+ Row(Timestamp.valueOf("2024-01-01 01:00:00.0"), 1)
+ ))
+ testAggregatePushdown("MINUTE",
+ Set(
+ Row(Timestamp.valueOf("2024-02-02 02:02:00.0"), 1),
+ Row(Timestamp.valueOf("2024-01-01 01:01:00.0"), 1)
+ ))
+ testAggregatePushdown("SECOND",
+ Set(
+ Row(Timestamp.valueOf("2024-02-02 02:02:02.0"), 1),
+ Row(Timestamp.valueOf("2024-01-01 01:01:01.0"), 1)
+ ))
+ }
+
+ test("SPARK-49162: Push down filter date_trunc function") {
+ def testFilterPushdown(format: String, date: String, expectedResult:
Set[Row]): Unit = {
+ val df = sql(
+ s"""
+ SELECT *
+ | FROM $catalogName.datetime_table
+ | WHERE DATE_TRUNC('$format', time) = '$date'
+ """.stripMargin
+ )
+ val filters = df.queryExecution.optimizedPlan.collect {
+ case f: Filter => f
+ }
+ assert(filters.isEmpty)
Review Comment:
Done
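
For context, both tests rely on the same plan-inspection pattern: collect Aggregate and Filter nodes from df.queryExecution.optimizedPlan and assert they are gone once the JDBC V2 source has absorbed them. Below is a minimal, self-contained sketch of that pattern against a local temp view; it is not part of the PR, and the object name, column names and sample data are made up for illustration. Locally, whether the nodes survive depends on which optimizer rules fire, whereas the integration tests above assert both collections are empty against Postgres.

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, Filter}

    // Illustrative only: shows how Aggregate/Filter nodes are collected from the
    // optimized logical plan, which is exactly what the asserts above check.
    object PushdownInspectionSketch {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .master("local[1]").appName("pushdown-sketch").getOrCreate()
        import spark.implicits._

        Seq("2024-01-01 01:01:01", "2024-02-02 02:02:02")
          .toDF("time_str")
          .selectExpr("CAST(time_str AS TIMESTAMP) AS time")
          .createOrReplaceTempView("datetime_table")

        val aggDf = spark.sql(
          """SELECT DATE_TRUNC('YEAR', time), COUNT(*)
            |FROM datetime_table
            |GROUP BY 1""".stripMargin)
        val aggregates = aggDf.queryExecution.optimizedPlan.collect { case a: Aggregate => a }

        val filterDf = spark.sql(
          """SELECT * FROM datetime_table
            |WHERE DATE_TRUNC('YEAR', time) = '2024-01-01 00:00:00'""".stripMargin)
        val filters = filterDf.queryExecution.optimizedPlan.collect { case f: Filter => f }

        // Against a pushdown-capable JDBC V2 source these collections are empty;
        // here they are only printed, since a local relation has nothing to push into.
        println(s"Aggregate nodes: ${aggregates.length}, Filter nodes: ${filters.length}")
        spark.stop()
      }
    }
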
##########
connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/PostgresIntegrationSuite.scala:
##########
@@ -123,4 +132,49 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCT
)
}
}
+
+ test("SPARK-49162: Push down date_trunc function") {
+ def testDateTruncPushdown(format: String, expectedResult: Set[Row]): Unit
= {
+ val df = sql(
+ s"""
+ SELECT DATE_TRUNC('$format', time), COUNT(*)
+ | FROM $catalogName.datetime_table
+ | GROUP BY 1
Review Comment:
Done
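
As a side note on where the expected Row values in these tests come from: DATE_TRUNC zeroes out everything below the given field before the GROUP BY, so the two sample rows collapse into one group for 'YEAR' but remain separate for 'MONTH' and finer. A tiny local sketch, with sample timestamps assumed to mirror datetime_table (not part of the PR):

    import java.sql.Timestamp
    import org.apache.spark.sql.SparkSession

    object DateTruncSemanticsSketch {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .master("local[1]").appName("date-trunc-semantics").getOrCreate()
        import spark.implicits._

        // Two timestamps matching the expected results in the tests above.
        Seq(
          Timestamp.valueOf("2024-01-01 01:01:01"),
          Timestamp.valueOf("2024-02-02 02:02:02")
        ).toDF("time").createOrReplaceTempView("datetime_table")

        spark.sql(
          """SELECT time,
            |       DATE_TRUNC('YEAR', time)  AS by_year,
            |       DATE_TRUNC('MONTH', time) AS by_month,
            |       DATE_TRUNC('DAY', time)   AS by_day
            |FROM datetime_table""".stripMargin).show(truncate = false)
        // by_year is 2024-01-01 00:00:00 for both rows, hence a single group with
        // COUNT(*) = 2 in the YEAR case; the other formats keep the rows apart.
        spark.stop()
      }
    }
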
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]