c21 commented on a change in pull request #31595:
URL: https://github.com/apache/spark/pull/31595#discussion_r581222939
##########
File path: sql/core/src/test/scala/org/apache/spark/sql/DataFrameSetOperationsSuite.scala
##########
@@ -808,6 +813,53 @@ class DataFrameSetOperationsSuite extends QueryTest with SharedSparkSession {
// scalastyle:on
checkAnswer(union, row1 :: row2 :: Nil)
}
+
+ test("SPARK-34474: Remove unnecessary Union under Distinct") {
+ Seq(RemoveNoopUnion.ruleName, "").map { ruleName =>
+ withSQLConf(SQLConf.OPTIMIZER_EXCLUDED_RULES.key -> ruleName) {
+ val distinctUnionDF1 = testData.union(testData).distinct()
+ checkAnswer(distinctUnionDF1, testData.distinct())
+
+
+ val distinctUnionDF2 =
testData.union(testData).dropDuplicates(Seq("key"))
+ checkAnswer(distinctUnionDF2, testData.dropDuplicates(Seq("key")))
+
+ val distinctUnionDF3 = sql(
+ """
+ |select key, value from testData
+ |union
+ |select key, value from testData
+ |""".stripMargin)
+ checkAnswer(distinctUnionDF3, testData.distinct())
+
+ val distinctUnionDF4 = sql(
+ """
+ |select distinct key, expr
+ |from
+ |(
+ | select key, key + 1 as expr
+ | from testData
+ | union all
+ | select key, key + 2 as expr
+ | from testData
+ |)
Review comment:
@viirya - in
https://github.com/apache/spark/compare/80b03dd64e317...c21:remove-union, I am
testing this query:
```
select distinct key, expr
from
(
select key, key + 1 as expr
from testData
union all
select key, key + 2 as expr
from testData
)
```
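For reference, roughly how these plan dumps can be reproduced (a sketch only, assuming the standard testData fixture from SQLTestData and a SharedSparkSession-style test, which is where `sql` comes from):
```
// Sketch: reproduce the optimized-plan dumps below.
// Assumes a SharedSparkSession test with the testData temp view registered.
val query =
  """
    |select distinct key, expr
    |from
    |(
    |  select key, key + 1 as expr
    |  from testData
    |  union all
    |  select key, key + 2 as expr
    |  from testData
    |)
    |""".stripMargin

// Prints the parsed/analyzed/optimized/physical plans; the "Optimized Logical Plan"
// section is what is quoted below.
sql(query).explain(extended = true)

// Or grab the optimized logical plan directly:
println(sql(query).queryExecution.optimizedPlan.treeString)
```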
Optimized logical plan with the rule:
```
== Optimized Logical Plan ==
Aggregate [key#14, expr#18], [key#14, expr#18]
+- Project [key#14, (key#14 + 1) AS expr#18]
   +- SerializeFromObject [knownnotnull(assertnotnull(input[0, org.apache.spark.sql.test.SQLTestData$TestData4, true])).key AS key#14]
      +- ExternalRDD [obj#13]
```
Optimized logical plan without the rule:
```
== Optimized Logical Plan ==
Aggregate [key#2, expr#6], [key#2, expr#6]
+- Union false, false
   :- Project [key#2, (key#2 + 1) AS expr#6]
   :  +- SerializeFromObject [knownnotnull(assertnotnull(input[0, org.apache.spark.sql.test.SQLTestData$TestData4, true])).key AS key#2]
   :     +- ExternalRDD [obj#1]
   +- Project [key#2, (key#2 + 2) AS expr#9]
      +- SerializeFromObject [knownnotnull(assertnotnull(input[0, org.apache.spark.sql.test.SQLTestData$TestData4, true])).key AS key#2]
         +- ExternalRDD [obj#1]
```
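To flip between the two dumps, the rule can be excluded through `spark.sql.optimizer.excludedRules`, the same way the test does it. A rough sketch, reusing `query` from the snippet above (the `RemoveNoopUnion` import path is my assumption):
```
import org.apache.spark.sql.catalyst.optimizer.RemoveNoopUnion
import org.apache.spark.sql.internal.SQLConf

// With the rule in place (default): the Union gets collapsed.
println(sql(query).queryExecution.optimizedPlan.treeString)

// With the rule excluded via the optimizer conf, mirroring the test's withSQLConf block:
withSQLConf(SQLConf.OPTIMIZER_EXCLUDED_RULES.key -> RemoveNoopUnion.ruleName) {
  println(sql(query).queryExecution.optimizedPlan.treeString)
}
```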
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]