allisonwang-db commented on a change in pull request #32606:
URL: https://github.com/apache/spark/pull/32606#discussion_r636635430
##########
File path:
sql/core/src/test/scala/org/apache/spark/sql/execution/RemoveRedundantProjectsSuite.scala
##########
@@ -215,6 +217,27 @@ abstract class RemoveRedundantProjectsSuiteBase
|LIMIT 10
|""".stripMargin
assertProjectExec(query, 0, 3)
+
+ }
+ Seq("true", "false").foreach { codegenEnabled =>
+ test("SPARK-35287: project generating unsafe row " +
Review comment:
"project generating unsafe row for DataSourceV2ScanRelation should not
be removed"
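For example (keeping the existing `(codegen=$codegenEnabled)` suffix from the
PR), the title line could look roughly like:

```scala
test("SPARK-35287: project generating unsafe row for DataSourceV2ScanRelation " +
  s"should not be removed (codegen=$codegenEnabled)") {
```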
##########
File path:
sql/core/src/test/scala/org/apache/spark/sql/execution/RemoveRedundantProjectsSuite.scala
##########
@@ -215,6 +217,27 @@ abstract class RemoveRedundantProjectsSuiteBase
|LIMIT 10
|""".stripMargin
assertProjectExec(query, 0, 3)
+
+ }
+ Seq("true", "false").foreach { codegenEnabled =>
+ test("SPARK-35287: project generating unsafe row " +
+ s"should not be removed (codegen=$codegenEnabled)") {
+ withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1",
Review comment:
Why do we need to set the broadcast hash join threshold and the leaf
node default parallelism?
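In other words, if they are not required to reproduce SPARK-35287, could the
conf block be trimmed down to just the codegen toggle? A rough sketch of what
that would look like (treating the reduced config list as an assumption to be
confirmed):

```scala
// Hypothetical simplification: only vary whole-stage codegen.
withSQLConf(SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> codegenEnabled) {
  // same DSv2 write/read round-trip and self-join as in the diff above
}
```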
##########
File path:
sql/core/src/test/scala/org/apache/spark/sql/execution/RemoveRedundantProjectsSuite.scala
##########
@@ -215,6 +217,27 @@ abstract class RemoveRedundantProjectsSuiteBase
|LIMIT 10
|""".stripMargin
assertProjectExec(query, 0, 3)
+
+ }
+ Seq("true", "false").foreach { codegenEnabled =>
+ test("SPARK-35287: project generating unsafe row " +
+ s"should not be removed (codegen=$codegenEnabled)") {
+ withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1",
+ SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> codegenEnabled,
+ SQLConf.LEAF_NODE_DEFAULT_PARALLELISM.key -> "1") {
+ withTempPath { path =>
+ val format = classOf[SimpleWritableDataSource].getName
+ spark.range(3).select($"id" as "i", $"id" as "j")
+ .write.format(format).mode("overwrite").save(path.getCanonicalPath)
+
+ val df = spark.read.format(format).load(path.getCanonicalPath)
+ val dfLeft = df.as("x")
+ val dfRight = df.as("y")
+ val join = dfLeft.filter(dfLeft("i") > 0).join(dfRight, "i")
+ assert(join.collect === Array(Row(1, 1, 1), Row(2, 2, 2)))
Review comment:
Instead of having two tests, how about providing a more meaningful error
message here that includes the `codegenEnabled` value? Then it's easy to tell
which case fails from the failure message. Also, let's assert the number of
project nodes in the plan using `assertProjectExecCount`.
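A minimal sketch of what the combined assertion could look like, assuming
`assertProjectExecCount` in this suite takes the DataFrame and an expected
count, and assuming the expected count is 1 (to be verified against the actual
plan):

```scala
// Clue message records the codegen setting so a failure identifies the case.
assert(join.collect() === Array(Row(1, 1, 1), Row(2, 2, 2)),
  s"codegenEnabled=$codegenEnabled")
// Expected count of 1 is an assumption here; adjust to match the real plan.
assertProjectExecCount(join, 1)
```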
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]