c21 commented on a change in pull request #31820:
URL: https://github.com/apache/spark/pull/31820#discussion_r593402812



##########
File path: 
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileScan.scala
##########
@@ -86,7 +86,7 @@ trait FileScan extends Scan
 
   override def equals(obj: Any): Boolean = obj match {
     case f: FileScan =>
-      fileIndex == f.fileIndex && readSchema == f.readSchema
+      fileIndex == f.fileIndex && readSchema == f.readSchema &&

Review comment:
       Nice catch. It seems that https://github.com/apache/spark/pull/27112 
introduced this. 
    cc @dongjoon-hyun — should we backport this fix to `3.1` and `3.0`?

##########
File path: sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
##########
@@ -4065,6 +4066,33 @@ class SQLQuerySuite extends QueryTest with 
SharedSparkSession with AdaptiveSpark
       }
     }
   }
+
+  test("SPARK-33482: Fix FileScan canonicalization") {
+    Seq(true, false).foreach { aqe =>
+      withSQLConf(SQLConf.USE_V1_SOURCE_LIST.key -> "",
+        SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> aqe.toString) {
+        withTempPath { path =>
+          spark.range(5).toDF().write.mode("overwrite").parquet(path.toString)
+          withTempView("t") {
+            spark.read.parquet(path.toString).createOrReplaceTempView("t")
+            val df = sql(
+              """
+                |SELECT *
+                |FROM t AS t1
+                |JOIN t AS t2 ON t2.id = t1.id
+                |JOIN t AS t3 ON t3.id = t2.id
+                |""".stripMargin)
+            df.collect()
+            df.explain()

Review comment:
       Do we really need these two statements (`df.collect()` and `df.explain()`), or is the assertion on the plan alone sufficient?




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to