MaxGekk commented on a change in pull request #29234:
URL: https://github.com/apache/spark/pull/29234#discussion_r461693321



##########
File path: external/avro/src/test/scala/org/apache/spark/sql/avro/AvroSuite.scala
##########
@@ -1800,6 +1800,44 @@ abstract class AvroSuite extends QueryTest with SharedSparkSession {
       assert(version === SPARK_VERSION_SHORT)
     }
   }
+
+  test("SPARK-32431: consistent error for nested and top-level duplicate 
columns") {

Review comment:
   I put the common test into `NestedDataSourceSuiteBase`.
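
   For reference, a minimal sketch of how a format-specific suite would pick up the shared test (the class name here is illustrative, not from the PR):
   ```scala
   // Hypothetical example: a concrete suite only names its data sources and
   // inherits the SPARK-32431 duplicate-column test from the shared trait.
   class AvroNestedDataSourceSuite extends NestedDataSourceSuiteBase {
     override protected val nestedDataSources: Seq[String] = Seq("avro")
   }
   ```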

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/util/SchemaUtils.scala
##########
@@ -42,7 +44,27 @@ private[spark] object SchemaUtils {
    */
   def checkSchemaColumnNameDuplication(
       schema: StructType, colType: String, caseSensitiveAnalysis: Boolean = false): Unit = {
-    checkColumnNameDuplication(schema.map(_.name), colType, caseSensitiveAnalysis)
+    val queue = new Queue[StructType]()
+    queue.enqueue(schema)
+    do {
+      val struct = queue.dequeue()
+      checkColumnNameDuplication(struct.map(_.name), colType, caseSensitiveAnalysis)
+      val nestedStructs = struct.map(_.dataType).collect { case st: StructType => st }
+      queue.enqueue(nestedStructs: _*)
+    } while (queue.nonEmpty)

Review comment:
   I don't think a recursive function is a good approach here. To get tail recursion, we would have to split the function into two parts, because a tail-recursive function must call itself as its last action:
   ```scala
     def checkSchemaColumnNameDuplication(
         schema: StructType, colType: String, caseSensitiveAnalysis: Boolean = false): Unit = {
       checkSchemaColumnNameDuplication(Seq(schema), colType, caseSensitiveAnalysis)
     }

     @tailrec
     private def checkSchemaColumnNameDuplication(
         schemas: Seq[StructType], colType: String, caseSensitiveAnalysis: Boolean): Unit = {
       if (schemas.nonEmpty) {
         val structs = schemas.flatMap { fields =>
           checkColumnNameDuplication(fields.names, colType, caseSensitiveAnalysis)
           fields.collect { case StructField(_, st: StructType, _, _) => st }
         }
         checkSchemaColumnNameDuplication(structs, colType, caseSensitiveAnalysis)
       }
     }
   ```
   
   From my point of view, the code is:
   1. less readable
   2. less optimal
   3. error-prone due to stack overflow
   
   I would prefer to leave the code as is.
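
   To illustrate point 3, here is a hedged, self-contained sketch (the helper and the depth value are made up for demonstration) of the kind of deeply nested schema where a plain recursive traversal risks `StackOverflowError`, while the queue-based loop keeps constant stack depth:
   ```scala
   import org.apache.spark.sql.types._

   // Builds StructType(f0 -> StructType(f1 -> ... -> StructType(leaf))) with
   // `depth` levels of nesting, iteratively, so constructing it is safe.
   def deeplyNested(depth: Int): StructType =
     (0 until depth).foldLeft(new StructType().add("leaf", LongType)) {
       case (inner, i) => new StructType().add(s"f$i", inner)
     }

   val schema = deeplyNested(100000)
   // The iterative version above walks this schema with constant stack usage:
   // SchemaUtils.checkSchemaColumnNameDuplication(schema, "in the schema")
   ```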

##########
File path: sql/core/src/test/scala/org/apache/spark/sql/FileBasedDataSourceSuite.scala
##########
@@ -43,10 +43,56 @@ import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types._
 
+trait NestedDataSourceSuiteBase extends QueryTest with SharedSparkSession {
+  protected val nestedDataSources: Seq[String]
+
+  test("SPARK-32431: consistent error for nested and top-level duplicate 
columns") {
+    Seq(
+      Seq("id AS lowercase", "id + 1 AS camelCase") ->
+        new StructType()
+          .add("LowerCase", LongType)
+          .add("camelcase", LongType)
+          .add("CamelCase", LongType),
+      Seq("NAMED_STRUCT('lowercase', id, 'camelCase', id + 1) AS 
StructColumn") ->
+        new StructType().add("StructColumn",
+          new StructType()
+            .add("LowerCase", LongType)
+            .add("camelcase", LongType)
+            .add("CamelCase", LongType))
+    ).foreach { case (selectExpr: Seq[String], caseInsensitiveSchema: StructType) =>
+      withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {

Review comment:
   `AvroSuite` tests both. We could run the entire `FileBasedDataSourceSuite` for v1 and v2.
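
   Something like the following sketch could cover both paths (the class names are illustrative; the pattern is the usual `spark.sql.sources.useV1SourceList` toggle used elsewhere in the Spark tests):
   ```scala
   import org.apache.spark.SparkConf
   import org.apache.spark.sql.internal.SQLConf

   // Run the whole suite against the V1 file-source implementations...
   class FileBasedDataSourceV1Suite extends FileBasedDataSourceSuite {
     override protected def sparkConf: SparkConf =
       super.sparkConf.set(SQLConf.USE_V1_SOURCE_LIST.key, "avro,csv,json,orc,parquet,text")
   }

   // ...and once more against the V2 implementations.
   class FileBasedDataSourceV2Suite extends FileBasedDataSourceSuite {
     override protected def sparkConf: SparkConf =
       super.sparkConf.set(SQLConf.USE_V1_SOURCE_LIST.key, "")
   }
   ```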



