This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new b633ad30ac84 [SPARK-53518][SQL] No truncation for catalogString of User Defined Type
b633ad30ac84 is described below

commit b633ad30ac84228ae6cd66fd4347f344a6af2d63
Author: Kent Yao <y...@apache.org>
AuthorDate: Mon Sep 8 10:27:18 2025 -0700

    [SPARK-53518][SQL] No truncation for catalogString of User Defined Type
    
    ### What changes were proposed in this pull request?
    
    `catalogString` of a User Defined Type is mistakenly truncated, which leads to catalog errors.
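
    For illustration, a minimal sketch of the mismatch (assuming a local SparkSession;
    the exact elided rendering may vary). `simpleString` honors the
    `spark.sql.debug.maxToStringFields` cap, while a catalog type string must never
    be elided:

        import org.apache.spark.sql.SparkSession
        import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

        val spark = SparkSession.builder().master("local[1]").getOrCreate()
        // Mirror the new test below: cap string rendering at 3 fields.
        spark.conf.set("spark.sql.debug.maxToStringFields", "3")

        // The UDT's underlying sqlType: four int columns, as in ExampleIntRowUDT(4).
        val st = StructType((0 until 4).map(i =>
          StructField(s"col$i", IntegerType, nullable = false)))

        st.simpleString   // elided, e.g. "struct<col0:int,col1:int,... 2 more fields>"
        st.catalogString  // always full: "struct<col0:int,col1:int,col2:int,col3:int>"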
    
    ### Why are the changes needed?
    
    Bug fix: a truncated `catalogString` is not a valid type string for the catalog.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    New Unit Test
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #52263 from yaooqinn/SPARK-53518.
    
    Authored-by: Kent Yao <y...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 .../org/apache/spark/sql/types/UserDefinedType.scala |  2 +-
 .../scala/org/apache/spark/sql/types/TestUDT.scala   | 20 ++++++++++++++++++++
 .../org/apache/spark/sql/UserDefinedTypeSuite.scala  |  7 +++++++
 3 files changed, 28 insertions(+), 1 deletion(-)

diff --git a/sql/api/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala b/sql/api/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
index 3d3521d88fdf..7f5baf954ab3 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
@@ -93,7 +93,7 @@ abstract class UserDefinedType[UserType >: Null] extends DataType with Serializa
     case _ => false
   }
 
-  override def catalogString: String = sqlType.simpleString
+  override def catalogString: String = sqlType.catalogString
 
   /**
   * This method is used to convert the value of a UDT to a string representation.
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/TestUDT.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/TestUDT.scala
index 04b090d7001d..2f58e722c052 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/TestUDT.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/TestUDT.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.types
 
+import org.apache.spark.sql.Row
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
 import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData}
@@ -132,3 +133,22 @@ private[spark] class ExampleSubTypeUDT extends UserDefinedType[IExampleSubType]
 
   override def userClass: Class[IExampleSubType] = classOf[IExampleSubType]
 }
+
+
+class ExampleIntRowUDT(cols: Int) extends UserDefinedType[Row] {
+  override def sqlType: DataType = {
+    StructType((0 until cols).map(i =>
+      StructField(s"col$i", IntegerType, nullable = false)))
+  }
+
+  override def serialize(obj: Row): InternalRow = {
+    InternalRow.fromSeq(obj.toSeq)
+  }
+
+  override def deserialize(datum: Any): Row = {
+    val internalRow = datum.asInstanceOf[InternalRow]
+    Row.fromSeq(internalRow.toSeq(sqlType.asInstanceOf[StructType]))
+  }
+
+  override def userClass: Class[Row] = classOf[Row]
+}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
index 16330d8da43f..ab7d22a5c2ad 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
@@ -330,4 +330,11 @@ class UserDefinedTypeSuite extends QueryTest with SharedSparkSession with Parque
       spark.range(10).map(i => Year.of(i.toInt + 2018)),
       (0 to 9).map(i => Year.of(i + 2018)): _*)
   }
+
+  test("SPARK-53518: No truncation for catalogString of User Defined Type") {
+    withSQLConf(SQLConf.MAX_TO_STRING_FIELDS.key -> "3") {
+      val string = new ExampleIntRowUDT(4).catalogString
+      assert(string == "struct<col0:int,col1:int,col2:int,col3:int>")
+    }
+  }
 }
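
For context, here is a minimal sketch of the "catalog errors" the commit message
refers to. It is an illustration only: `DataType.fromDDL` stands in for the
catalog's schema parsing, and the truncated literal is an example of what the
old `simpleString`-based rendering could emit.

    import scala.util.Try
    import org.apache.spark.sql.types.{DataType, StructType}

    // The full catalogString round-trips through the DDL parser.
    val full = "struct<col0:int,col1:int,col2:int,col3:int>"
    assert(DataType.fromDDL(full).isInstanceOf[StructType])

    // An elided rendering is not valid DDL and cannot be parsed back,
    // which surfaces as an error when reading the schema from a catalog.
    val truncated = "struct<col0:int,col1:int,... 2 more fields>"
    assert(Try(DataType.fromDDL(truncated)).isFailure)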


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
