This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.5 by this push:
     new 195d81b42142 [SPARK-53518][SQL][3.5] No truncation for catalogString of User Defined Type
195d81b42142 is described below

commit 195d81b421420d6c13dabcc0a6e88db53afbf74d
Author: Kent Yao <y...@apache.org>
AuthorDate: Tue Sep 9 14:17:42 2025 +0800

    [SPARK-53518][SQL][3.5] No truncation for catalogString of User Defined Type
    
    ### What changes were proposed in this pull request?
    
    `catalogString` of User Defined Type is mistakenly truncated, which leads to catalog errors.
    
    ### Why are the changes needed?
    
    bugfix
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    New Unit Test
    
    ### Was this patch authored or co-authored using generative AI tooling?
    no
    
    Closes #52278 from yaooqinn/SPARK-53518-35.
    
    Authored-by: Kent Yao <y...@apache.org>
    Signed-off-by: Kent Yao <y...@apache.org>
---
 .../org/apache/spark/sql/types/UserDefinedType.scala |  2 +-
 .../scala/org/apache/spark/sql/types/TestUDT.scala   | 20 ++++++++++++++++++++
 .../org/apache/spark/sql/UserDefinedTypeSuite.scala  |  9 +++++++++
 3 files changed, 30 insertions(+), 1 deletion(-)

diff --git a/sql/api/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala b/sql/api/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
index 5cbd876b31e6..33ea92226214 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
@@ -94,7 +94,7 @@ abstract class UserDefinedType[UserType >: Null] extends DataType with Serializa
     case _ => false
   }
 
-  override def catalogString: String = sqlType.simpleString
+  override def catalogString: String = sqlType.catalogString
 }
 
 private[spark] object UserDefinedType {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/TestUDT.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/TestUDT.scala
index 04b090d7001d..2f58e722c052 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/TestUDT.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/TestUDT.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.types
 
+import org.apache.spark.sql.Row
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
 import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData}
@@ -132,3 +133,22 @@ private[spark] class ExampleSubTypeUDT extends UserDefinedType[IExampleSubType]
 
   override def userClass: Class[IExampleSubType] = classOf[IExampleSubType]
 }
+
+
+class ExampleIntRowUDT(cols: Int) extends UserDefinedType[Row] {
+  override def sqlType: DataType = {
+    StructType((0 until cols).map(i =>
+      StructField(s"col$i", IntegerType, nullable = false)))
+  }
+
+  override def serialize(obj: Row): InternalRow = {
+    InternalRow.fromSeq(obj.toSeq)
+  }
+
+  override def deserialize(datum: Any): Row = {
+    val internalRow = datum.asInstanceOf[InternalRow]
+    Row.fromSeq(internalRow.toSeq(sqlType.asInstanceOf[StructType]))
+  }
+
+  override def userClass: Class[Row] = classOf[Row]
+}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
index 9bd4a5e6f140..605e2ff52281 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
@@ -25,6 +25,7 @@ import org.apache.spark.sql.catalyst.CatalystTypeConverters
 import org.apache.spark.sql.catalyst.expressions.{Cast, ExpressionEvalHelper, Literal}
 import org.apache.spark.sql.execution.datasources.parquet.ParquetTest
 import org.apache.spark.sql.functions._
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types._
 
@@ -282,4 +283,12 @@ class UserDefinedTypeSuite extends QueryTest with SharedSparkSession with Parque
     java.util.Arrays.equals(unwrappedFeaturesArrays(0), Array(0.1, 1.0))
     java.util.Arrays.equals(unwrappedFeaturesArrays(1), Array(0.2, 2.0))
   }
+
+
+  test("SPARK-53518: No truncation for catalogString of User Defined Type") {
+    withSQLConf(SQLConf.MAX_TO_STRING_FIELDS.key -> "3") {
+      val string = new ExampleIntRowUDT(4).catalogString
+      assert(string == "struct<col0:int,col1:int,col2:int,col3:int>")
+    }
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to