This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 0a470430c81c [SPARK-46822][SQL] Respect spark.sql.legacy.charVarcharAsString when casting jdbc type to catalyst type in jdbc
0a470430c81c is described below

commit 0a470430c81ca2d46020f863c45e96227fbdd07c
Author: Kent Yao <[email protected]>
AuthorDate: Tue Jan 23 22:57:02 2024 -0800

    [SPARK-46822][SQL] Respect spark.sql.legacy.charVarcharAsString when casting jdbc type to catalyst type in jdbc
    
    ### What changes were proposed in this pull request?
    
    This PR makes `spark.sql.legacy.charVarcharAsString` take effect in `JdbcUtils.getCatalystType`.
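    
    A minimal sketch of the user-visible effect, assuming a hypothetical H2 database;
    the JDBC URL, table and column names below are illustrative assumptions, not part
    of this patch:
    
    ```scala
    import org.apache.spark.sql.SparkSession
    
    val spark = SparkSession.builder().master("local[*]").getOrCreate()
    
    // With the legacy flag enabled, CHAR/VARCHAR columns read through the JDBC
    // data source are mapped to StringType instead of CharType/VarcharType.
    spark.conf.set("spark.sql.legacy.charVarcharAsString", "true")
    
    val df = spark.read
      .format("jdbc")
      .option("url", "jdbc:h2:mem:testdb")          // hypothetical in-memory H2 database
      .option("dbtable", "\"test\".\"char_tbl\"")   // hypothetical table with CHAR/VARCHAR columns
      .load()
    
    df.printSchema()  // expected: both columns reported as string
    ```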
    
    ### Why are the changes needed?
    
    For cases like CTAS, where the table schema is derived from the query, this restores the previous behavior of creating tables with string columns instead of char/varchar.
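    
    Continuing the sketch above, a hedged CTAS example; the `h2` catalog and table
    name mirror the new test and assume a JDBC table catalog is already configured,
    as in `JDBCTableCatalogSuite`:
    
    ```scala
    // With the legacy flag on, the schema inferred from the JDBC source query is
    // plain StringType, so the CTAS target is created with string columns rather
    // than char/varchar.
    spark.conf.set("spark.sql.legacy.charVarcharAsString", "true")
    spark.sql("CREATE TABLE local_copy AS SELECT * FROM h2.test.char_tbl")
    spark.table("local_copy").printSchema()  // expected: string columns only
    ```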
    
    ### Does this PR introduce _any_ user-facing change?
    
    no
    
    ### How was this patch tested?
    
    New tests in `JDBCTableCatalogSuite`.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    no
    
    Closes #44860 from yaooqinn/SPARK-46822.
    
    Authored-by: Kent Yao <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../sql/execution/datasources/jdbc/JdbcUtils.scala |  2 ++
 .../v2/jdbc/JDBCTableCatalogSuite.scala            | 22 ++++++++++++++++++++++
 2 files changed, 24 insertions(+)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
index 9fb10f42164f..89ac615a3097 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
@@ -185,6 +185,7 @@ object JdbcUtils extends Logging with SQLConfHelper {
     case java.sql.Types.BIT => BooleanType // @see JdbcDialect for quirks
     case java.sql.Types.BLOB => BinaryType
     case java.sql.Types.BOOLEAN => BooleanType
+    case java.sql.Types.CHAR if conf.charVarcharAsString => StringType
     case java.sql.Types.CHAR => CharType(precision)
     case java.sql.Types.CLOB => StringType
     case java.sql.Types.DATE => DateType
@@ -214,6 +215,7 @@ object JdbcUtils extends Logging with SQLConfHelper {
     case java.sql.Types.TIMESTAMP => TimestampType
     case java.sql.Types.TINYINT => IntegerType
     case java.sql.Types.VARBINARY => BinaryType
+    case java.sql.Types.VARCHAR if conf.charVarcharAsString => StringType
     case java.sql.Types.VARCHAR => VarcharType(precision)
     case _ =>
       // For unmatched types:
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
index 5408d434fced..0088fab7d209 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
@@ -608,6 +608,28 @@ class JDBCTableCatalogSuite extends QueryTest with SharedSparkSession {
     }
   }
 
+  test("SPARK-46822: Respect charVarcharAsString when casting jdbc type to 
catalyst type in jdbc") {
+    try {
+      withConnection(
+        _.prepareStatement("""CREATE TABLE "test"."char_tbl" (ID CHAR(5), 
deptno VARCHAR(10))""")
+        .executeUpdate())
+      withSQLConf(SQLConf.LEGACY_CHAR_VARCHAR_AS_STRING.key -> "true") {
+        val expected = new StructType()
+          .add("ID", StringType, true, defaultMetadata)
+          .add("DEPTNO", StringType, true, defaultMetadata)
+        assert(sql(s"SELECT * FROM h2.test.char_tbl").schema === expected)
+      }
+      val expected = new StructType()
+        .add("ID", CharType(5), true, defaultMetadata)
+        .add("DEPTNO", VarcharType(10), true, defaultMetadata)
+      val replaced = CharVarcharUtils.replaceCharVarcharWithStringInSchema(expected)
+      assert(sql(s"SELECT * FROM h2.test.char_tbl").schema === replaced)
+    } finally {
+      withConnection(
+        _.prepareStatement("""DROP TABLE IF EXISTS 
"test"."char_tbl"""").executeUpdate())
+    }
+  }
+
   test("SPARK-45449: Cache Invalidation Issue with JDBC Table") {
     withTable("h2.test.cache_t") {
       withConnection { conn =>


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
