This is an automated email from the ASF dual-hosted git repository.
yao pushed a commit to branch branch-3.4
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.4 by this push:
new 3b27fb80f2dc [SPARK-44638][SQL][TESTS][3.4] Add test for Char/Varchar in JDBC customSchema option
3b27fb80f2dc is described below
commit 3b27fb80f2dca4aa1ec5886a6355750ec0bd4777
Author: Kent Yao <[email protected]>
AuthorDate: Thu Aug 1 23:45:25 2024 +0800
[SPARK-44638][SQL][TESTS][3.4] Add test for Char/Varchar in JDBC customSchema option
### What changes were proposed in this pull request?
Char/Varchar in the JDBC `customSchema` option broke in Spark 3.1 ~ 3.4, but appears to have been restored in master by recent work in the JDBC area. This PR adds a test to cover it.
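For illustration, a minimal sketch of the `customSchema` usage being covered; the SparkSession setup, URL, and query here are placeholders, not part of this patch:

```scala
import org.apache.spark.sql.SparkSession

// Placeholder session; the real coverage runs inside the docker integration suite.
val spark = SparkSession.builder()
  .master("local[*]")
  .appName("customSchema-sketch")
  .getOrCreate()

// Override the inferred JDBC schema with CHAR/VARCHAR types via customSchema.
val df = spark.read
  .format("jdbc")
  .option("url", "jdbc:mysql://localhost:3306/test")    // placeholder URL
  .option("query", "SELECT c, d FROM strings")          // placeholder query
  .option("customSchema", "c CHAR(10), d VARCHAR(10)")  // types under test
  .load()

df.printSchema()  // both columns surface as string in the DataFrame schema
```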
### Why are the changes needed?
Test coverage.
### Does this PR introduce _any_ user-facing change?
no
### How was this patch tested?
test added
### Was this patch authored or co-authored using generative AI tooling?
no
Closes #47550 from yaooqinn/SPARK-44638-F.
Authored-by: Kent Yao <yao@apache.org>
Closes #47558 from yaooqinn/SPARK-44638-F.
Authored-by: Kent Yao <[email protected]>
Signed-off-by: Kent Yao <[email protected]>
---
.../scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala | 9 +++++++++
.../apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala | 7 +++++--
2 files changed, 14 insertions(+), 2 deletions(-)
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
index 883bb0ce7bab..af6896ca13ae 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
@@ -213,6 +213,15 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite {
val nulls = spark.read.jdbc(jdbcUrl, "numbers", new Properties).tail(1).head
assert(nulls === Row(null, null, null, null, null, null, null, null, null))
}
+
+ test("SPARK-44638: Char/Varchar in Custom Schema") {
+ val df = spark.read.option("url", jdbcUrl)
+ .option("query", "SELECT c, d from strings")
+ .option("customSchema", "c CHAR(10), d VARCHAR(10)")
+ .format("jdbc")
+ .load()
+ assert(df.head === Row("brown ", "fox"))
+ }
}
/**
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
index 90778c3053dd..db45c65a12ea 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
@@ -38,6 +38,7 @@ import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.catalyst.expressions.SpecificInternalRow
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, DateTimeUtils, GenericArrayData}
+import org.apache.spark.sql.catalyst.util.CharVarcharUtils
import org.apache.spark.sql.catalyst.util.DateTimeUtils.{instantToMicros, localDateTimeToMicros, localDateToDays, toJavaDate, toJavaTimestamp, toJavaTimestampNoRebase}
import org.apache.spark.sql.connector.catalog.{Identifier, TableChange}
import org.apache.spark.sql.connector.catalog.index.{SupportsIndex, TableIndex}
@@ -380,8 +381,10 @@ object JdbcUtils extends Logging with SQLConfHelper {
* Creates `JDBCValueGetter`s according to [[StructType]], which can set
* each value from `ResultSet` to each field of [[InternalRow]] correctly.
*/
- private def makeGetters(schema: StructType): Array[JDBCValueGetter] =
- schema.fields.map(sf => makeGetter(sf.dataType, sf.metadata))
+ private def makeGetters(schema: StructType): Array[JDBCValueGetter] = {
+ val replaced = CharVarcharUtils.replaceCharVarcharWithStringInSchema(schema)
+ replaced.fields.map(sf => makeGetter(sf.dataType, sf.metadata))
+ }
private def makeGetter(dt: DataType, metadata: Metadata): JDBCValueGetter = dt match {
case BooleanType =>
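As a side note, a minimal sketch (using internal Catalyst APIs, not part of this patch) of the behavior the `makeGetters` change relies on:

```scala
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.util.CharVarcharUtils

// Parse an example customSchema string; CHAR/VARCHAR come back as CharType/VarcharType here.
val schema = CatalystSqlParser.parseTableSchema("c CHAR(10), d VARCHAR(10)")

// Map CHAR/VARCHAR fields to StringType (the original type string is kept in field metadata),
// so the per-field getters built by makeGetters can match on StringType.
val replaced = CharVarcharUtils.replaceCharVarcharWithStringInSchema(schema)
println(replaced.simpleString)  // struct<c:string,d:string>
```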