sunxiaoguang commented on code in PR #49453:
URL: https://github.com/apache/spark/pull/49453#discussion_r1919498732
##########
connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala:
##########
@@ -241,6 +241,58 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTest
assert(rows10(0).getString(0) === "amy")
assert(rows10(1).getString(0) === "alex")
}
+
+ test("SPARK-50793: MySQL JDBC Connector failed to cast some types") {
+ val tableName = catalogName + ".test_cast_function"
+ withTable(tableName) {
+ val stringValue = "0"
+ val stringLiteral = "'0'"
+ val longValue = 0L
+ val binaryValue = Array[Byte](0x30)
+ val binaryLiteral = "x'30'"
+ val doubleValue = 0.0
+ val doubleLiteral = "0.0"
+ // CREATE TABLE using types defined in Spark SQL
+ sql(s"""CREATE TABLE $tableName (
Review Comment:
The interpolated template is longer than the 100-character limit and fails the
style check, unless we change the column names to something shorter like
c1, c2, c3, c4. What do you think about switching to those column names?
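
As a rough sketch, the template could look like the snippet below after renaming.
It continues the test body quoted above (tableName is already defined there); the
column names c1..c4 follow the suggestion in this comment, and the Spark SQL column
types are an assumption for illustration, mapped from the string, long, binary, and
double test values, not taken from the PR:

    // Hypothetical shortened template: single-character column names keep the
    // interpolated CREATE TABLE line well under the 100-character scalastyle limit.
    // Column types (STRING, BIGINT, BINARY, DOUBLE) are assumed for illustration.
    sql(s"""CREATE TABLE $tableName (
      c1 STRING, c2 BIGINT, c3 BINARY, c4 DOUBLE)""")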