This is an automated email from the ASF dual-hosted git repository. dongjoon pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new f8ff18223b36 [SPARK-46972][SQL] Fix asymmetrical replacement for char/varchar in V2SessionCatalog.createTable f8ff18223b36 is described below commit f8ff18223b365afa59ee077dc7535f1190073069 Author: Kent Yao <y...@apache.org> AuthorDate: Mon Feb 5 12:01:08 2024 -0800 [SPARK-46972][SQL] Fix asymmetrical replacement for char/varchar in V2SessionCatalog.createTable ### What changes were proposed in this pull request? This PR removes the asymmetrical replacement for char/varchar in V2SessionCatalog.createTable ### Why are the changes needed? Replacement for char/varchar shall happen on both sides of the equation `DataType.equalsIgnoreNullability(tableSchema, schema)` ### Does this PR introduce _any_ user-facing change? no ### How was this patch tested? new tests ### Was this patch authored or co-authored using generative AI tooling? no Closes #45019 from yaooqinn/SPARK-46972. Authored-by: Kent Yao <y...@apache.org> Signed-off-by: Dongjoon Hyun <dh...@apple.com> --- .../spark/sql/execution/datasources/v2/V2SessionCatalog.scala | 9 +++------ .../org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala | 9 +++++++++ 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala index e7445e970fa5..0cb3f8dca38f 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala @@ -27,7 +27,6 @@ import org.apache.spark.SparkUnsupportedOperationException import org.apache.spark.sql.catalyst.{FunctionIdentifier, SQLConfHelper, TableIdentifier} import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchTableException, TableAlreadyExistsException} import 
org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogStorageFormat, CatalogTable, CatalogTableType, CatalogUtils, ClusterBySpec, SessionCatalog} -import org.apache.spark.sql.catalyst.util.CharVarcharUtils import org.apache.spark.sql.catalyst.util.TypeUtils._ import org.apache.spark.sql.connector.catalog.{CatalogManager, CatalogV2Util, Column, FunctionCatalog, Identifier, NamespaceChange, SupportsNamespaces, Table, TableCatalog, TableCatalogCapability, TableChange, V1Table} import org.apache.spark.sql.connector.catalog.NamespaceChange.RemoveProperty @@ -206,11 +205,9 @@ class V2SessionCatalog(catalog: SessionCatalog) } val table = tableProvider.getTable(schema, partitions, dsOptions) // Check if the schema of the created table matches the given schema. - val tableSchema = CharVarcharUtils.replaceCharVarcharWithStringInSchema( - table.columns().asSchema) - if (!DataType.equalsIgnoreNullability(tableSchema, schema)) { - throw QueryCompilationErrors.dataSourceTableSchemaMismatchError( - tableSchema, schema) + val tableSchema = table.columns().asSchema + if (!DataType.equalsIgnoreNullability(table.columns().asSchema, schema)) { + throw QueryCompilationErrors.dataSourceTableSchemaMismatchError(tableSchema, schema) } (schema, partitioning) } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala index f92a9a827b1c..2701738351b1 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala @@ -1735,6 +1735,15 @@ class DataSourceV2SQLSuiteV1Filter } } + test("SPARK-46972: asymmetrical replacement for char/varchar in V2SessionCatalog.createTable") { + // unset this config to use the default v2 session catalog. 
+ spark.conf.unset(V2_SESSION_CATALOG_IMPLEMENTATION.key) + withTable("t") { + sql(s"CREATE TABLE t(c char(1), v varchar(2)) USING $v2Source") + assert(!spark.table("t").isEmpty) + } + } + test("ShowCurrentNamespace: basic tests") { def testShowCurrentNamespace(expectedCatalogName: String, expectedNamespace: String): Unit = { val schema = new StructType() --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org