This is an automated email from the ASF dual-hosted git repository.
gengliang pushed a commit to branch branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.5 by this push:
new 9088249f025 [SPARK-44455][SQL] Quote identifiers with backticks in
SHOW CREATE TABLE result
9088249f025 is described below
commit 9088249f02543c549e0b330812c77edcdbbeb795
Author: Runyao Chen <[email protected]>
AuthorDate: Mon Jul 24 15:20:09 2023 -0700
[SPARK-44455][SQL] Quote identifiers with backticks in SHOW CREATE TABLE
result
### What changes were proposed in this pull request?
This PR adds backticks around any identifiers containing special characters in
the output of `SHOW CREATE TABLE`.
### Why are the changes needed?
Without proper backticks to quote the identifiers, if users copy-paste the
results from running `SHOW CREATE TABLE`, they will hit an analysis exception.
### Does this PR introduce _any_ user-facing change?
Yes. The identifiers in the result of `SHOW CREATE TABLE` will be quoted
when necessary (i.e., when they contain special characters).
### How was this patch tested?
Added UT.
Closes #42034 from RunyaoChen/quote_ident_show_table.
Lead-authored-by: Runyao Chen <[email protected]>
Co-authored-by: RunyaoChen <[email protected]>
Signed-off-by: Gengliang Wang <[email protected]>
(cherry picked from commit 6992f8b7db8dc19cb259caddb59a9582f0e60c6d)
Signed-off-by: Gengliang Wang <[email protected]>
---
.../datasources/v2/DataSourceV2Strategy.scala | 2 +-
.../datasources/v2/ShowCreateTableExec.scala | 11 +++++++----
.../resources/sql-tests/results/charvarchar.sql.out | 6 +++---
.../sql-tests/results/show-create-table.sql.out | 20 ++++++++++----------
.../execution/command/v1/ShowCreateTableSuite.scala | 6 +++++-
.../execution/command/v2/ShowCreateTableSuite.scala | 16 ++++++++++++++++
.../execution/command/ShowCreateTableSuite.scala | 15 ++++++++-------
7 files changed, 50 insertions(+), 26 deletions(-)
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
index abd70f322c8..0106a9c5aea 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
@@ -456,7 +456,7 @@ class DataSourceV2Strategy(session: SparkSession) extends
Strategy with Predicat
if (asSerde) {
throw
QueryCompilationErrors.showCreateTableAsSerdeNotSupportedForV2TablesError()
}
- ShowCreateTableExec(output, rt.table) :: Nil
+ ShowCreateTableExec(output, rt) :: Nil
case TruncateTable(r: ResolvedTable) =>
TruncateTableExec(
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateTableExec.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateTableExec.scala
index 5712159ddc8..6fa51ed63bd 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateTableExec.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateTableExec.scala
@@ -21,6 +21,7 @@ import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.analysis.ResolvedTable
import org.apache.spark.sql.catalyst.catalog.BucketSpec
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.util.{escapeSingleQuotedString,
CharVarcharUtils}
@@ -34,15 +35,17 @@ import org.apache.spark.unsafe.types.UTF8String
*/
case class ShowCreateTableExec(
output: Seq[Attribute],
- table: Table) extends V2CommandExec with LeafExecNode {
+ resolvedTable: ResolvedTable) extends V2CommandExec with LeafExecNode {
override protected def run(): Seq[InternalRow] = {
val builder = new StringBuilder
- showCreateTable(table, builder)
+ showCreateTable(resolvedTable, builder)
Seq(InternalRow(UTF8String.fromString(builder.toString)))
}
- private def showCreateTable(table: Table, builder: StringBuilder): Unit = {
- builder ++= s"CREATE TABLE ${table.name()} "
+ private def showCreateTable(resolvedTable: ResolvedTable, builder:
StringBuilder): Unit = {
+ val table = resolvedTable.table
+ val quotedName = resolvedTable.name
+ builder ++= s"CREATE TABLE ${quotedName} "
showTableDataColumns(table, builder)
showTableUsing(table, builder)
diff --git a/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out
b/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out
index 54cdc1e3528..888e8a94289 100644
--- a/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out
@@ -49,7 +49,7 @@ show create table char_tbl
-- !query schema
struct<createtab_stmt:string>
-- !query output
-CREATE TABLE default.char_tbl (
+CREATE TABLE spark_catalog.default.char_tbl (
c CHAR(5),
v VARCHAR(6))
USING parquet
@@ -68,7 +68,7 @@ show create table char_tbl2
-- !query schema
struct<createtab_stmt:string>
-- !query output
-CREATE TABLE default.char_tbl2 (
+CREATE TABLE spark_catalog.default.char_tbl2 (
c CHAR(5),
v VARCHAR(6))
USING parquet
@@ -161,7 +161,7 @@ show create table char_tbl3
-- !query schema
struct<createtab_stmt:string>
-- !query output
-CREATE TABLE default.char_tbl3 (
+CREATE TABLE spark_catalog.default.char_tbl3 (
c CHAR(5),
v VARCHAR(6))
USING parquet
diff --git
a/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out
b/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out
index 0d73960a6b3..dcb96b9d2dc 100644
--- a/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out
@@ -12,7 +12,7 @@ SHOW CREATE TABLE tbl
-- !query schema
struct<createtab_stmt:string>
-- !query output
-CREATE TABLE default.tbl (
+CREATE TABLE spark_catalog.default.tbl (
a INT,
b STRING,
c INT)
@@ -41,7 +41,7 @@ SHOW CREATE TABLE tbl
-- !query schema
struct<createtab_stmt:string>
-- !query output
-CREATE TABLE default.tbl (
+CREATE TABLE spark_catalog.default.tbl (
a INT,
b STRING,
c INT)
@@ -73,7 +73,7 @@ SHOW CREATE TABLE tbl
-- !query schema
struct<createtab_stmt:string>
-- !query output
-CREATE TABLE default.tbl (
+CREATE TABLE spark_catalog.default.tbl (
a INT,
b STRING,
c INT)
@@ -103,7 +103,7 @@ SHOW CREATE TABLE tbl
-- !query schema
struct<createtab_stmt:string>
-- !query output
-CREATE TABLE default.tbl (
+CREATE TABLE spark_catalog.default.tbl (
a INT,
b STRING,
c INT)
@@ -133,7 +133,7 @@ SHOW CREATE TABLE tbl
-- !query schema
struct<createtab_stmt:string>
-- !query output
-CREATE TABLE default.tbl (
+CREATE TABLE spark_catalog.default.tbl (
b STRING,
c INT,
a INT)
@@ -163,7 +163,7 @@ SHOW CREATE TABLE tbl
-- !query schema
struct<createtab_stmt:string>
-- !query output
-CREATE TABLE default.tbl (
+CREATE TABLE spark_catalog.default.tbl (
a INT,
b STRING,
c INT)
@@ -195,7 +195,7 @@ SHOW CREATE TABLE tbl
-- !query schema
struct<createtab_stmt:string>
-- !query output
-CREATE TABLE default.tbl (
+CREATE TABLE spark_catalog.default.tbl (
a INT DEFAULT 42,
b STRING DEFAULT 'abc, def',
c INT DEFAULT 42)
@@ -225,7 +225,7 @@ SHOW CREATE TABLE tbl
-- !query schema
struct<createtab_stmt:string>
-- !query output
-CREATE TABLE default.tbl (
+CREATE TABLE spark_catalog.default.tbl (
a INT,
b STRING,
c INT)
@@ -255,7 +255,7 @@ SHOW CREATE TABLE tbl
-- !query schema
struct<createtab_stmt:string>
-- !query output
-CREATE TABLE default.tbl (
+CREATE TABLE spark_catalog.default.tbl (
a INT,
b STRING,
c INT)
@@ -286,7 +286,7 @@ SHOW CREATE TABLE tbl
-- !query schema
struct<createtab_stmt:string>
-- !query output
-CREATE TABLE default.tbl (
+CREATE TABLE spark_catalog.default.tbl (
a FLOAT,
b DECIMAL(10,0),
c DECIMAL(10,0),
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
index 62e8f53d765..b9fcf76ad7c 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.execution.command.v1
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.execution.command
+import
org.apache.spark.sql.execution.command.DDLCommandTestUtils.V1_COMMAND_VERSION
/**
* This base suite contains unified tests for the `SHOW CREATE TABLE` command
that checks V1
@@ -31,7 +32,10 @@ import org.apache.spark.sql.execution.command
*/
trait ShowCreateTableSuiteBase extends command.ShowCreateTableSuiteBase
with command.TestsV1AndV2Commands {
- override def fullName: String = s"$ns.$table"
+ override def fullName: String = commandVersion match {
+ case V1_COMMAND_VERSION => s"$ns.$table"
+ case _ => s"$catalog.$ns.$table"
+ }
test("show create table[simple]") {
// todo After SPARK-37517 unify the testcase both v1 and v2
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala
index 906194854b4..adda9dcfffe 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala
@@ -144,4 +144,20 @@ class ShowCreateTableSuite extends
command.ShowCreateTableSuiteBase with Command
))
}
}
+
+ test("should quote identifiers with special characters") {
+ withNamespaceAndTable("`a_schema-with+special^chars`",
"`a_table-with+special^chars`") { t =>
+ sql(s"""
+ |CREATE TABLE $t (
+ | a bigint NOT NULL,
+ | b bigint
+ |) $defaultUsing
+ """.stripMargin)
+ val showDDL = getShowCreateDDL(t)
+ assert(
+ showDDL(0) == s"CREATE TABLE
test_catalog.`a_schema-with+special^chars`." +
+ s"`a_table-with+special^chars` ("
+ )
+ }
+ }
}
diff --git
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowCreateTableSuite.scala
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowCreateTableSuite.scala
index a2e1ee24f03..55a27f336db 100644
---
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowCreateTableSuite.scala
+++
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowCreateTableSuite.scala
@@ -30,6 +30,7 @@ import org.apache.spark.sql.internal.HiveSerDe
*/
class ShowCreateTableSuite extends v1.ShowCreateTableSuiteBase with
CommandSuiteBase {
override def commandVersion: String =
super[ShowCreateTableSuiteBase].commandVersion
+ def nsTable: String = s"$ns.$table"
override def getShowCreateDDL(table: String, serde: Boolean = false):
Array[String] = {
super.getShowCreateDDL(table,
serde).filter(!_.startsWith("'transient_lastDdlTime'"))
@@ -48,7 +49,7 @@ class ShowCreateTableSuite extends
v1.ShowCreateTableSuiteBase with CommandSuite
|)
""".stripMargin
)
- val expected = s"CREATE TABLE $fullName ( c1 INT COMMENT 'bla', c2
STRING)" +
+ val expected = s"CREATE TABLE $nsTable ( c1 INT COMMENT 'bla', c2
STRING)" +
" ROW FORMAT SERDE
'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'" +
" WITH SERDEPROPERTIES ( 'serialization.format' = '1')" +
" STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'" +
@@ -73,7 +74,7 @@ class ShowCreateTableSuite extends
v1.ShowCreateTableSuiteBase with CommandSuite
|)
""".stripMargin
)
- val expected = s"CREATE EXTERNAL TABLE $fullName ( c1 INT COMMENT
'bla', c2 STRING)" +
+ val expected = s"CREATE EXTERNAL TABLE $nsTable ( c1 INT COMMENT
'bla', c2 STRING)" +
s" ROW FORMAT SERDE
'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'" +
s" WITH SERDEPROPERTIES ( 'serialization.format' = '1')" +
s" STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'"
+
@@ -100,7 +101,7 @@ class ShowCreateTableSuite extends
v1.ShowCreateTableSuiteBase with CommandSuite
|)
""".stripMargin
)
- val expected = s"CREATE TABLE $fullName ( c1 INT COMMENT 'bla', c2
STRING)" +
+ val expected = s"CREATE TABLE $nsTable ( c1 INT COMMENT 'bla', c2
STRING)" +
" COMMENT 'bla' PARTITIONED BY (p1 BIGINT COMMENT 'bla', p2 STRING)" +
" ROW FORMAT SERDE
'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'" +
" WITH SERDEPROPERTIES ( 'serialization.format' = '1')" +
@@ -124,7 +125,7 @@ class ShowCreateTableSuite extends
v1.ShowCreateTableSuiteBase with CommandSuite
|NULL DEFINED AS 'NaN'
""".stripMargin
)
- val expected = s"CREATE TABLE $fullName ( c1 INT COMMENT 'bla', c2
STRING)" +
+ val expected = s"CREATE TABLE $nsTable ( c1 INT COMMENT 'bla', c2
STRING)" +
" ROW FORMAT SERDE
'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'" +
" WITH SERDEPROPERTIES (" +
" 'colelction.delim' = '@'," +
@@ -148,7 +149,7 @@ class ShowCreateTableSuite extends
v1.ShowCreateTableSuiteBase with CommandSuite
|STORED AS PARQUET
""".stripMargin
)
- val expected = s"CREATE TABLE $fullName ( c1 INT COMMENT 'bla', c2
STRING)" +
+ val expected = s"CREATE TABLE $nsTable ( c1 INT COMMENT 'bla', c2
STRING)" +
" ROW FORMAT SERDE
'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'" +
" WITH SERDEPROPERTIES ( 'serialization.format' = '1')" +
" STORED AS INPUTFORMAT
'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'" +
@@ -175,7 +176,7 @@ class ShowCreateTableSuite extends
v1.ShowCreateTableSuiteBase with CommandSuite
| OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
""".stripMargin
)
- val expected = s"CREATE TABLE $fullName ( c1 INT COMMENT 'bla', c2
STRING)" +
+ val expected = s"CREATE TABLE $nsTable ( c1 INT COMMENT 'bla', c2
STRING)" +
" ROW FORMAT SERDE
'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'" +
" WITH SERDEPROPERTIES (" +
" 'field.delim' = ','," +
@@ -197,7 +198,7 @@ class ShowCreateTableSuite extends
v1.ShowCreateTableSuiteBase with CommandSuite
|INTO 2 BUCKETS
""".stripMargin
)
- val expected = s"CREATE TABLE $fullName ( a INT, b STRING)" +
+ val expected = s"CREATE TABLE $nsTable ( a INT, b STRING)" +
" CLUSTERED BY (a) SORTED BY (b ASC) INTO 2 BUCKETS" +
" ROW FORMAT SERDE
'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'" +
" WITH SERDEPROPERTIES ( 'serialization.format' = '1')" +
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]