MaxGekk commented on a change in pull request #34719:
URL: https://github.com/apache/spark/pull/34719#discussion_r758189709



##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowCreateTableParserSuite.scala
##########
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, 
UnresolvedTableOrView}
+import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
+import org.apache.spark.sql.catalyst.plans.logical.ShowCreateTable
+
+class ShowCreateTableParserSuite extends AnalysisTest {
+  test("SHOW CREATE table") {
+    comparePlans(
+      parsePlan("SHOW CREATE TABLE a.b.c"),
+      ShowCreateTable(
+        UnresolvedTableOrView(Seq("a", "b", "c"), "SHOW CREATE TABLE", 
allowTempView = false)))
+
+    comparePlans(
+      parsePlan("SHOW CREATE TABLE a.b.c AS SERDE"),

Review comment:
       The doc 
https://github.com/apache/spark/blob/master/docs/sql-ref-syntax-aux-show-create-table.md
 doesn't describe the `SERDE` option. Could you open a JIRA ticket if one 
doesn't already exist?

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowCreateTableSuiteBase.scala
##########
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.{AnalysisException, QueryTest}
+import org.apache.spark.sql.sources.SimpleInsertSource
+import org.apache.spark.util.Utils
+
+/**
+ * This base suite contains unified tests for the `SHOW CREATE TABLE` command 
that check V1 and V2
+ * table catalogs. The tests that cannot run for all supported catalogs are 
located in more
+ * specific test suites:
+ *
+ *   - V2 table catalog tests: 
`org.apache.spark.sql.execution.command.v2.ShowCreateTableSuite`
+ *   - V1 table catalog tests:
+ *     `org.apache.spark.sql.execution.command.v1.ShowCreateTableSuiteBase`
+ *     - V1 In-Memory catalog: 
`org.apache.spark.sql.execution.command.v1.ShowCreateTableSuite`
+ *     - V1 Hive External catalog:
+*        `org.apache.spark.sql.hive.execution.command.ShowCreateTableSuite`
+ */
+trait ShowCreateTableSuiteBase extends QueryTest with DDLCommandTestUtils {
+  override val command = "SHOW CREATE TABLE"
+
+  test("SPARK-33892: SHOW CREATE TABLE w/ char/varchar") {
+    val db = "ns1"
+    val table = "tbl"

Review comment:
       Just inline `db` and `table` into `withNamespaceAndTable` since they are 
used only once.

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowCreateTableSuiteBase.scala
##########
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.{AnalysisException, QueryTest}
+import org.apache.spark.sql.sources.SimpleInsertSource
+import org.apache.spark.util.Utils
+
+/**
+ * This base suite contains unified tests for the `SHOW CREATE TABLE` command 
that check V1 and V2
+ * table catalogs. The tests that cannot run for all supported catalogs are 
located in more
+ * specific test suites:
+ *
+ *   - V2 table catalog tests: 
`org.apache.spark.sql.execution.command.v2.ShowCreateTableSuite`
+ *   - V1 table catalog tests:
+ *     `org.apache.spark.sql.execution.command.v1.ShowCreateTableSuiteBase`
+ *     - V1 In-Memory catalog: 
`org.apache.spark.sql.execution.command.v1.ShowCreateTableSuite`
+ *     - V1 Hive External catalog:
+*        `org.apache.spark.sql.hive.execution.command.ShowCreateTableSuite`
+ */
+trait ShowCreateTableSuiteBase extends QueryTest with DDLCommandTestUtils {
+  override val command = "SHOW CREATE TABLE"
+
+  test("SPARK-33892: SHOW CREATE TABLE w/ char/varchar") {
+    val db = "ns1"
+    val table = "tbl"
+    withNamespaceAndTable(db, table) { t =>
+      sql(s"CREATE TABLE $t(v VARCHAR(3), c CHAR(5)) $defaultUsing")
+      val rest = sql(s"SHOW CREATE TABLE $t").head().getString(0)
+      assert(rest.contains("VARCHAR(3)"))
+      assert(rest.contains("CHAR(5)"))
+    }
+  }
+
+  test("DO NOT SUPPORT TEMP VIEW") {

Review comment:
       Why is the title in upper case? Could you align the test titles with 
those used in the other unified test suites?

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowCreateTableSuiteBase.scala
##########
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.{AnalysisException, QueryTest}
+import org.apache.spark.sql.sources.SimpleInsertSource
+import org.apache.spark.util.Utils
+
+/**
+ * This base suite contains unified tests for the `SHOW CREATE TABLE` command 
that check V1 and V2
+ * table catalogs. The tests that cannot run for all supported catalogs are 
located in more
+ * specific test suites:
+ *
+ *   - V2 table catalog tests: 
`org.apache.spark.sql.execution.command.v2.ShowCreateTableSuite`
+ *   - V1 table catalog tests:
+ *     `org.apache.spark.sql.execution.command.v1.ShowCreateTableSuiteBase`
+ *     - V1 In-Memory catalog: 
`org.apache.spark.sql.execution.command.v1.ShowCreateTableSuite`
+ *     - V1 Hive External catalog:
+*        `org.apache.spark.sql.hive.execution.command.ShowCreateTableSuite`
+ */
+trait ShowCreateTableSuiteBase extends QueryTest with DDLCommandTestUtils {
+  override val command = "SHOW CREATE TABLE"
+
+  test("SPARK-33892: SHOW CREATE TABLE w/ char/varchar") {
+    val db = "ns1"
+    val table = "tbl"
+    withNamespaceAndTable(db, table) { t =>
+      sql(s"CREATE TABLE $t(v VARCHAR(3), c CHAR(5)) $defaultUsing")
+      val rest = sql(s"SHOW CREATE TABLE $t").head().getString(0)
+      assert(rest.contains("VARCHAR(3)"))
+      assert(rest.contains("CHAR(5)"))
+    }
+  }
+
+  test("DO NOT SUPPORT TEMP VIEW") {
+    val viewName = "spark_28383"
+    withTempView(viewName) {
+      sql(s"CREATE TEMPORARY VIEW $viewName AS SELECT 1 AS a")
+      val ex = intercept[AnalysisException] {
+        sql(s"SHOW CREATE TABLE $viewName")
+      }
+      assert(ex.getMessage.contains(
+        s"$viewName is a temp view. 'SHOW CREATE TABLE' expects a table or 
permanent view."))
+    }
+
+    withGlobalTempView(viewName) {
+      sql(s"CREATE GLOBAL TEMPORARY VIEW $viewName AS SELECT 1 AS a")
+      val globalTempViewDb = 
spark.sessionState.catalog.globalTempViewManager.database
+      val ex = intercept[AnalysisException] {
+        sql(s"SHOW CREATE TABLE $globalTempViewDb.$viewName")
+      }
+      assert(ex.getMessage.contains(
+        s"$globalTempViewDb.$viewName is a temp view. " +
+          "'SHOW CREATE TABLE' expects a table or permanent view."))
+    }
+  }
+
+  test("SPARK-36012: ADD NULL FLAG WHEN SHOW CREATE TABLE") {
+    val db = "ns1"
+    val table = "tbl"
+    withNamespaceAndTable(db, table) { t =>
+      sql(
+        s"""
+           |CREATE TABLE $t (
+           |  a bigint NOT NULL,
+           |  b bigint
+           |)
+           |USING ${classOf[SimpleInsertSource].getName}
+        """.stripMargin)
+      val showDDL = getShowCreateDDL(s"SHOW CREATE TABLE $t")
+      // if v2 showDDL(0) == s"CREATE TABLE $t ("
+      // if v1 showDDL(0) == s"CREATE TABLE `$db`.`$table` ("

Review comment:
       Can you encapsulate the catalog-specific parts in separate traits? 

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowCreateTableSuiteBase.scala
##########
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.{AnalysisException, QueryTest}
+import org.apache.spark.sql.sources.SimpleInsertSource
+import org.apache.spark.util.Utils
+
+/**
+ * This base suite contains unified tests for the `SHOW CREATE TABLE` command 
that check V1 and V2
+ * table catalogs. The tests that cannot run for all supported catalogs are 
located in more
+ * specific test suites:
+ *
+ *   - V2 table catalog tests: 
`org.apache.spark.sql.execution.command.v2.ShowCreateTableSuite`
+ *   - V1 table catalog tests:
+ *     `org.apache.spark.sql.execution.command.v1.ShowCreateTableSuiteBase`
+ *     - V1 In-Memory catalog: 
`org.apache.spark.sql.execution.command.v1.ShowCreateTableSuite`
+ *     - V1 Hive External catalog:
+*        `org.apache.spark.sql.hive.execution.command.ShowCreateTableSuite`
+ */
+trait ShowCreateTableSuiteBase extends QueryTest with DDLCommandTestUtils {
+  override val command = "SHOW CREATE TABLE"
+
+  test("SPARK-33892: SHOW CREATE TABLE w/ char/varchar") {
+    val db = "ns1"
+    val table = "tbl"
+    withNamespaceAndTable(db, table) { t =>
+      sql(s"CREATE TABLE $t(v VARCHAR(3), c CHAR(5)) $defaultUsing")
+      val rest = sql(s"SHOW CREATE TABLE $t").head().getString(0)
+      assert(rest.contains("VARCHAR(3)"))
+      assert(rest.contains("CHAR(5)"))
+    }
+  }
+
+  test("DO NOT SUPPORT TEMP VIEW") {
+    val viewName = "spark_28383"
+    withTempView(viewName) {
+      sql(s"CREATE TEMPORARY VIEW $viewName AS SELECT 1 AS a")
+      val ex = intercept[AnalysisException] {
+        sql(s"SHOW CREATE TABLE $viewName")
+      }
+      assert(ex.getMessage.contains(
+        s"$viewName is a temp view. 'SHOW CREATE TABLE' expects a table or 
permanent view."))
+    }
+
+    withGlobalTempView(viewName) {
+      sql(s"CREATE GLOBAL TEMPORARY VIEW $viewName AS SELECT 1 AS a")
+      val globalTempViewDb = 
spark.sessionState.catalog.globalTempViewManager.database
+      val ex = intercept[AnalysisException] {
+        sql(s"SHOW CREATE TABLE $globalTempViewDb.$viewName")
+      }
+      assert(ex.getMessage.contains(
+        s"$globalTempViewDb.$viewName is a temp view. " +
+          "'SHOW CREATE TABLE' expects a table or permanent view."))
+    }
+  }
+
+  test("SPARK-36012: ADD NULL FLAG WHEN SHOW CREATE TABLE") {
+    val db = "ns1"
+    val table = "tbl"
+    withNamespaceAndTable(db, table) { t =>
+      sql(
+        s"""
+           |CREATE TABLE $t (
+           |  a bigint NOT NULL,
+           |  b bigint
+           |)
+           |USING ${classOf[SimpleInsertSource].getName}
+        """.stripMargin)
+      val showDDL = getShowCreateDDL(s"SHOW CREATE TABLE $t")
+      // if v2 showDDL(0) == s"CREATE TABLE $t ("
+      // if v1 showDDL(0) == s"CREATE TABLE `$db`.`$table` ("
+      assert(showDDL(1) == "`a` BIGINT NOT NULL,")
+      assert(showDDL(2) == "`b` BIGINT)")
+      assert(showDDL(3) == s"USING ${classOf[SimpleInsertSource].getName}")
+    }
+  }
+
+  test("SPARK-24911: KEEP QUOTES FOR NESTED FIELDS") {
+    val db = "ns1"
+    val table = "tbl"
+    withNamespaceAndTable(db, table) { t =>
+      sql(s"CREATE TABLE $t (`a` STRUCT<`b`: STRING>) $defaultUsing")
+      val showDDL = getShowCreateDDL(s"SHOW CREATE TABLE $t")
+      // if v2 showDDL(0) == s"CREATE TABLE $t ("
+      // if v1 showDDL(0) == s"CREATE TABLE `$db`.`$table` ("
+      assert(showDDL(1) == "`a` STRUCT<`b`: STRING>)")
+      if (catalogVersion == "Hive V1") {

Review comment:
       If there are diffs, please put the tests into the catalog-specific test 
suites. This will help to unify the command's output in the future; otherwise, 
we will need to review the whole base test suite to find all such `if`s.

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
##########
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command.v1
+
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.catalog.CatalogTable
+import org.apache.spark.sql.execution.command
+
+/**
+ * This base suite contains unified tests for the `SHOW CREATE TABLE` command 
that checks V1
+ * table catalogs. The tests that cannot run for all V1 catalogs are located 
in more
+ * specific test suites:
+ *
+ *   - V1 In-Memory catalog: 
`org.apache.spark.sql.execution.command.v1.ShowCreateTableSuite`
+ *   - V1 Hive External catalog:
+ *     `org.apache.spark.sql.hive.execution.command.ShowCreateTableSuite`
+ */
+trait ShowCreateTableSuiteBase extends command.ShowCreateTableSuiteBase
+    with command.TestsV1AndV2Commands {
+
+  test("CATS") {

Review comment:
       `CATS`? What does the test check? That should be clear from the test 
title. (Did you perhaps mean `CTAS`, i.e. CREATE TABLE ... AS SELECT?)

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
##########
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command.v1
+
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.catalog.CatalogTable
+import org.apache.spark.sql.execution.command
+
+/**
+ * This base suite contains unified tests for the `SHOW CREATE TABLE` command 
that checks V1
+ * table catalogs. The tests that cannot run for all V1 catalogs are located 
in more
+ * specific test suites:
+ *
+ *   - V1 In-Memory catalog: 
`org.apache.spark.sql.execution.command.v1.ShowCreateTableSuite`
+ *   - V1 Hive External catalog:
+ *     `org.apache.spark.sql.hive.execution.command.ShowCreateTableSuite`
+ */
+trait ShowCreateTableSuiteBase extends command.ShowCreateTableSuiteBase
+    with command.TestsV1AndV2Commands {
+
+  test("CATS") {
+    val table = "ddl_test"
+    withTable(table) {
+      sql(
+        s"""CREATE TABLE $table
+           |USING json
+           |PARTITIONED BY (c)
+           |CLUSTERED BY (a) SORTED BY (b) INTO 2 BUCKETS
+           |COMMENT 'This is a comment'
+           |TBLPROPERTIES ('a' = '1')
+           |AS SELECT 1 AS a, "foo" AS b, 2.5 AS c
+         """.stripMargin
+      )
+      checkCreateTable(table)
+    }
+  }
+
+  protected def checkCreateTable(table: String, serde: Boolean = false): Unit 
= {
+    checkCreateTableOrView(TableIdentifier(table, Some("default")), "TABLE", 
serde)
+  }
+
+  protected def checkCreateView(table: String, serde: Boolean = false): Unit = 
{
+    checkCreateTableOrView(TableIdentifier(table, Some("default")), "VIEW", 
serde)
+  }
+
+  protected def checkCreateTableOrView(
+      table: TableIdentifier,
+      checkType: String,
+      serde: Boolean): Unit = {
+    val db = table.database.getOrElse("default")
+    val expected = spark.sharedState.externalCatalog.getTable(db, table.table)
+    val shownDDL = if (serde) {
+      sql(s"SHOW CREATE TABLE ${table.quotedString} AS 
SERDE").head().getString(0)
+    } else {
+      sql(s"SHOW CREATE TABLE ${table.quotedString}").head().getString(0)
+    }
+    sql(s"DROP $checkType ${table.quotedString}")
+    try {
+      sql(shownDDL)
+      val actual = spark.sharedState.externalCatalog.getTable(db, table.table)
+      checkCatalogTables(expected, actual)
+    } finally {
+      sql(s"DROP $checkType IF EXISTS ${table.table}")
+    }
+  }
+
+  protected def checkCatalogTables(expected: CatalogTable, actual: 
CatalogTable): Unit = {
+    assert(CatalogTable.normalize(actual) == CatalogTable.normalize(expected))
+  }
+}
+
+/**
+ * The class contains tests for the `SHOW CREATE TABLE` command to check V1 
In-Memory
+ * table catalog.
+ */
+class ShowCreateTableSuite extends ShowCreateTableSuiteBase with 
CommandSuiteBase {
+  override def commandVersion: String = 
super[ShowCreateTableSuiteBase].commandVersion
+
+  test("SHOW CREATE TABLE") {
+    val t = "tbl"
+    withTable(t) {
+      sql(
+        s"""
+           |CREATE TABLE $t (
+           |  a bigint NOT NULL,
+           |  b bigint,
+           |  c bigint,
+           |  `extraCol` ARRAY<INT>,
+           |  `<another>` STRUCT<x: INT, y: ARRAY<BOOLEAN>>
+           |)
+           |$defaultUsing
+           |OPTIONS (
+           |  from = 0,
+           |  to = 1,
+           |  via = 2)
+           |COMMENT 'This is a comment'
+           |TBLPROPERTIES ('prop1' = '1', 'prop2' = '2', 'prop3' = 3, 'prop4' 
= 4)
+           |PARTITIONED BY (a)
+           |LOCATION '/tmp'
+        """.stripMargin)
+      val showDDL = getShowCreateDDL(s"SHOW CREATE TABLE $t")
+      assert(showDDL === Array(
+        s"CREATE TABLE `default`.`$t` (",
+        "`b` BIGINT,",
+        "`c` BIGINT,",
+        "`extraCol` ARRAY<INT>,",
+        "`<another>` STRUCT<`x`: INT, `y`: ARRAY<BOOLEAN>>,",
+        "`a` BIGINT NOT NULL)",
+        defaultUsing,
+        "OPTIONS (",
+        "`from` '0',",
+        "`to` '1',",
+        "`via` '2')",
+        "PARTITIONED BY (a)",
+        "COMMENT 'This is a comment'",
+        "LOCATION 'file:/tmp'",
+        "TBLPROPERTIES (",
+        "'prop1' = '1',",
+        "'prop2' = '2',",
+        "'prop3' = '3',",
+        "'prop4' = '4')"
+      ))
+    }

Review comment:
       Please place the diffs in the catalog-specific test suites instead of 
describing them in text. Also, open JIRAs and add TODOs to fix the 
differences.

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
##########
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command.v1
+
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.catalog.CatalogTable
+import org.apache.spark.sql.execution.command
+
+/**
+ * This base suite contains unified tests for the `SHOW CREATE TABLE` command 
that checks V1
+ * table catalogs. The tests that cannot run for all V1 catalogs are located 
in more
+ * specific test suites:
+ *
+ *   - V1 In-Memory catalog: 
`org.apache.spark.sql.execution.command.v1.ShowCreateTableSuite`
+ *   - V1 Hive External catalog:
+ *     `org.apache.spark.sql.hive.execution.command.ShowCreateTableSuite`
+ */
+trait ShowCreateTableSuiteBase extends command.ShowCreateTableSuiteBase
+    with command.TestsV1AndV2Commands {
+
+  test("CATS") {
+    val table = "ddl_test"
+    withTable(table) {
+      sql(
+        s"""CREATE TABLE $table
+           |USING json

Review comment:
       Why not $defaultUsing?

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowCreateTableSuiteBase.scala
##########
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.{AnalysisException, QueryTest}
+import org.apache.spark.sql.sources.SimpleInsertSource
+import org.apache.spark.util.Utils
+
+/**
+ * This base suite contains unified tests for the `SHOW CREATE TABLE` command 
that check V1 and V2
+ * table catalogs. The tests that cannot run for all supported catalogs are 
located in more
+ * specific test suites:
+ *
+ *   - V2 table catalog tests: 
`org.apache.spark.sql.execution.command.v2.ShowCreateTableSuite`
+ *   - V1 table catalog tests:
+ *     `org.apache.spark.sql.execution.command.v1.ShowCreateTableSuiteBase`
+ *     - V1 In-Memory catalog: 
`org.apache.spark.sql.execution.command.v1.ShowCreateTableSuite`
+ *     - V1 Hive External catalog:
+*        `org.apache.spark.sql.hive.execution.command.ShowCreateTableSuite`
+ */
+trait ShowCreateTableSuiteBase extends QueryTest with DDLCommandTestUtils {
+  override val command = "SHOW CREATE TABLE"
+
+  test("SPARK-33892: SHOW CREATE TABLE w/ char/varchar") {
+    val db = "ns1"
+    val table = "tbl"
+    withNamespaceAndTable(db, table) { t =>
+      sql(s"CREATE TABLE $t(v VARCHAR(3), c CHAR(5)) $defaultUsing")
+      val rest = sql(s"SHOW CREATE TABLE $t").head().getString(0)
+      assert(rest.contains("VARCHAR(3)"))
+      assert(rest.contains("CHAR(5)"))
+    }
+  }
+
+  test("DO NOT SUPPORT TEMP VIEW") {
+    val viewName = "spark_28383"
+    withTempView(viewName) {
+      sql(s"CREATE TEMPORARY VIEW $viewName AS SELECT 1 AS a")
+      val ex = intercept[AnalysisException] {
+        sql(s"SHOW CREATE TABLE $viewName")
+      }
+      assert(ex.getMessage.contains(
+        s"$viewName is a temp view. 'SHOW CREATE TABLE' expects a table or 
permanent view."))
+    }
+
+    withGlobalTempView(viewName) {
+      sql(s"CREATE GLOBAL TEMPORARY VIEW $viewName AS SELECT 1 AS a")
+      val globalTempViewDb = 
spark.sessionState.catalog.globalTempViewManager.database
+      val ex = intercept[AnalysisException] {
+        sql(s"SHOW CREATE TABLE $globalTempViewDb.$viewName")
+      }
+      assert(ex.getMessage.contains(
+        s"$globalTempViewDb.$viewName is a temp view. " +
+          "'SHOW CREATE TABLE' expects a table or permanent view."))
+    }
+  }
+
+  test("SPARK-36012: ADD NULL FLAG WHEN SHOW CREATE TABLE") {
+    val db = "ns1"
+    val table = "tbl"
+    withNamespaceAndTable(db, table) { t =>
+      sql(
+        s"""
+           |CREATE TABLE $t (
+           |  a bigint NOT NULL,
+           |  b bigint
+           |)
+           |USING ${classOf[SimpleInsertSource].getName}
+        """.stripMargin)
+      val showDDL = getShowCreateDDL(s"SHOW CREATE TABLE $t")
+      // if v2 showDDL(0) == s"CREATE TABLE $t ("
+      // if v1 showDDL(0) == s"CREATE TABLE `$db`.`$table` ("
+      assert(showDDL(1) == "`a` BIGINT NOT NULL,")
+      assert(showDDL(2) == "`b` BIGINT)")
+      assert(showDDL(3) == s"USING ${classOf[SimpleInsertSource].getName}")
+    }
+  }
+
+  test("SPARK-24911: KEEP QUOTES FOR NESTED FIELDS") {
+    val db = "ns1"
+    val table = "tbl"
+    withNamespaceAndTable(db, table) { t =>
+      sql(s"CREATE TABLE $t (`a` STRUCT<`b`: STRING>) $defaultUsing")
+      val showDDL = getShowCreateDDL(s"SHOW CREATE TABLE $t")
+      // if v2 showDDL(0) == s"CREATE TABLE $t ("
+      // if v1 showDDL(0) == s"CREATE TABLE `$db`.`$table` ("
+      assert(showDDL(1) == "`a` STRUCT<`b`: STRING>)")
+      if (catalogVersion == "Hive V1") {
+        assert(showDDL(2) == "USING text")
+      } else {
+        assert(showDDL(2) == defaultUsing)
+      }
+    }
+  }
+
+  test("DATA SOURCE TABLE WITH USER SPECIFIED SCHEMA") {
+    val db = "ns1"
+    val table = "ddl_test"
+    withNamespaceAndTable(db, table) { t =>
+      val jsonFilePath = 
Utils.getSparkClassLoader.getResource("sample.json").getFile
+      sql(
+        s"""CREATE TABLE $t (
+           |  a STRING,
+           |  b STRING,
+           |  `extra col` ARRAY<INT>,
+           |  `<another>` STRUCT<x: INT, y: ARRAY<BOOLEAN>>
+           |)
+           |USING json
+           |OPTIONS (
+           | PATH '$jsonFilePath'
+           |)
+         """.stripMargin
+      )
+      val showDDL = getShowCreateDDL(s"SHOW CREATE TABLE $t")
+      // if v2 showDDL(0) == s"CREATE TABLE $t ("
+      // if v1 showDDL(0) == s"CREATE TABLE `$db`.`$table` ("
+      assert(showDDL(1) == "`a` STRING,")
+      assert(showDDL(2) == "`b` STRING,")
+      assert(showDDL(3) == "`extra col` ARRAY<INT>,")
+      assert(showDDL(4) == "`<another>` STRUCT<`x`: INT, `y`: 
ARRAY<BOOLEAN>>)")
+      assert(showDDL(5) == "USING json")
+      // V2 showDDL(6) == LOCATION 'jsonFilePath'
+      // V1 showDDL(6) == LOCATION 'file:jsonFilePath'
+    }
+  }
+
+  def getShowCreateDDL(showCreateTableSql: String): Array[String] = {
+    sql(showCreateTableSql).head().getString(0).split("\n").map(_.trim)
+  }

Review comment:
       How about the inverse way: take the expected output and compare it to 
the command's output?

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
##########
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command.v1
+
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.catalog.CatalogTable
+import org.apache.spark.sql.execution.command
+
+/**
+ * This base suite contains unified tests for the `SHOW CREATE TABLE` command 
that checks V1
+ * table catalogs. The tests that cannot run for all V1 catalogs are located 
in more
+ * specific test suites:
+ *
+ *   - V1 In-Memory catalog: 
`org.apache.spark.sql.execution.command.v1.ShowCreateTableSuite`
+ *   - V1 Hive External catalog:
+ *     `org.apache.spark.sql.hive.execution.command.ShowCreateTableSuite`
+ */
+trait ShowCreateTableSuiteBase extends command.ShowCreateTableSuiteBase
+    with command.TestsV1AndV2Commands {
+
+  test("CATS") {
+    val table = "ddl_test"
+    withTable(table) {
+      sql(
+        s"""CREATE TABLE $table
+           |USING json
+           |PARTITIONED BY (c)
+           |CLUSTERED BY (a) SORTED BY (b) INTO 2 BUCKETS
+           |COMMENT 'This is a comment'
+           |TBLPROPERTIES ('a' = '1')
+           |AS SELECT 1 AS a, "foo" AS b, 2.5 AS c
+         """.stripMargin
+      )
+      checkCreateTable(table)
+    }
+  }
+
+  protected def checkCreateTable(table: String, serde: Boolean = false): Unit 
= {
+    checkCreateTableOrView(TableIdentifier(table, Some("default")), "TABLE", 
serde)
+  }
+
+  protected def checkCreateView(table: String, serde: Boolean = false): Unit = 
{
+    checkCreateTableOrView(TableIdentifier(table, Some("default")), "VIEW", 
serde)
+  }
+
+  protected def checkCreateTableOrView(
+      table: TableIdentifier,
+      checkType: String,
+      serde: Boolean): Unit = {
+    val db = table.database.getOrElse("default")
+    val expected = spark.sharedState.externalCatalog.getTable(db, table.table)
+    val shownDDL = if (serde) {
+      sql(s"SHOW CREATE TABLE ${table.quotedString} AS 
SERDE").head().getString(0)
+    } else {
+      sql(s"SHOW CREATE TABLE ${table.quotedString}").head().getString(0)
+    }
+    sql(s"DROP $checkType ${table.quotedString}")
+    try {
+      sql(shownDDL)
+      val actual = spark.sharedState.externalCatalog.getTable(db, table.table)
+      checkCatalogTables(expected, actual)
+    } finally {
+      sql(s"DROP $checkType IF EXISTS ${table.table}")
+    }

Review comment:
       withTable has more complex logic in handling exceptions, see:
   ```scala
     protected def withTable(tableNames: String*)(f: => Unit): Unit = {
       Utils.tryWithSafeFinally(f) {
         tableNames.foreach { name =>
           spark.sql(s"DROP TABLE IF EXISTS $name")
         }
       }
     }
   ```
   
https://github.com/apache/spark/blob/c6c72a453b9958c32a16be3f199e256dc7bc17ae/core/src/main/scala/org/apache/spark/util/Utils.scala#L1465-L1485
   Are you sure that your custom code is reliable enough?

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
##########
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command.v1
+
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.catalog.CatalogTable
+import org.apache.spark.sql.execution.command
+
+/**
+ * This base suite contains unified tests for the `SHOW CREATE TABLE` command 
that checks V1
+ * table catalogs. The tests that cannot run for all V1 catalogs are located 
in more
+ * specific test suites:
+ *
+ *   - V1 In-Memory catalog: 
`org.apache.spark.sql.execution.command.v1.ShowCreateTableSuite`
+ *   - V1 Hive External catalog:
+ *     `org.apache.spark.sql.hive.execution.command.ShowCreateTableSuite`
+ */
+trait ShowCreateTableSuiteBase extends command.ShowCreateTableSuiteBase
+    with command.TestsV1AndV2Commands {
+
+  test("CATS") {
+    val table = "ddl_test"
+    withTable(table) {
+      sql(
+        s"""CREATE TABLE $table
+           |USING json
+           |PARTITIONED BY (c)
+           |CLUSTERED BY (a) SORTED BY (b) INTO 2 BUCKETS
+           |COMMENT 'This is a comment'
+           |TBLPROPERTIES ('a' = '1')
+           |AS SELECT 1 AS a, "foo" AS b, 2.5 AS c
+         """.stripMargin
+      )
+      checkCreateTable(table)
+    }
+  }
+
+  protected def checkCreateTable(table: String, serde: Boolean = false): Unit 
= {
+    checkCreateTableOrView(TableIdentifier(table, Some("default")), "TABLE", 
serde)
+  }
+
+  protected def checkCreateView(table: String, serde: Boolean = false): Unit = 
{
+    checkCreateTableOrView(TableIdentifier(table, Some("default")), "VIEW", 
serde)
+  }
+
+  protected def checkCreateTableOrView(
+      table: TableIdentifier,
+      checkType: String,
+      serde: Boolean): Unit = {
+    val db = table.database.getOrElse("default")
+    val expected = spark.sharedState.externalCatalog.getTable(db, table.table)
+    val shownDDL = if (serde) {
+      sql(s"SHOW CREATE TABLE ${table.quotedString} AS 
SERDE").head().getString(0)
+    } else {
+      sql(s"SHOW CREATE TABLE ${table.quotedString}").head().getString(0)
+    }
+    sql(s"DROP $checkType ${table.quotedString}")
+    try {
+      sql(shownDDL)
+      val actual = spark.sharedState.externalCatalog.getTable(db, table.table)
+      checkCatalogTables(expected, actual)
+    } finally {
+      sql(s"DROP $checkType IF EXISTS ${table.table}")
+    }
+  }
+
+  protected def checkCatalogTables(expected: CatalogTable, actual: 
CatalogTable): Unit = {
+    assert(CatalogTable.normalize(actual) == CatalogTable.normalize(expected))
+  }
+}
+
+/**
+ * The class contains tests for the `SHOW CREATE TABLE` command to check V1 
In-Memory
+ * table catalog.
+ */
+class ShowCreateTableSuite extends ShowCreateTableSuiteBase with 
CommandSuiteBase {
+  override def commandVersion: String = 
super[ShowCreateTableSuiteBase].commandVersion
+
+  test("SHOW CREATE TABLE") {
+    val t = "tbl"
+    withTable(t) {

Review comment:
       Use withNamespaceAndTable

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowCreateTableSuiteBase.scala
##########
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.{AnalysisException, QueryTest}
+import org.apache.spark.sql.sources.SimpleInsertSource
+import org.apache.spark.util.Utils
+
+/**
+ * This base suite contains unified tests for the `SHOW CREATE TABLE` command 
that check V1 and V2
+ * table catalogs. The tests that cannot run for all supported catalogs are 
located in more
+ * specific test suites:
+ *
+ *   - V2 table catalog tests: 
`org.apache.spark.sql.execution.command.v2.ShowCreateTableSuite`
+ *   - V1 table catalog tests:
+ *     `org.apache.spark.sql.execution.command.v1.ShowCreateTableSuiteBase`
+ *     - V1 In-Memory catalog: 
`org.apache.spark.sql.execution.command.v1.ShowCreateTableSuite`
+ *     - V1 Hive External catalog:
+*        `org.apache.spark.sql.hive.execution.command.ShowCreateTableSuite`
+ */
+trait ShowCreateTableSuiteBase extends QueryTest with DDLCommandTestUtils {
+  override val command = "SHOW CREATE TABLE"
+
+  test("SPARK-33892: SHOW CREATE TABLE w/ char/varchar") {
+    val db = "ns1"
+    val table = "tbl"
+    withNamespaceAndTable(db, table) { t =>
+      sql(s"CREATE TABLE $t(v VARCHAR(3), c CHAR(5)) $defaultUsing")
+      val rest = sql(s"SHOW CREATE TABLE $t").head().getString(0)
+      assert(rest.contains("VARCHAR(3)"))
+      assert(rest.contains("CHAR(5)"))
+    }
+  }
+
+  test("DO NOT SUPPORT TEMP VIEW") {
+    val viewName = "spark_28383"
+    withTempView(viewName) {
+      sql(s"CREATE TEMPORARY VIEW $viewName AS SELECT 1 AS a")
+      val ex = intercept[AnalysisException] {
+        sql(s"SHOW CREATE TABLE $viewName")
+      }
+      assert(ex.getMessage.contains(
+        s"$viewName is a temp view. 'SHOW CREATE TABLE' expects a table or 
permanent view."))
+    }
+
+    withGlobalTempView(viewName) {
+      sql(s"CREATE GLOBAL TEMPORARY VIEW $viewName AS SELECT 1 AS a")
+      val globalTempViewDb = 
spark.sessionState.catalog.globalTempViewManager.database
+      val ex = intercept[AnalysisException] {
+        sql(s"SHOW CREATE TABLE $globalTempViewDb.$viewName")
+      }
+      assert(ex.getMessage.contains(
+        s"$globalTempViewDb.$viewName is a temp view. " +
+          "'SHOW CREATE TABLE' expects a table or permanent view."))
+    }
+  }
+
+  test("SPARK-36012: ADD NULL FLAG WHEN SHOW CREATE TABLE") {
+    val db = "ns1"
+    val table = "tbl"
+    withNamespaceAndTable(db, table) { t =>

Review comment:
       Use "ns1" and "tbl" directly since they are used only once.

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowCreateTableSuiteBase.scala
##########
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.{AnalysisException, QueryTest}
+import org.apache.spark.sql.sources.SimpleInsertSource
+import org.apache.spark.util.Utils
+
+/**
+ * This base suite contains unified tests for the `SHOW CREATE TABLE` command 
that check V1 and V2
+ * table catalogs. The tests that cannot run for all supported catalogs are 
located in more
+ * specific test suites:
+ *
+ *   - V2 table catalog tests: 
`org.apache.spark.sql.execution.command.v2.ShowCreateTableSuite`
+ *   - V1 table catalog tests:
+ *     `org.apache.spark.sql.execution.command.v1.ShowCreateTableSuiteBase`
+ *     - V1 In-Memory catalog: 
`org.apache.spark.sql.execution.command.v1.ShowCreateTableSuite`
+ *     - V1 Hive External catalog:
+*        `org.apache.spark.sql.hive.execution.command.ShowCreateTableSuite`
+ */
+trait ShowCreateTableSuiteBase extends QueryTest with DDLCommandTestUtils {
+  override val command = "SHOW CREATE TABLE"
+
+  test("SPARK-33892: SHOW CREATE TABLE w/ char/varchar") {
+    val db = "ns1"
+    val table = "tbl"
+    withNamespaceAndTable(db, table) { t =>
+      sql(s"CREATE TABLE $t(v VARCHAR(3), c CHAR(5)) $defaultUsing")
+      val rest = sql(s"SHOW CREATE TABLE $t").head().getString(0)
+      assert(rest.contains("VARCHAR(3)"))
+      assert(rest.contains("CHAR(5)"))
+    }
+  }
+
+  test("DO NOT SUPPORT TEMP VIEW") {
+    val viewName = "spark_28383"
+    withTempView(viewName) {

Review comment:
       Do you really test all catalogs in such a way?

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowCreateTableSuiteBase.scala
##########
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.{AnalysisException, QueryTest}
+import org.apache.spark.sql.sources.SimpleInsertSource
+import org.apache.spark.util.Utils
+
+/**
+ * This base suite contains unified tests for the `SHOW CREATE TABLE` command 
that check V1 and V2
+ * table catalogs. The tests that cannot run for all supported catalogs are 
located in more
+ * specific test suites:
+ *
+ *   - V2 table catalog tests: 
`org.apache.spark.sql.execution.command.v2.ShowCreateTableSuite`
+ *   - V1 table catalog tests:
+ *     `org.apache.spark.sql.execution.command.v1.ShowCreateTableSuiteBase`
+ *     - V1 In-Memory catalog: 
`org.apache.spark.sql.execution.command.v1.ShowCreateTableSuite`
+ *     - V1 Hive External catalog:
+*        `org.apache.spark.sql.hive.execution.command.ShowCreateTableSuite`
+ */
+trait ShowCreateTableSuiteBase extends QueryTest with DDLCommandTestUtils {
+  override val command = "SHOW CREATE TABLE"
+
+  test("SPARK-33892: SHOW CREATE TABLE w/ char/varchar") {
+    val db = "ns1"
+    val table = "tbl"
+    withNamespaceAndTable(db, table) { t =>
+      sql(s"CREATE TABLE $t(v VARCHAR(3), c CHAR(5)) $defaultUsing")
+      val rest = sql(s"SHOW CREATE TABLE $t").head().getString(0)
+      assert(rest.contains("VARCHAR(3)"))
+      assert(rest.contains("CHAR(5)"))
+    }
+  }
+
+  test("DO NOT SUPPORT TEMP VIEW") {
+    val viewName = "spark_28383"
+    withTempView(viewName) {
+      sql(s"CREATE TEMPORARY VIEW $viewName AS SELECT 1 AS a")
+      val ex = intercept[AnalysisException] {
+        sql(s"SHOW CREATE TABLE $viewName")
+      }
+      assert(ex.getMessage.contains(
+        s"$viewName is a temp view. 'SHOW CREATE TABLE' expects a table or 
permanent view."))
+    }
+
+    withGlobalTempView(viewName) {
+      sql(s"CREATE GLOBAL TEMPORARY VIEW $viewName AS SELECT 1 AS a")
+      val globalTempViewDb = 
spark.sessionState.catalog.globalTempViewManager.database
+      val ex = intercept[AnalysisException] {
+        sql(s"SHOW CREATE TABLE $globalTempViewDb.$viewName")
+      }
+      assert(ex.getMessage.contains(
+        s"$globalTempViewDb.$viewName is a temp view. " +
+          "'SHOW CREATE TABLE' expects a table or permanent view."))
+    }
+  }
+
+  test("SPARK-36012: ADD NULL FLAG WHEN SHOW CREATE TABLE") {
+    val db = "ns1"
+    val table = "tbl"
+    withNamespaceAndTable(db, table) { t =>
+      sql(
+        s"""
+           |CREATE TABLE $t (
+           |  a bigint NOT NULL,
+           |  b bigint
+           |)
+           |USING ${classOf[SimpleInsertSource].getName}
+        """.stripMargin)
+      val showDDL = getShowCreateDDL(s"SHOW CREATE TABLE $t")
+      // if v2 showDDL(0) == s"CREATE TABLE $t ("
+      // if v1 showDDL(0) == s"CREATE TABLE `$db`.`$table` ("
+      assert(showDDL(1) == "`a` BIGINT NOT NULL,")
+      assert(showDDL(2) == "`b` BIGINT)")
+      assert(showDDL(3) == s"USING ${classOf[SimpleInsertSource].getName}")
+    }
+  }
+
+  test("SPARK-24911: KEEP QUOTES FOR NESTED FIELDS") {
+    val db = "ns1"
+    val table = "tbl"
+    withNamespaceAndTable(db, table) { t =>
+      sql(s"CREATE TABLE $t (`a` STRUCT<`b`: STRING>) $defaultUsing")
+      val showDDL = getShowCreateDDL(s"SHOW CREATE TABLE $t")
+      // if v2 showDDL(0) == s"CREATE TABLE $t ("
+      // if v1 showDDL(0) == s"CREATE TABLE `$db`.`$table` ("
+      assert(showDDL(1) == "`a` STRUCT<`b`: STRING>)")
+      if (catalogVersion == "Hive V1") {
+        assert(showDDL(2) == "USING text")
+      } else {
+        assert(showDDL(2) == defaultUsing)
+      }
+    }
+  }
+
+  test("DATA SOURCE TABLE WITH USER SPECIFIED SCHEMA") {
+    val db = "ns1"
+    val table = "ddl_test"
+    withNamespaceAndTable(db, table) { t =>
+      val jsonFilePath = 
Utils.getSparkClassLoader.getResource("sample.json").getFile
+      sql(
+        s"""CREATE TABLE $t (
+           |  a STRING,
+           |  b STRING,
+           |  `extra col` ARRAY<INT>,
+           |  `<another>` STRUCT<x: INT, y: ARRAY<BOOLEAN>>
+           |)
+           |USING json
+           |OPTIONS (
+           | PATH '$jsonFilePath'
+           |)
+         """.stripMargin
+      )
+      val showDDL = getShowCreateDDL(s"SHOW CREATE TABLE $t")
+      // if v2 showDDL(0) == s"CREATE TABLE $t ("
+      // if v1 showDDL(0) == s"CREATE TABLE `$db`.`$table` ("
+      assert(showDDL(1) == "`a` STRING,")
+      assert(showDDL(2) == "`b` STRING,")
+      assert(showDDL(3) == "`extra col` ARRAY<INT>,")
+      assert(showDDL(4) == "`<another>` STRUCT<`x`: INT, `y`: 
ARRAY<BOOLEAN>>)")
+      assert(showDDL(5) == "USING json")
+      // V2 showDDL(6) == LOCATION 'jsonFilePath'
+      // V1 showDDL(6) == LOCATION 'file:jsonFilePath'

Review comment:
       Can you write a function which accepts one expected string, splits it into 
lines, and compares them to the command output?

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowCreateTableSuiteBase.scala
##########
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.{AnalysisException, QueryTest}
+import org.apache.spark.sql.sources.SimpleInsertSource
+import org.apache.spark.util.Utils
+
+/**
+ * This base suite contains unified tests for the `SHOW CREATE TABLE` command 
that check V1 and V2
+ * table catalogs. The tests that cannot run for all supported catalogs are 
located in more
+ * specific test suites:
+ *
+ *   - V2 table catalog tests: 
`org.apache.spark.sql.execution.command.v2.ShowCreateTableSuite`
+ *   - V1 table catalog tests:
+ *     `org.apache.spark.sql.execution.command.v1.ShowCreateTableSuiteBase`
+ *     - V1 In-Memory catalog: 
`org.apache.spark.sql.execution.command.v1.ShowCreateTableSuite`
+ *     - V1 Hive External catalog:
+*        `org.apache.spark.sql.hive.execution.command.ShowCreateTableSuite`
+ */
+trait ShowCreateTableSuiteBase extends QueryTest with DDLCommandTestUtils {
+  override val command = "SHOW CREATE TABLE"
+
+  test("SPARK-33892: SHOW CREATE TABLE w/ char/varchar") {
+    val db = "ns1"
+    val table = "tbl"
+    withNamespaceAndTable(db, table) { t =>
+      sql(s"CREATE TABLE $t(v VARCHAR(3), c CHAR(5)) $defaultUsing")
+      val rest = sql(s"SHOW CREATE TABLE $t").head().getString(0)
+      assert(rest.contains("VARCHAR(3)"))
+      assert(rest.contains("CHAR(5)"))
+    }
+  }
+
+  test("DO NOT SUPPORT TEMP VIEW") {
+    val viewName = "spark_28383"
+    withTempView(viewName) {
+      sql(s"CREATE TEMPORARY VIEW $viewName AS SELECT 1 AS a")
+      val ex = intercept[AnalysisException] {
+        sql(s"SHOW CREATE TABLE $viewName")
+      }
+      assert(ex.getMessage.contains(
+        s"$viewName is a temp view. 'SHOW CREATE TABLE' expects a table or 
permanent view."))
+    }
+
+    withGlobalTempView(viewName) {
+      sql(s"CREATE GLOBAL TEMPORARY VIEW $viewName AS SELECT 1 AS a")
+      val globalTempViewDb = 
spark.sessionState.catalog.globalTempViewManager.database
+      val ex = intercept[AnalysisException] {
+        sql(s"SHOW CREATE TABLE $globalTempViewDb.$viewName")
+      }
+      assert(ex.getMessage.contains(
+        s"$globalTempViewDb.$viewName is a temp view. " +
+          "'SHOW CREATE TABLE' expects a table or permanent view."))
+    }
+  }
+
+  test("SPARK-36012: ADD NULL FLAG WHEN SHOW CREATE TABLE") {
+    val db = "ns1"
+    val table = "tbl"
+    withNamespaceAndTable(db, table) { t =>
+      sql(
+        s"""
+           |CREATE TABLE $t (
+           |  a bigint NOT NULL,
+           |  b bigint
+           |)
+           |USING ${classOf[SimpleInsertSource].getName}
+        """.stripMargin)
+      val showDDL = getShowCreateDDL(s"SHOW CREATE TABLE $t")
+      // if v2 showDDL(0) == s"CREATE TABLE $t ("
+      // if v1 showDDL(0) == s"CREATE TABLE `$db`.`$table` ("
+      assert(showDDL(1) == "`a` BIGINT NOT NULL,")
+      assert(showDDL(2) == "`b` BIGINT)")
+      assert(showDDL(3) == s"USING ${classOf[SimpleInsertSource].getName}")
+    }
+  }
+
+  test("SPARK-24911: KEEP QUOTES FOR NESTED FIELDS") {

Review comment:
       Why is the test title in upper case?

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
##########
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command.v1
+
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.catalog.CatalogTable
+import org.apache.spark.sql.execution.command
+
+/**
+ * This base suite contains unified tests for the `SHOW CREATE TABLE` command 
that checks V1
+ * table catalogs. The tests that cannot run for all V1 catalogs are located 
in more
+ * specific test suites:
+ *
+ *   - V1 In-Memory catalog: 
`org.apache.spark.sql.execution.command.v1.ShowCreateTableSuite`
+ *   - V1 Hive External catalog:
+ *     `org.apache.spark.sql.hive.execution.command.ShowCreateTableSuite`
+ */
+trait ShowCreateTableSuiteBase extends command.ShowCreateTableSuiteBase
+    with command.TestsV1AndV2Commands {
+
+  test("CATS") {
+    val table = "ddl_test"
+    withTable(table) {

Review comment:
       Use `withNamespaceAndTable`, so we could move it to the base test trait in 
the future w/o any modifications.

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
##########
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command.v1
+
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.catalog.CatalogTable
+import org.apache.spark.sql.execution.command
+
+/**
+ * This base suite contains unified tests for the `SHOW CREATE TABLE` command 
that checks V1
+ * table catalogs. The tests that cannot run for all V1 catalogs are located 
in more
+ * specific test suites:
+ *
+ *   - V1 In-Memory catalog: 
`org.apache.spark.sql.execution.command.v1.ShowCreateTableSuite`
+ *   - V1 Hive External catalog:
+ *     `org.apache.spark.sql.hive.execution.command.ShowCreateTableSuite`
+ */
+trait ShowCreateTableSuiteBase extends command.ShowCreateTableSuiteBase
+    with command.TestsV1AndV2Commands {
+
+  test("CATS") {
+    val table = "ddl_test"
+    withTable(table) {
+      sql(
+        s"""CREATE TABLE $table
+           |USING json
+           |PARTITIONED BY (c)
+           |CLUSTERED BY (a) SORTED BY (b) INTO 2 BUCKETS
+           |COMMENT 'This is a comment'
+           |TBLPROPERTIES ('a' = '1')
+           |AS SELECT 1 AS a, "foo" AS b, 2.5 AS c
+         """.stripMargin
+      )
+      checkCreateTable(table)
+    }
+  }
+
+  protected def checkCreateTable(table: String, serde: Boolean = false): Unit 
= {
+    checkCreateTableOrView(TableIdentifier(table, Some("default")), "TABLE", 
serde)
+  }
+
+  protected def checkCreateView(table: String, serde: Boolean = false): Unit = 
{
+    checkCreateTableOrView(TableIdentifier(table, Some("default")), "VIEW", 
serde)
+  }
+
+  protected def checkCreateTableOrView(
+      table: TableIdentifier,
+      checkType: String,
+      serde: Boolean): Unit = {
+    val db = table.database.getOrElse("default")
+    val expected = spark.sharedState.externalCatalog.getTable(db, table.table)
+    val shownDDL = if (serde) {
+      sql(s"SHOW CREATE TABLE ${table.quotedString} AS 
SERDE").head().getString(0)
+    } else {
+      sql(s"SHOW CREATE TABLE ${table.quotedString}").head().getString(0)
+    }
+    sql(s"DROP $checkType ${table.quotedString}")

Review comment:
       The table should exist before the check; otherwise, another test leaves 
garbage, which can be considered a bug. Please remove the `DROP ..` command.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to