This is an automated email from the ASF dual-hosted git repository.
yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 53c1f31dc26b [SPARK-49119][SQL] Fix the inconsistency of syntax `show
columns` between v1 and v2
53c1f31dc26b is described below
commit 53c1f31dc26bb56d56e0b71b144910df5d376a76
Author: panbingkun <[email protected]>
AuthorDate: Fri Aug 30 16:15:01 2024 +0800
[SPARK-49119][SQL] Fix the inconsistency of syntax `show columns` between
v1 and v2
### What changes were proposed in this pull request?
The PR aims to:
- fix the `inconsistency` of syntax `show columns` between `v1` and `v2`.
- assign a name `SHOW_COLUMNS_WITH_CONFLICT_NAMESPACE` to the error
condition `_LEGACY_ERROR_TEMP_1057`.
- unify v1 and v2 `SHOW COLUMNS ...` tests.
- move some UT related to `SHOW COLUMNS` from `DDLSuite` to
`command/ShowColumnsSuiteBase` or `v1/ShowColumnsSuiteBase`.
- move some UT related to `SHOW COLUMNS` from `DDLParserSuite` and
`ErrorParserSuite` to `ShowColumnsParserSuite`.
### Why are the changes needed?
In `AstBuilder`, we have `a comment` that explains as follows:
https://github.com/apache/spark/blob/2a752105091ef95f994526b15bae2159657c8ed0/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala#L5054-L5055
However, in our v2 implementation of the `show columns` syntax, we `did
not` perform the above checks, as shown below:
```
withNamespaceAndTable("ns", "tbl") { t =>
sql(s"CREATE TABLE $t (col1 int, col2 string) $defaultUsing")
sql(s"SHOW COLUMNS IN $t IN ns1")
}
```
- Before (inconsistent: v1 will fail, but v2 will succeed)
v1:
```
[SHOW_COLUMNS_WITH_CONFLICT_NAMESPACE] SHOW COLUMNS with conflicting
namespace: `ns1` != `ns`.
```
v2:
```
Executes successfully.
```
#### so, we should fix it.
- After (consistent: both v1 & v2 will fail)
v1:
```
[SHOW_COLUMNS_WITH_CONFLICT_NAMESPACE] SHOW COLUMNS with conflicting
namespace: `ns1` != `ns`.
```
v2:
```
[SHOW_COLUMNS_WITH_CONFLICT_NAMESPACE] SHOW COLUMNS with conflicting
namespace: `ns1` != `ns`.
```
### Does this PR introduce _any_ user-facing change?
Yes. For v2 tables, in the syntax `SHOW COLUMNS {FROM | IN} {tableName} {FROM |
IN} {namespace}`, if the namespace (the `second parameter`) differs from the
namespace of the table (the `first parameter`), the command previously succeeded
silently; after this PR, it reports an error.
### How was this patch tested?
Add new UT.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #47628 from panbingkun/SPARK-49119.
Lead-authored-by: panbingkun <[email protected]>
Co-authored-by: Kent Yao <[email protected]>
Signed-off-by: yangjie01 <[email protected]>
---
.../src/main/resources/error/error-conditions.json | 11 +--
.../spark/sql/errors/QueryCompilationErrors.scala | 11 +--
.../spark/sql/catalyst/parser/DDLParserSuite.scala | 23 -----
.../sql/catalyst/parser/ErrorParserSuite.scala | 4 -
.../catalyst/analysis/ResolveSessionCatalog.scala | 3 +-
.../datasources/v2/DataSourceV2Strategy.scala | 13 ++-
...olumnsTableExec.scala => ShowColumnsExec.scala} | 4 +-
.../analyzer-results/show_columns.sql.out | 7 +-
.../sql-tests/results/show_columns.sql.out | 7 +-
.../spark/sql/connector/DataSourceV2SQLSuite.scala | 10 ---
.../spark/sql/execution/command/DDLSuite.scala | 33 -------
.../execution/command/ShowColumnsParserSuite.scala | 55 ++++++++++++
.../execution/command/ShowColumnsSuiteBase.scala | 100 +++++++++++++++++++++
.../execution/command/v1/ShowColumnsSuite.scala | 55 ++++++++++++
.../execution/command/v2/ShowColumnsSuite.scala} | 17 +---
.../hive/execution/command/ShowColumnsSuite.scala} | 18 ++--
16 files changed, 255 insertions(+), 116 deletions(-)
diff --git a/common/utils/src/main/resources/error/error-conditions.json
b/common/utils/src/main/resources/error/error-conditions.json
index 89d2627ef32e..496a90e5db34 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -3866,6 +3866,12 @@
],
"sqlState" : "42K08"
},
+ "SHOW_COLUMNS_WITH_CONFLICT_NAMESPACE" : {
+ "message" : [
+ "SHOW COLUMNS with conflicting namespaces: <namespaceA> != <namespaceB>."
+ ],
+ "sqlState" : "42K05"
+ },
"SORT_BY_WITHOUT_BUCKETING" : {
"message" : [
"sortBy must be used together with bucketBy."
@@ -5685,11 +5691,6 @@
"ADD COLUMN with v1 tables cannot specify NOT NULL."
]
},
- "_LEGACY_ERROR_TEMP_1057" : {
- "message" : [
- "SHOW COLUMNS with conflicting databases: '<dbA>' != '<dbB>'."
- ]
- },
"_LEGACY_ERROR_TEMP_1058" : {
"message" : [
"Cannot create table with both USING <provider> and <serDeInfo>."
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 51ab2eb06323..613e7cff1e42 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -1045,13 +1045,14 @@ private[sql] object QueryCompilationErrors extends
QueryErrorsBase with Compilat
)
}
- def showColumnsWithConflictDatabasesError(
- db: Seq[String], v1TableName: TableIdentifier): Throwable = {
+ def showColumnsWithConflictNamespacesError(
+ namespaceA: Seq[String],
+ namespaceB: Seq[String]): Throwable = {
new AnalysisException(
- errorClass = "_LEGACY_ERROR_TEMP_1057",
+ errorClass = "SHOW_COLUMNS_WITH_CONFLICT_NAMESPACE",
messageParameters = Map(
- "dbA" -> db.head,
- "dbB" -> v1TableName.database.get))
+ "namespaceA" -> toSQLId(namespaceA),
+ "namespaceB" -> toSQLId(namespaceB)))
}
def cannotCreateTableWithBothProviderAndSerdeError(
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index 756ec95c70d2..d514f777e554 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -2406,29 +2406,6 @@ class DDLParserSuite extends AnalysisTest {
RefreshTable(UnresolvedTableOrView(Seq("a", "b", "c"), "REFRESH TABLE",
true)))
}
- test("show columns") {
- val sql1 = "SHOW COLUMNS FROM t1"
- val sql2 = "SHOW COLUMNS IN db1.t1"
- val sql3 = "SHOW COLUMNS FROM t1 IN db1"
- val sql4 = "SHOW COLUMNS FROM db1.t1 IN db1"
-
- val parsed1 = parsePlan(sql1)
- val expected1 = ShowColumns(UnresolvedTableOrView(Seq("t1"), "SHOW
COLUMNS", true), None)
- val parsed2 = parsePlan(sql2)
- val expected2 = ShowColumns(UnresolvedTableOrView(Seq("db1", "t1"), "SHOW
COLUMNS", true), None)
- val parsed3 = parsePlan(sql3)
- val expected3 =
- ShowColumns(UnresolvedTableOrView(Seq("db1", "t1"), "SHOW COLUMNS",
true), Some(Seq("db1")))
- val parsed4 = parsePlan(sql4)
- val expected4 =
- ShowColumns(UnresolvedTableOrView(Seq("db1", "t1"), "SHOW COLUMNS",
true), Some(Seq("db1")))
-
- comparePlans(parsed1, expected1)
- comparePlans(parsed2, expected2)
- comparePlans(parsed3, expected3)
- comparePlans(parsed4, expected4)
- }
-
test("alter view: add partition (not supported)") {
val sql =
"""ALTER VIEW a.b.c ADD IF NOT EXISTS PARTITION
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala
index cd1556a2e791..e4f9b54680dc 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala
@@ -141,10 +141,6 @@ class ErrorParserSuite extends AnalysisTest {
exception = parseException("SHOW TABLE EXTENDED IN hyphen-db LIKE
\"str\""),
errorClass = "INVALID_IDENTIFIER",
parameters = Map("ident" -> "hyphen-db"))
- checkError(
- exception = parseException("SHOW COLUMNS IN t FROM test-db"),
- errorClass = "INVALID_IDENTIFIER",
- parameters = Map("ident" -> "test-db"))
checkError(
exception = parseException("DESC SCHEMA EXTENDED test-db"),
errorClass = "INVALID_IDENTIFIER",
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
index 20e3b4e980f2..d569f1ed484c 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
@@ -330,7 +330,8 @@ class ResolveSessionCatalog(val catalogManager:
CatalogManager)
val resolver = conf.resolver
val db = ns match {
case Some(db) if v1TableName.database.exists(!resolver(_, db.head)) =>
- throw
QueryCompilationErrors.showColumnsWithConflictDatabasesError(db, v1TableName)
+ throw QueryCompilationErrors.showColumnsWithConflictNamespacesError(
+ Seq(db.head), Seq(v1TableName.database.get))
case _ => ns.map(_.head)
}
ShowColumnsCommand(db, v1TableName, output)
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
index 89882997681c..112ee2c5450b 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
@@ -45,6 +45,7 @@ import org.apache.spark.sql.execution.{FilterExec,
InSubqueryExec, LeafExecNode,
import org.apache.spark.sql.execution.command.CommandUtils
import org.apache.spark.sql.execution.datasources.{DataSourceStrategy,
LogicalRelation, PushableColumnAndNestedColumn}
import
org.apache.spark.sql.execution.streaming.continuous.{WriteToContinuousDataSource,
WriteToContinuousDataSourceExec}
+import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.StaticSQLConf.WAREHOUSE_PATH
import org.apache.spark.sql.sources.{BaseRelation, TableScan}
import org.apache.spark.storage.StorageLevel
@@ -477,7 +478,17 @@ class DataSourceV2Strategy(session: SparkSession) extends
Strategy with Predicat
Seq(part).asResolvedPartitionSpecs.head,
recacheTable(r)) :: Nil
- case ShowColumns(resolvedTable: ResolvedTable, _, output) =>
+ case ShowColumns(resolvedTable: ResolvedTable, ns, output) =>
+ ns match {
+ case Some(namespace) =>
+ val tableNamespace = resolvedTable.identifier.namespace()
+ if (namespace.length != tableNamespace.length ||
+
!namespace.zip(tableNamespace).forall(SQLConf.get.resolver.tupled)) {
+ throw
QueryCompilationErrors.showColumnsWithConflictNamespacesError(
+ namespace, tableNamespace.toSeq)
+ }
+ case _ =>
+ }
ShowColumnsExec(output, resolvedTable) :: Nil
case r @ ShowPartitions(
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowColumnsTableExec.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowColumnsExec.scala
similarity index 92%
copy from
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowColumnsTableExec.scala
copy to
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowColumnsExec.scala
index e7a608938a04..e92607aa8716 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowColumnsTableExec.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowColumnsExec.scala
@@ -26,8 +26,8 @@ import org.apache.spark.sql.execution.LeafExecNode
* Physical plan node for show columns from table.
*/
case class ShowColumnsExec(
- output: Seq[Attribute],
- resolvedTable: ResolvedTable) extends V2CommandExec with LeafExecNode {
+ output: Seq[Attribute],
+ resolvedTable: ResolvedTable) extends V2CommandExec with LeafExecNode {
override protected def run(): Seq[InternalRow] = {
resolvedTable.table.columns().map(f => toCatalystRow(f.name())).toSeq
}
diff --git
a/sql/core/src/test/resources/sql-tests/analyzer-results/show_columns.sql.out
b/sql/core/src/test/resources/sql-tests/analyzer-results/show_columns.sql.out
index 27e75187cdba..76c3b88a3ce6 100644
---
a/sql/core/src/test/resources/sql-tests/analyzer-results/show_columns.sql.out
+++
b/sql/core/src/test/resources/sql-tests/analyzer-results/show_columns.sql.out
@@ -94,10 +94,11 @@ SHOW COLUMNS IN showdb.showcolumn1 FROM baddb
-- !query analysis
org.apache.spark.sql.AnalysisException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_1057",
+ "errorClass" : "SHOW_COLUMNS_WITH_CONFLICT_NAMESPACE",
+ "sqlState" : "42K05",
"messageParameters" : {
- "dbA" : "baddb",
- "dbB" : "showdb"
+ "namespaceA" : "`baddb`",
+ "namespaceB" : "`showdb`"
}
}
diff --git a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out
b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out
index 9a0d82d3617a..bb4e7e08c6f5 100644
--- a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out
@@ -123,10 +123,11 @@ struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_1057",
+ "errorClass" : "SHOW_COLUMNS_WITH_CONFLICT_NAMESPACE",
+ "sqlState" : "42K05",
"messageParameters" : {
- "dbA" : "baddb",
- "dbB" : "showdb"
+ "namespaceA" : "`baddb`",
+ "namespaceB" : "`showdb`"
}
}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index a61a266c1ed5..1d37c6aa4eb7 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -2390,16 +2390,6 @@ class DataSourceV2SQLSuiteV1Filter
sql(s"UNCACHE TABLE IF EXISTS $t")
}
- test("SHOW COLUMNS") {
- val t = "testcat.ns1.ns2.tbl"
- withTable(t) {
- spark.sql(s"CREATE TABLE $t (id bigint, data string) USING foo")
- checkAnswer(sql(s"SHOW COLUMNS FROM $t IN testcat.ns1.ns2"),
Seq(Row("id"), Row("data")))
- checkAnswer(sql(s"SHOW COLUMNS in $t"), Seq(Row("id"), Row("data")))
- checkAnswer(sql(s"SHOW COLUMNS FROM $t"), Seq(Row("id"), Row("data")))
- }
- }
-
test("ALTER TABLE ... SET [SERDE|SERDEPROPERTIES]") {
val t = "testcat.ns1.ns2.tbl"
withTable(t) {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index c06f44d0dd04..5c1090c288ed 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -1376,39 +1376,6 @@ abstract class DDLSuite extends QueryTest with
DDLSuiteBase {
}
}
- test("show columns - negative test") {
- // When case sensitivity is true, the user supplied database name in table
identifier
- // should match the supplied database name in case sensitive way.
- withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
- withTempDatabase { db =>
- val tabName = s"$db.showcolumn"
- withTable(tabName) {
- sql(s"CREATE TABLE $tabName(col1 int, col2 string) USING parquet ")
- checkError(
- exception = intercept[AnalysisException] {
- sql(s"SHOW COLUMNS IN $db.showcolumn FROM
${db.toUpperCase(Locale.ROOT)}")
- },
- errorClass = "_LEGACY_ERROR_TEMP_1057",
- parameters = Map("dbA" -> db.toUpperCase(Locale.ROOT), "dbB" -> db)
- )
- }
- }
- }
- }
-
- test("show columns - invalid db name") {
- withTable("tbl") {
- sql("CREATE TABLE tbl(col1 int, col2 string) USING parquet ")
- checkError(
- exception = intercept[AnalysisException] {
- sql("SHOW COLUMNS IN tbl FROM a.b.c")
- },
- errorClass = "REQUIRES_SINGLE_PART_NAMESPACE",
- parameters = Map("sessionCatalog" -> "spark_catalog", "namespace" ->
"`a`.`b`.`c`")
- )
- }
- }
-
test("SPARK-18009 calling toLocalIterator on commands") {
import scala.jdk.CollectionConverters._
val df = sql("show databases")
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowColumnsParserSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowColumnsParserSuite.scala
new file mode 100644
index 000000000000..17a6df87aa0e
--- /dev/null
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowColumnsParserSuite.scala
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.catalyst.analysis.{AnalysisTest,
UnresolvedTableOrView}
+import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
+import org.apache.spark.sql.catalyst.plans.logical.ShowColumns
+
+class ShowColumnsParserSuite extends AnalysisTest {
+
+ test("show columns") {
+ comparePlans(
+ parsePlan("SHOW COLUMNS IN a.b.c"),
+ ShowColumns(
+ UnresolvedTableOrView(Seq("a", "b", "c"), "SHOW COLUMNS",
allowTempView = true),
+ None))
+ comparePlans(
+ parsePlan("SHOW COLUMNS FROM a.b.c"),
+ ShowColumns(
+ UnresolvedTableOrView(Seq("a", "b", "c"), "SHOW COLUMNS",
allowTempView = true),
+ None))
+ comparePlans(
+ parsePlan("SHOW COLUMNS IN a.b.c FROM a.b"),
+ ShowColumns(UnresolvedTableOrView(Seq("a", "b", "c"), "SHOW COLUMNS",
allowTempView = true),
+ Some(Seq("a", "b"))))
+ comparePlans(
+ parsePlan("SHOW COLUMNS FROM a.b.c IN a.b"),
+ ShowColumns(UnresolvedTableOrView(Seq("a", "b", "c"), "SHOW COLUMNS",
allowTempView = true),
+ Some(Seq("a", "b"))))
+ }
+
+ test("illegal characters in unquoted identifier") {
+ checkError(
+ exception = parseException(parsePlan)("SHOW COLUMNS IN t FROM test-db"),
+ errorClass = "INVALID_IDENTIFIER",
+ sqlState = "42602",
+ parameters = Map("ident" -> "test-db")
+ )
+ }
+}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowColumnsSuiteBase.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowColumnsSuiteBase.scala
new file mode 100644
index 000000000000..c6f4e0bbd01a
--- /dev/null
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowColumnsSuiteBase.scala
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import java.util.Locale
+
+import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
+import org.apache.spark.sql.internal.SQLConf
+
+/**
+ * This base suite contains unified tests for the `SHOW COLUMNS ...` command
that
+ * check V1 and V2 table catalogs. The tests that cannot run for all supported
catalogs are
+ * located in more specific test suites:
+ *
+ * - V2 table catalog tests:
+ * `org.apache.spark.sql.execution.command.v2.ShowColumnsSuite`
+ * - V1 table catalog tests:
+ * `org.apache.spark.sql.execution.command.v1.ShowColumnsSuiteBase`
+ * - V1 In-Memory catalog:
+ * `org.apache.spark.sql.execution.command.v1.ShowColumnsSuite`
+ * - V1 Hive External catalog:
+ * `org.apache.spark.sql.hive.execution.command.ShowColumnsSuite`
+ */
+trait ShowColumnsSuiteBase extends QueryTest with DDLCommandTestUtils {
+ override val command = "SHOW COLUMNS ..."
+
+ test("basic test") {
+ withNamespaceAndTable("ns", "tbl") { t =>
+ sql(s"CREATE TABLE $t(col1 int, col2 string) $defaultUsing")
+ val expected = Seq(Row("col1"), Row("col2"))
+ checkAnswer(sql(s"SHOW COLUMNS FROM $t IN ns"), expected)
+ checkAnswer(sql(s"SHOW COLUMNS IN $t FROM ns"), expected)
+ checkAnswer(sql(s"SHOW COLUMNS IN $t"), expected)
+ }
+ }
+
+ test("negative test - the table does not exist") {
+ withNamespaceAndTable("ns", "tbl") { t =>
+ sql(s"CREATE TABLE $t(col1 int, col2 string) $defaultUsing")
+
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql(s"SHOW COLUMNS IN tbl IN ns1")
+ },
+ errorClass = "TABLE_OR_VIEW_NOT_FOUND",
+ parameters = Map("relationName" -> "`ns1`.`tbl`"),
+ context = ExpectedContext(fragment = "tbl", start = 16, stop = 18)
+ )
+ }
+ }
+
+ test("the namespace of the table conflicts with the specified namespace") {
+ withNamespaceAndTable("ns", "tbl") { t =>
+ sql(s"CREATE TABLE $t(col1 int, col2 string) $defaultUsing")
+
+ val sqlText1 = s"SHOW COLUMNS IN $t IN ns1"
+ val sqlText2 = s"SHOW COLUMNS IN $t FROM
${"ns".toUpperCase(Locale.ROOT)}"
+
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql(sqlText1)
+ },
+ errorClass = "SHOW_COLUMNS_WITH_CONFLICT_NAMESPACE",
+ parameters = Map(
+ "namespaceA" -> s"`ns1`",
+ "namespaceB" -> s"`ns`"
+ )
+ )
+ // When case sensitivity is true, the user supplied namespace name in
table identifier
+ // should match the supplied namespace name in case-sensitive way.
+ withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql(sqlText2)
+ },
+ errorClass = "SHOW_COLUMNS_WITH_CONFLICT_NAMESPACE",
+ parameters = Map(
+ "namespaceA" -> s"`${"ns".toUpperCase(Locale.ROOT)}`",
+ "namespaceB" -> "`ns`"
+ )
+ )
+ }
+ }
+ }
+}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowColumnsSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowColumnsSuite.scala
new file mode 100644
index 000000000000..e9459a224486
--- /dev/null
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowColumnsSuite.scala
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command.v1
+
+import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.execution.command
+
+/**
+ * This base suite contains unified tests for the `SHOW COLUMNS ...` command
that check V1 table
+ * catalogs. The tests that cannot run for all V1 catalogs are located in more
specific test suites:
+ *
+ * - V1 In-Memory catalog:
+ * `org.apache.spark.sql.execution.command.v1.ShowColumnsSuite`
+ * - V1 Hive External catalog:
+ * `org.apache.spark.sql.hive.execution.command.ShowColumnsSuite`
+ */
+trait ShowColumnsSuiteBase extends command.ShowColumnsSuiteBase {
+
+ test("invalid db name") {
+ withNamespaceAndTable("ns", "tbl") { t =>
+ sql(s"CREATE TABLE $t(col1 int, col2 string) $defaultUsing")
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql("SHOW COLUMNS IN tbl FROM a.b.c")
+ },
+ errorClass = "REQUIRES_SINGLE_PART_NAMESPACE",
+ parameters = Map(
+ "sessionCatalog" -> catalog,
+ "namespace" -> "`a`.`b`.`c`"
+ )
+ )
+ }
+ }
+}
+
+/**
+ * The class contains tests for the `SHOW COLUMNS ...` command to check V1
In-Memory
+ * table catalog.
+ */
+class ShowColumnsSuite extends ShowColumnsSuiteBase with CommandSuiteBase
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowColumnsTableExec.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowColumnsSuite.scala
similarity index 57%
copy from
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowColumnsTableExec.scala
copy to
sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowColumnsSuite.scala
index e7a608938a04..64ddce85658e 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowColumnsTableExec.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowColumnsSuite.scala
@@ -15,20 +15,11 @@
* limitations under the License.
*/
-package org.apache.spark.sql.execution.datasources.v2
+package org.apache.spark.sql.execution.command.v2
-import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.ResolvedTable
-import org.apache.spark.sql.catalyst.expressions.Attribute
-import org.apache.spark.sql.execution.LeafExecNode
+import org.apache.spark.sql.execution.command
/**
- * Physical plan node for show columns from table.
+ * The class contains tests for the `SHOW COLUMNS ...` command to check V2
table catalogs.
*/
-case class ShowColumnsExec(
- output: Seq[Attribute],
- resolvedTable: ResolvedTable) extends V2CommandExec with LeafExecNode {
- override protected def run(): Seq[InternalRow] = {
- resolvedTable.table.columns().map(f => toCatalystRow(f.name())).toSeq
- }
-}
+class ShowColumnsSuite extends command.ShowColumnsSuiteBase with
CommandSuiteBase
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowColumnsTableExec.scala
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowColumnsSuite.scala
similarity index 57%
rename from
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowColumnsTableExec.scala
rename to
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowColumnsSuite.scala
index e7a608938a04..4b36d00455af 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowColumnsTableExec.scala
+++
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowColumnsSuite.scala
@@ -15,20 +15,12 @@
* limitations under the License.
*/
-package org.apache.spark.sql.execution.datasources.v2
+package org.apache.spark.sql.hive.execution.command
-import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.ResolvedTable
-import org.apache.spark.sql.catalyst.expressions.Attribute
-import org.apache.spark.sql.execution.LeafExecNode
+import org.apache.spark.sql.execution.command.v1
/**
- * Physical plan node for show columns from table.
+ * The class contains tests for the `SHOW COLUMNS ...` command to check V1
Hive external
+ * table catalog.
*/
-case class ShowColumnsExec(
- output: Seq[Attribute],
- resolvedTable: ResolvedTable) extends V2CommandExec with LeafExecNode {
- override protected def run(): Seq[InternalRow] = {
- resolvedTable.table.columns().map(f => toCatalystRow(f.name())).toSeq
- }
-}
+class ShowColumnsSuite extends v1.ShowColumnsSuiteBase with CommandSuiteBase
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]