This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 7e4d6bdba0ff [SPARK-47258][SQL] Assign names to error classes
_LEGACY_ERROR_TEMP_127[0-5]
7e4d6bdba0ff is described below
commit 7e4d6bdba0ff053926adb59ad12cc063df0ad881
Author: Wei Guo <[email protected]>
AuthorDate: Mon Aug 26 13:31:59 2024 +0200
[SPARK-47258][SQL] Assign names to error classes _LEGACY_ERROR_TEMP_127[0-5]
### What changes were proposed in this pull request?
This PR renames a few error classes related to usage of SHOW CREATE TABLE
errors:
_LEGACY_ERROR_TEMP_1270 => UNSUPPORTED_SHOW_CREATE_TABLE.ON_TEMPORARY_VIEW
_LEGACY_ERROR_TEMP_1271 =>
UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNSUPPORTED_FEATURE
_LEGACY_ERROR_TEMP_1272 =>
UNSUPPORTED_SHOW_CREATE_TABLE.ON_TRANSACTIONAL_HIVE_TABLE
_LEGACY_ERROR_TEMP_1273 =>
UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNSUPPORTED_SERDE_CONFIGURATION
_LEGACY_ERROR_TEMP_1274 =>
UNSUPPORTED_SHOW_CREATE_TABLE.ON_DATA_SOURCE_TABLE_WITH_AS_SERDE
_LEGACY_ERROR_TEMP_1275 =>
UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNSUPPORTED_FEATURE
Also, this PR changes tests in the corresponding test suites to use the
checkError() method, which checks the error class name, the SQLSTATE, the
error message parameters, etc.
### Why are the changes needed?
Proper error names and messages improve user experience with Spark SQL.
### Does this PR introduce _any_ user-facing change?
Yes, this PR changes user-facing error classes and messages.
### How was this patch tested?
By running tests from `ShowCreateTableSuiteBase` and subclasses.
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #46770 from wayneguow/SPARK-47258.
Authored-by: Wei Guo <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
.../src/main/resources/error/error-conditions.json | 72 +++++++++++-----------
.../spark/sql/errors/QueryCompilationErrors.scala | 38 +++++-------
.../spark/sql/execution/command/tables.scala | 11 ++--
.../command/v1/ShowCreateTableSuite.scala | 5 +-
.../sql/hive/execution/HiveSQLViewSuite.scala | 14 +++--
.../execution/command/ShowCreateTableSuite.scala | 10 +--
6 files changed, 74 insertions(+), 76 deletions(-)
diff --git a/common/utils/src/main/resources/error/error-conditions.json
b/common/utils/src/main/resources/error/error-conditions.json
index 9228b02e3747..0d7570df87f8 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -5081,6 +5081,43 @@
},
"sqlState" : "0A000"
},
+ "UNSUPPORTED_SHOW_CREATE_TABLE" : {
+ "message" : [
+ "Unsupported a SHOW CREATE TABLE command."
+ ],
+ "subClass" : {
+ "ON_DATA_SOURCE_TABLE_WITH_AS_SERDE" : {
+ "message" : [
+ "The table <tableName> is a Spark data source table. Please use SHOW
CREATE TABLE without AS SERDE instead."
+ ]
+ },
+ "ON_TEMPORARY_VIEW" : {
+ "message" : [
+ "The command is not supported on a temporary view <tableName>."
+ ]
+ },
+ "ON_TRANSACTIONAL_HIVE_TABLE" : {
+ "message" : [
+ "Failed to execute the command against transactional Hive table
<tableName>.",
+ "Please use SHOW CREATE TABLE <tableName> AS SERDE to show Hive DDL
instead."
+ ]
+ },
+ "WITH_UNSUPPORTED_FEATURE" : {
+ "message" : [
+ "Failed to execute the command against table/view <tableName> which
is created by Hive and uses the following unsupported features",
+ "<unsupportedFeatures>"
+ ]
+ },
+ "WITH_UNSUPPORTED_SERDE_CONFIGURATION" : {
+ "message" : [
+ "Failed to execute the command against the table <tableName> which
is created by Hive and uses the following unsupported serde configuration",
+ "<configs>",
+ "Please use SHOW CREATE TABLE <tableName> AS SERDE to show Hive DDL
instead."
+ ]
+ }
+ },
+ "sqlState" : "0A000"
+ },
"UNSUPPORTED_STREAMING_OPERATOR_WITHOUT_WATERMARK" : {
"message" : [
"<outputMode> output mode not supported for <statefulOperator> on
streaming DataFrames/DataSets without watermark."
@@ -6302,41 +6339,6 @@
"Failed to truncate table <tableIdentWithDB> when removing data of the
path: <path>."
]
},
- "_LEGACY_ERROR_TEMP_1270" : {
- "message" : [
- "SHOW CREATE TABLE is not supported on a temporary view: <table>."
- ]
- },
- "_LEGACY_ERROR_TEMP_1271" : {
- "message" : [
- "Failed to execute SHOW CREATE TABLE against table <table>, which is
created by Hive and uses the following unsupported feature(s)",
- "<unsupportedFeatures>",
- "Please use `SHOW CREATE TABLE <table> AS SERDE` to show Hive DDL
instead."
- ]
- },
- "_LEGACY_ERROR_TEMP_1272" : {
- "message" : [
- "SHOW CREATE TABLE doesn't support transactional Hive table. Please use
`SHOW CREATE TABLE <table> AS SERDE` to show Hive DDL instead."
- ]
- },
- "_LEGACY_ERROR_TEMP_1273" : {
- "message" : [
- "Failed to execute SHOW CREATE TABLE against table <table>, which is
created by Hive and uses the following unsupported serde configuration",
- "<configs>",
- "Please use `SHOW CREATE TABLE <table> AS SERDE` to show Hive DDL
instead."
- ]
- },
- "_LEGACY_ERROR_TEMP_1274" : {
- "message" : [
- "<table> is a Spark data source table. Use `SHOW CREATE TABLE` without
`AS SERDE` instead."
- ]
- },
- "_LEGACY_ERROR_TEMP_1275" : {
- "message" : [
- "Failed to execute SHOW CREATE TABLE against table/view <table>, which
is created by Hive and uses the following unsupported feature(s)",
- "<features>."
- ]
- },
"_LEGACY_ERROR_TEMP_1276" : {
"message" : [
"The logical plan that represents the view is not analyzed."
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index c3f1687e8143..dae358e4ef0b 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -19,8 +19,6 @@ package org.apache.spark.sql.errors
import java.util.Locale
-import scala.collection.mutable
-
import org.apache.hadoop.fs.Path
import org.apache.spark.{SPARK_DOC_ROOT, SparkException, SparkThrowable,
SparkUnsupportedOperationException}
@@ -2959,49 +2957,41 @@ private[sql] object QueryCompilationErrors extends
QueryErrorsBase with Compilat
def showCreateTableNotSupportedOnTempView(table: String): Throwable = {
new AnalysisException(
- errorClass = "_LEGACY_ERROR_TEMP_1270",
- messageParameters = Map("table" -> table))
- }
-
- def showCreateTableFailToExecuteUnsupportedFeatureError(table:
CatalogTable): Throwable = {
- new AnalysisException(
- errorClass = "_LEGACY_ERROR_TEMP_1271",
- messageParameters = Map(
- "unsupportedFeatures" -> table.unsupportedFeatures.map(" - " +
_).mkString("\n"),
- "table" -> table.identifier.toString))
+ errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.ON_TEMPORARY_VIEW",
+ messageParameters = Map("tableName" -> toSQLId(table)))
}
def showCreateTableNotSupportTransactionalHiveTableError(table:
CatalogTable): Throwable = {
new AnalysisException(
- errorClass = "_LEGACY_ERROR_TEMP_1272",
- messageParameters = Map("table" -> table.identifier.toString))
+ errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.ON_TRANSACTIONAL_HIVE_TABLE",
+ messageParameters = Map("tableName" ->
toSQLId(table.identifier.nameParts)))
}
def showCreateTableFailToExecuteUnsupportedConfError(
table: TableIdentifier,
- builder: mutable.StringBuilder): Throwable = {
+ configs: String): Throwable = {
new AnalysisException(
- errorClass = "_LEGACY_ERROR_TEMP_1273",
+ errorClass =
"UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNSUPPORTED_SERDE_CONFIGURATION",
messageParameters = Map(
- "table" -> table.identifier,
- "configs" -> builder.toString()))
+ "tableName" -> toSQLId(table.nameParts),
+ "configs" -> configs))
}
def showCreateTableAsSerdeNotAllowedOnSparkDataSourceTableError(
table: TableIdentifier): Throwable = {
new AnalysisException(
- errorClass = "_LEGACY_ERROR_TEMP_1274",
- messageParameters = Map("table" -> table.toString))
+ errorClass =
"UNSUPPORTED_SHOW_CREATE_TABLE.ON_DATA_SOURCE_TABLE_WITH_AS_SERDE",
+ messageParameters = Map("tableName" -> toSQLId(table.nameParts)))
}
def showCreateTableOrViewFailToExecuteUnsupportedFeatureError(
table: CatalogTable,
- features: Seq[String]): Throwable = {
+ unsupportedFeatures: Seq[String]): Throwable = {
new AnalysisException(
- errorClass = "_LEGACY_ERROR_TEMP_1275",
+ errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNSUPPORTED_FEATURE",
messageParameters = Map(
- "table" -> table.identifier.toString,
- "features" -> features.map(" - " + _).mkString("\n")))
+ "tableName" -> toSQLId(table.identifier.nameParts),
+ "unsupportedFeatures" -> unsupportedFeatures.map(" - " +
_).mkString("\n")))
}
def logicalPlanForViewNotAnalyzedError(): Throwable = {
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index ee0074dfe61b..9ecd3fd19aa6 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -1183,8 +1183,8 @@ case class ShowCreateTableCommand(
} else {
// For a Hive serde table, we try to convert it to Spark DDL.
if (tableMetadata.unsupportedFeatures.nonEmpty) {
- throw
QueryCompilationErrors.showCreateTableFailToExecuteUnsupportedFeatureError(
- tableMetadata)
+ throw
QueryCompilationErrors.showCreateTableOrViewFailToExecuteUnsupportedFeatureError(
+ tableMetadata, tableMetadata.unsupportedFeatures)
}
if
("true".equalsIgnoreCase(tableMetadata.properties.getOrElse("transactional",
"false"))) {
@@ -1237,7 +1237,8 @@ case class ShowCreateTableCommand(
hiveSerde.outputFormat.foreach { format =>
builder ++= s" OUTPUTFORMAT: $format"
}
- throw
QueryCompilationErrors.showCreateTableFailToExecuteUnsupportedConfError(table,
builder)
+ throw
QueryCompilationErrors.showCreateTableFailToExecuteUnsupportedConfError(
+ table, builder.toString())
} else {
// TODO: should we keep Hive serde properties?
val newStorage = tableMetadata.storage.copy(properties = Map.empty)
@@ -1325,9 +1326,9 @@ case class ShowCreateTableAsSerdeCommand(
}
private def showCreateHiveTable(metadata: CatalogTable): String = {
- def reportUnsupportedError(features: Seq[String]): Unit = {
+ def reportUnsupportedError(unsupportedFeatures: Seq[String]): Unit = {
throw
QueryCompilationErrors.showCreateTableOrViewFailToExecuteUnsupportedFeatureError(
- metadata, features)
+ metadata, unsupportedFeatures)
}
if (metadata.unsupportedFeatures.nonEmpty) {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
index 36fde23db5c0..18b5da0ca59f 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
@@ -162,8 +162,9 @@ trait ShowCreateTableSuiteBase extends
command.ShowCreateTableSuiteBase
exception = intercept[AnalysisException] {
getShowCreateDDL(t, true)
},
- errorClass = "_LEGACY_ERROR_TEMP_1274",
- parameters = Map("table" -> "`spark_catalog`.`ns1`.`tbl`")
+ errorClass =
"UNSUPPORTED_SHOW_CREATE_TABLE.ON_DATA_SOURCE_TABLE_WITH_AS_SERDE",
+ sqlState = "0A000",
+ parameters = Map("tableName" -> "`spark_catalog`.`ns1`.`tbl`")
)
}
}
diff --git
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSQLViewSuite.scala
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSQLViewSuite.scala
index e24172194677..9d86c72f86af 100644
---
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSQLViewSuite.scala
+++
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSQLViewSuite.scala
@@ -213,20 +213,22 @@ class HiveSQLViewSuite extends SQLViewSuite with
TestHiveSingleton {
exception = intercept[AnalysisException] {
sql("SHOW CREATE TABLE v1")
},
- errorClass = "_LEGACY_ERROR_TEMP_1271",
+ errorClass =
"UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNSUPPORTED_FEATURE",
+ sqlState = "0A000",
parameters = Map(
- "unsupportedFeatures" -> " - partitioned view",
- "table" -> s"`$SESSION_CATALOG_NAME`.`default`.`v1`"
+ "tableName" -> s"`$SESSION_CATALOG_NAME`.`default`.`v1`",
+ "unsupportedFeatures" -> " - partitioned view"
)
)
checkError(
exception = intercept[AnalysisException] {
sql("SHOW CREATE TABLE v1 AS SERDE")
},
- errorClass = "_LEGACY_ERROR_TEMP_1275",
+ errorClass =
"UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNSUPPORTED_FEATURE",
+ sqlState = "0A000",
parameters = Map(
- "table" -> s"`$SESSION_CATALOG_NAME`.`default`.`v1`",
- "features" -> " - partitioned view"
+ "tableName" -> s"`$SESSION_CATALOG_NAME`.`default`.`v1`",
+ "unsupportedFeatures" -> " - partitioned view"
)
)
}
diff --git
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowCreateTableSuite.scala
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowCreateTableSuite.scala
index 3dc73e116152..4c6252128094 100644
---
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowCreateTableSuite.scala
+++
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowCreateTableSuite.scala
@@ -365,9 +365,10 @@ class ShowCreateTableSuite extends
v1.ShowCreateTableSuiteBase with CommandSuite
exception = intercept[AnalysisException] {
checkCreateSparkTableAsHive("t1")
},
- errorClass = "_LEGACY_ERROR_TEMP_1273",
+ errorClass =
"UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNSUPPORTED_SERDE_CONFIGURATION",
+ sqlState = "0A000",
parameters = Map(
- "table" -> "t1",
+ "tableName" -> "`spark_catalog`.`default`.`t1`",
"configs" -> (" SERDE:
org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe " +
"INPUTFORMAT: org.apache.hadoop.hive.ql.io.RCFileInputFormat " +
"OUTPUTFORMAT: org.apache.hadoop.hive.ql.io.RCFileOutputFormat"))
@@ -437,8 +438,9 @@ class ShowCreateTableSuite extends
v1.ShowCreateTableSuiteBase with CommandSuite
exception = intercept[AnalysisException] {
sql("SHOW CREATE TABLE t1")
},
- errorClass = "_LEGACY_ERROR_TEMP_1272",
- parameters = Map("table" -> "`spark_catalog`.`default`.`t1`")
+ errorClass =
"UNSUPPORTED_SHOW_CREATE_TABLE.ON_TRANSACTIONAL_HIVE_TABLE",
+ sqlState = "0A000",
+ parameters = Map("tableName" -> "`spark_catalog`.`default`.`t1`")
)
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]