This is an automated email from the ASF dual-hosted git repository.
wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 912caf4264c [SPARK-41960][SQL] Assign name to _LEGACY_ERROR_TEMP_1056
912caf4264c is described below
commit 912caf4264cccedfb96dfd0a5d37fb635973ee83
Author: itholic <[email protected]>
AuthorDate: Wed Jan 18 17:23:12 2023 +0800
[SPARK-41960][SQL] Assign name to _LEGACY_ERROR_TEMP_1056
### What changes were proposed in this pull request?
This PR proposes to assign name to _LEGACY_ERROR_TEMP_1056,
"WRONG_COMMAND_FOR_OBJECT_TYPE".
### Why are the changes needed?
We should assign proper name to _LEGACY_ERROR_TEMP_*
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
`./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite"`
Closes #39480 from itholic/LEGACY_1056.
Authored-by: itholic <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
---
core/src/main/resources/error/error-classes.json | 10 ++++----
.../spark/sql/errors/QueryCompilationErrors.scala | 20 ++++++++++++----
.../apache/spark/sql/execution/command/ddl.scala | 16 +++++++++++--
.../spark/sql/execution/command/DDLSuite.scala | 12 ++++++++--
.../spark/sql/hive/execution/HiveDDLSuite.scala | 28 +++++++++++++++++-----
5 files changed, 67 insertions(+), 19 deletions(-)
diff --git a/core/src/main/resources/error/error-classes.json
b/core/src/main/resources/error/error-classes.json
index 3f0abe77d0b..e3904c1fe5a 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -1732,6 +1732,11 @@
"`writeStream` can be called only on streaming Dataset/DataFrame."
]
},
+ "WRONG_COMMAND_FOR_OBJECT_TYPE" : {
+ "message" : [
+ "The operation <operation> requires a <requiredType>. But <objectName> is a <foundType>. Use <alternative> instead."
+ ]
+ },
"WRONG_NUM_ARGS" : {
"message" : [
"Invalid number of arguments for the function <functionName>."
@@ -2257,11 +2262,6 @@
"The database name is not valid: <quoted>."
]
},
- "_LEGACY_ERROR_TEMP_1056" : {
- "message" : [
- "Cannot drop a view with DROP TABLE. Please use DROP VIEW instead."
- ]
- },
"_LEGACY_ERROR_TEMP_1057" : {
"message" : [
"SHOW COLUMNS with conflicting databases: '<dbA>' != '<dbB>'."
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 91412b760bd..5fe141e7286 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -777,10 +777,22 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
messageParameters = Map("database" -> quoted))
}
- def cannotDropViewWithDropTableError(): Throwable = {
- new AnalysisException(
- errorClass = "_LEGACY_ERROR_TEMP_1056",
- messageParameters = Map.empty)
+ def wrongCommandForObjectTypeError(
+ operation: String,
+ requiredType: String,
+ objectName: String,
+ foundType: String,
+ alternative: String): Throwable = {
+ new AnalysisException(
+ errorClass = "WRONG_COMMAND_FOR_OBJECT_TYPE",
+ messageParameters = Map(
+ "operation" -> operation,
+ "requiredType" -> requiredType,
+ "objectName" -> objectName,
+ "foundType" -> foundType,
+ "alternative" -> alternative
+ )
+ )
}
def showColumnsWithConflictDatabasesError(
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index 1f71a104707..fdd4f10c793 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -226,9 +226,21 @@ case class DropTableCommand(
// issue an exception.
catalog.getTableMetadata(tableName).tableType match {
case CatalogTableType.VIEW if !isView =>
- throw QueryCompilationErrors.cannotDropViewWithDropTableError()
+ throw QueryCompilationErrors.wrongCommandForObjectTypeError(
+ operation = "DROP TABLE",
+ requiredType = s"${CatalogTableType.EXTERNAL.name} or ${CatalogTableType.MANAGED.name}",
+ objectName = catalog.getTableMetadata(tableName).qualifiedName,
+ foundType = catalog.getTableMetadata(tableName).tableType.name,
+ alternative = "DROP VIEW"
+ )
case o if o != CatalogTableType.VIEW && isView =>
- throw QueryCompilationErrors.cannotDropViewWithDropTableError()
+ throw QueryCompilationErrors.wrongCommandForObjectTypeError(
+ operation = "DROP VIEW",
+ requiredType = CatalogTableType.VIEW.name,
+ objectName = catalog.getTableMetadata(tableName).qualifiedName,
+ foundType = o.name,
+ alternative = "DROP TABLE"
+ )
case _ =>
}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index f5d17b142e2..b1398f17e9e 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -984,8 +984,16 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
val e = intercept[AnalysisException] {
sql("DROP VIEW dbx.tab1")
}
- assert(e.getMessage.contains(
- "Cannot drop a view with DROP TABLE. Please use DROP VIEW instead"))
+ checkError(
+ exception = e,
+ errorClass = "WRONG_COMMAND_FOR_OBJECT_TYPE",
+ parameters = Map(
+ "alternative" -> "DROP TABLE",
+ "operation" -> "DROP VIEW",
+ "foundType" -> "EXTERNAL",
+ "requiredType" -> "VIEW",
+ "objectName" -> "spark_catalog.dbx.tab1")
+ )
}
protected def testSetProperties(isDatasourceTable: Boolean): Unit = {
diff --git
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index fbcc6f8caa9..445477dc024 100644
---
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -1052,9 +1052,17 @@ class HiveDDLSuite
test("drop table using drop view") {
withTable("tab1") {
sql("CREATE TABLE tab1(c1 int)")
- assertAnalysisError(
- "DROP VIEW tab1",
- "Cannot drop a view with DROP TABLE. Please use DROP VIEW instead")
+ assertAnalysisErrorClass(
+ sqlText = "DROP VIEW tab1",
+ errorClass = "WRONG_COMMAND_FOR_OBJECT_TYPE",
+ parameters = Map(
+ "alternative" -> "DROP TABLE",
+ "operation" -> "DROP VIEW",
+ "foundType" -> "MANAGED",
+ "requiredType" -> "VIEW",
+ "objectName" -> "spark_catalog.default.tab1"
+ )
+ )
}
}
@@ -1063,9 +1071,17 @@ class HiveDDLSuite
spark.range(10).write.saveAsTable("tab1")
withView("view1") {
sql("CREATE VIEW view1 AS SELECT * FROM tab1")
- assertAnalysisError(
- "DROP TABLE view1",
- "Cannot drop a view with DROP TABLE. Please use DROP VIEW instead")
+ assertAnalysisErrorClass(
+ sqlText = "DROP TABLE view1",
+ errorClass = "WRONG_COMMAND_FOR_OBJECT_TYPE",
+ parameters = Map(
+ "alternative" -> "DROP VIEW",
+ "operation" -> "DROP TABLE",
+ "foundType" -> "VIEW",
+ "requiredType" -> "EXTERNAL or MANAGED",
+ "objectName" -> "spark_catalog.default.view1"
+ )
+ )
}
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]