This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git

The following commit(s) were added to refs/heads/master by this push:
     new 636bd764aec [SPARK-40790][SQL][TESTS] Check error classes in DDL parsing tests
636bd764aec is described below

commit 636bd764aec2cac0f392dda995c0e8f4ffce5707
Author:     panbingkun <pbk1...@gmail.com>
AuthorDate: Mon Oct 17 11:36:22 2022 +0300

    [SPARK-40790][SQL][TESTS] Check error classes in DDL parsing tests

    ### What changes were proposed in this pull request?
    This PR replaces 'intercept' with error class checks in the following DDL parsing test suites:
    - AlterNamespaceSetPropertiesParserSuite
    - AlterTableDropPartitionParserSuite
    - AlterTableRenameParserSuite
    - AlterTableRecoverPartitionsParserSuite
    - DescribeTableParserSuite
    - TruncateTableParserSuite
    - AlterTableSetSerdeParserSuite
    - ShowPartitionsParserSuite

    ### Why are the changes needed?
    The changes migrate these tests onto the error class framework: each test now asserts the expected error class, its message parameters, and the query context, instead of matching substrings of raw error messages.

    ### Does this PR introduce _any_ user-facing change?
    No.

    ### How was this patch tested?
    By running the modified test suites:
    ```
    $ build/sbt "test:testOnly *ParserSuite"
    ```

    Closes #38280 from panbingkun/SPARK-40790.

    Authored-by: panbingkun <pbk1...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
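The change applied across all eight suites follows a single pattern. A minimal before/after sketch, lifted from the AlterNamespaceSetPropertiesParserSuite hunk below (checkError, parseException, and ExpectedContext are the test helpers used throughout the diff):
```
// Before: intercept the ParseException and match a substring of its message.
val e = intercept[ParseException] {
  parsePlan("ALTER NAMESPACE my_db SET PROPERTIES('key_without_value', 'key_with_value'='x')")
}
assert(e.getMessage.contains(
  "Operation not allowed: Values must be specified for key(s): [key_without_value]"))

// After: assert the structured error class, its parameters, and the query context.
val sql = "ALTER NAMESPACE my_db SET PROPERTIES('key_without_value', 'key_with_value'='x')"
checkError(
  exception = parseException(parsePlan)(sql),
  errorClass = "_LEGACY_ERROR_TEMP_0035",
  parameters = Map("message" -> "Values must be specified for key(s): [key_without_value]"),
  context = ExpectedContext(
    fragment = sql,
    start = 0,
    stop = 78))
```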
---
 .../AlterNamespaceSetPropertiesParserSuite.scala        | 16 ++++++++++------
 .../command/AlterTableDropPartitionParserSuite.scala    | 14 +++++++++-----
 .../AlterTableRecoverPartitionsParserSuite.scala        | 10 +++++-----
 .../execution/command/AlterTableRenameParserSuite.scala | 17 +++++++++++------
 .../command/AlterTableSetSerdeParserSuite.scala         | 14 ++++++++------
 .../execution/command/DescribeTableParserSuite.scala    | 14 +++++++++-----
 .../execution/command/ShowPartitionsParserSuite.scala   |  5 +----
 .../execution/command/TruncateTableParserSuite.scala    |  5 +----
 8 files changed, 54 insertions(+), 41 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceSetPropertiesParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceSetPropertiesParserSuite.scala
index 868dc275b8a..9d70ceeef57 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceSetPropertiesParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceSetPropertiesParserSuite.scala
@@ -19,10 +19,10 @@ package org.apache.spark.sql.execution.command
 
 import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedNamespace}
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
-import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.SetNamespaceProperties
 
 class AlterNamespaceSetPropertiesParserSuite extends AnalysisTest {
+
   test("set namespace properties") {
     Seq("DATABASE", "SCHEMA", "NAMESPACE").foreach { nsToken =>
       Seq("PROPERTIES", "DBPROPERTIES").foreach { propToken =>
@@ -40,10 +40,14 @@ class AlterNamespaceSetPropertiesParserSuite extends AnalysisTest {
   }
 
   test("property values must be set") {
-    val e = intercept[ParseException] {
-      parsePlan("ALTER NAMESPACE my_db SET PROPERTIES('key_without_value', 'key_with_value'='x')")
-    }
-    assert(e.getMessage.contains(
-      "Operation not allowed: Values must be specified for key(s): [key_without_value]"))
+    val sql = "ALTER NAMESPACE my_db SET PROPERTIES('key_without_value', 'key_with_value'='x')"
+    checkError(
+      exception = parseException(parsePlan)(sql),
+      errorClass = "_LEGACY_ERROR_TEMP_0035",
+      parameters = Map("message" -> "Values must be specified for key(s): [key_without_value]"),
+      context = ExpectedContext(
+        fragment = sql,
+        start = 0,
+        stop = 78))
   }
 }
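A note on the ExpectedContext offsets in the hunk above: start and stop are 0-based character offsets into the SQL text, and stop points at the last character of the fragment, so when the fragment is the whole statement, stop equals sql.length - 1. A quick sanity check of stop = 78 (a sketch; assumes the fragment is the full statement):
```
val sql = "ALTER NAMESPACE my_db SET PROPERTIES('key_without_value', 'key_with_value'='x')"
assert(sql.length == 79)      // the statement is 79 characters long
assert(sql.length - 1 == 78)  // so its last character sits at offset 78
```
The same arithmetic accounts for stop = 41, 103, and 47 in the hunks below.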
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionParserSuite.scala
index 4c60c80f4e0..e52c012a01b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionParserSuite.scala
@@ -19,11 +19,11 @@ package org.apache.spark.sql.execution.command
 
 import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedPartitionSpec, UnresolvedTable}
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
-import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.DropPartitions
 import org.apache.spark.sql.test.SharedSparkSession
 
 class AlterTableDropPartitionParserSuite extends AnalysisTest with SharedSparkSession {
+
   test("drop partition") {
     val sql = """
       |ALTER TABLE table_name DROP PARTITION
@@ -92,9 +92,13 @@ class AlterTableDropPartitionParserSuite extends AnalysisTest with SharedSparkSe
 
   test("drop partition from view") {
     val sql = "ALTER VIEW table_name DROP PARTITION (p=1)"
-    val errMsg = intercept[ParseException] {
-      parsePlan(sql)
-    }.getMessage
-    assert(errMsg.contains("Operation not allowed"))
+    checkError(
+      exception = parseException(parsePlan)(sql),
+      errorClass = "_LEGACY_ERROR_TEMP_0035",
+      parameters = Map("message" -> "ALTER VIEW ... DROP PARTITION"),
+      context = ExpectedContext(
+        fragment = sql,
+        start = 0,
+        stop = 41))
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRecoverPartitionsParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRecoverPartitionsParserSuite.scala
index 394392299ba..e0a25580652 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRecoverPartitionsParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRecoverPartitionsParserSuite.scala
@@ -19,17 +19,17 @@ package org.apache.spark.sql.execution.command
 
 import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedTable}
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
-import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.RecoverPartitions
 import org.apache.spark.sql.test.SharedSparkSession
 
 class AlterTableRecoverPartitionsParserSuite extends AnalysisTest with SharedSparkSession {
 
   test("recover partitions without table") {
-    val errMsg = intercept[ParseException] {
-      parsePlan("ALTER TABLE RECOVER PARTITIONS")
-    }.getMessage
-    assert(errMsg.contains("Syntax error at or near 'PARTITIONS'"))
+    val sql = "ALTER TABLE RECOVER PARTITIONS"
+    checkError(
+      exception = parseException(parsePlan)(sql),
+      errorClass = "PARSE_SYNTAX_ERROR",
+      parameters = Map("error" -> "'PARTITIONS'", "hint" -> ""))
   }
 
   test("recover partitions of a table") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRenameParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRenameParserSuite.scala
index e4087e63797..c2305feb511 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRenameParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRenameParserSuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark.sql.execution.command
 
 import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedTableOrView}
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
-import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.RenameTable
 
 class AlterTableRenameParserSuite extends AnalysisTest {
@@ -42,10 +41,16 @@ class AlterTableRenameParserSuite extends AnalysisTest {
   }
 
   test("invalid table identifiers") {
-    Seq(
-      "ALTER TABLE RENAME TO x.y.z",
-      "ALTER TABLE _ RENAME TO .z").foreach { renameCmd =>
-      intercept[ParseException] { parsePlan(renameCmd) }
-    }
+    val sql1 = "ALTER TABLE RENAME TO x.y.z"
+    checkError(
+      exception = parseException(parsePlan)(sql1),
+      errorClass = "PARSE_SYNTAX_ERROR",
+      parameters = Map("error" -> "'TO'", "hint" -> ""))
+
+    val sql2 = "ALTER TABLE _ RENAME TO .z"
+    checkError(
+      exception = parseException(parsePlan)(sql2),
+      errorClass = "PARSE_SYNTAX_ERROR",
+      parameters = Map("error" -> "'.'", "hint" -> ": extra input '.'"))
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableSetSerdeParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableSetSerdeParserSuite.scala
index b5143e8f92d..1e99801c255 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableSetSerdeParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableSetSerdeParserSuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark.sql.execution.command
 
 import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedTable}
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
-import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.SetTableSerDeProperties
 import org.apache.spark.sql.test.SharedSparkSession
 
@@ -30,11 +29,14 @@ class AlterTableSetSerdeParserSuite extends AnalysisTest with SharedSparkSession
   test("SerDe property values must be set") {
     val sql = "ALTER TABLE table_name SET SERDE 'serde' " +
       "WITH SERDEPROPERTIES('key_without_value', 'key_with_value'='x')"
-    val errMsg = intercept[ParseException] {
-      parsePlan(sql)
-    }.getMessage
-    assert(errMsg.contains("Operation not allowed"))
-    assert(errMsg.contains("key_without_value"))
+    checkError(
+      exception = parseException(parsePlan)(sql),
+      errorClass = "_LEGACY_ERROR_TEMP_0035",
+      parameters = Map("message" -> "Values must be specified for key(s): [key_without_value]"),
+      context = ExpectedContext(
+        fragment = sql,
+        start = 0,
+        stop = 103))
   }
 
   test("alter table SerDe properties by 'SET SERDE'") {
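Every rewritten test funnels through the same parseException(parsePlan)(sql) call. This helper comes from the shared test base class (AnalysisTest, which these suites extend); a minimal sketch of what it presumably looks like, with intercept supplied by ScalaTest (the exact signature in a given Spark version may differ):
```
// Sketch: run the parser on the SQL text and hand back the ParseException
// it is expected to throw; intercept fails the test if nothing is thrown.
protected def parseException(parser: String => Any)(sqlText: String): ParseException = {
  intercept[ParseException](parser(sqlText))
}
```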
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DescribeTableParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DescribeTableParserSuite.scala
index 5f3b3eda418..ee1b588741c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DescribeTableParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DescribeTableParserSuite.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.sql.execution.command
 
-import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedAttribute, UnresolvedTableOrView}
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
 import org.apache.spark.sql.catalyst.plans.logical.{DescribeColumn, DescribeRelation}
@@ -76,9 +75,14 @@ class DescribeTableParserSuite extends AnalysisTest {
         UnresolvedAttribute(Seq("col")),
         isExtended = true))
 
-    val caught = intercept[AnalysisException](
-      parsePlan("DESCRIBE TABLE t PARTITION (ds='1970-01-01') col"))
-    assert(caught.getMessage.contains(
-      "The feature is not supported: DESC TABLE COLUMN for a specific partition."))
+    val sql = "DESCRIBE TABLE t PARTITION (ds='1970-01-01') col"
+    checkError(
+      exception = parseException(parsePlan)(sql),
+      errorClass = "UNSUPPORTED_FEATURE.DESC_TABLE_COLUMN_PARTITION",
+      parameters = Map.empty,
+      context = ExpectedContext(
+        fragment = sql,
+        start = 0,
+        stop = 47))
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
index b5a294025a8..ef6009313dd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark.sql.execution.command
 
 import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedPartitionSpec, UnresolvedTable}
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
-import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.ShowPartitions
 
 class ShowPartitionsParserSuite extends AnalysisTest {
@@ -47,9 +46,7 @@ class ShowPartitionsParserSuite extends AnalysisTest {
 
   test("empty values in non-optional partition specs") {
     checkError(
-      exception = intercept[ParseException] {
-        parsePlan("SHOW PARTITIONS dbx.tab1 PARTITION (a='1', b)")
-      },
+      exception = parseException(parsePlan)("SHOW PARTITIONS dbx.tab1 PARTITION (a='1', b)"),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
       parameters = Map("inputString" -> "Partition key `b` must set value (can't be empty)."),
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableParserSuite.scala
index 0b9ad9628ad..183db06ac94 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableParserSuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark.sql.execution.command
 
 import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedPartitionSpec, UnresolvedTable}
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
-import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.{TruncatePartition, TruncateTable}
 
 class TruncateTableParserSuite extends AnalysisTest {
@@ -47,9 +46,7 @@ class TruncateTableParserSuite extends AnalysisTest {
 
   test("empty values in non-optional partition specs") {
     checkError(
-      exception = intercept[ParseException] {
-        parsePlan("TRUNCATE TABLE dbx.tab1 PARTITION (a='1', b)")
-      },
+      exception = parseException(parsePlan)("TRUNCATE TABLE dbx.tab1 PARTITION (a='1', b)"),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
       parameters = Map("inputString" -> "Partition key `b` must set value (can't be empty)."),

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org