This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 29ff671933e [SPARK-37938][SQL][TESTS] Use error classes in the parsing 
errors of partitions
29ff671933e is described below

commit 29ff671933e3b432e69a26761bc79856f21b82c7
Author: panbingkun <pbk1...@gmail.com>
AuthorDate: Thu May 5 19:22:28 2022 +0300

    [SPARK-37938][SQL][TESTS] Use error classes in the parsing errors of 
partitions
    
    ### What changes were proposed in this pull request?
    Migrate the following errors in QueryParsingErrors to use error classes:
    
    - emptyPartitionKeyError => INVALID_SQL_SYNTAX
    - partitionTransformNotExpectedError => INVALID_SQL_SYNTAX
    - descColumnForPartitionUnsupportedError => 
UNSUPPORTED_FEATURE.DESC_TABLE_COLUMN_PARTITION
    - incompletePartitionSpecificationError => INVALID_SQL_SYNTAX
    
    ### Why are the changes needed?
    Porting parsing errors of partitions to new error framework, improve test 
coverage, and document expected error messages in tests.
    
    ### Does this PR introduce any user-facing change?
    No
    
    ### How was this patch tested?
    By running new test:
    ```
    $ build/sbt "sql/testOnly *QueryParsingErrorsSuite*"
    ```
    
    Closes #36416 from panbingkun/SPARK-37938.
    
    Authored-by: panbingkun <pbk1...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json   |  3 ++
 .../spark/sql/errors/QueryParsingErrors.scala      | 22 ++++++--
 .../spark/sql/catalyst/parser/DDLParserSuite.scala |  2 +-
 .../resources/sql-tests/results/describe.sql.out   |  2 +-
 .../spark/sql/errors/QueryErrorsSuiteBase.scala    | 16 ++++--
 .../spark/sql/errors/QueryParsingErrorsSuite.scala | 60 ++++++++++++++++++++++
 .../command/ShowPartitionsParserSuite.scala        | 22 +++++---
 .../command/TruncateTableParserSuite.scala         | 21 +++++---
 8 files changed, 125 insertions(+), 23 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json 
b/core/src/main/resources/error/error-classes.json
index 24b50c4209a..3a7bc757f73 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -206,6 +206,9 @@
       "AES_MODE" : {
         "message" : [ "AES-<mode> with the padding <padding> by the 
<functionName> function." ]
       },
+      "DESC_TABLE_COLUMN_PARTITION" : {
+        "message" : [ "DESC TABLE COLUMN for a specific partition." ]
+      },
       "DISTRIBUTE_BY" : {
         "message" : [ "DISTRIBUTE BY clause." ]
       },
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
index ed5773f4f82..1d15557c9d0 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
@@ -77,7 +77,11 @@ object QueryParsingErrors extends QueryErrorsBase {
   }
 
   def emptyPartitionKeyError(key: String, ctx: PartitionSpecContext): 
Throwable = {
-    new ParseException(s"Found an empty partition key '$key'.", ctx)
+    new ParseException(
+      errorClass = "INVALID_SQL_SYNTAX",
+      messageParameters =
+        Array(s"Partition key ${toSQLId(key)} must set value (can't be 
empty)."),
+      ctx)
   }
 
   def combinationQueryResultClausesUnsupportedError(ctx: 
QueryOrganizationContext): Throwable = {
@@ -243,7 +247,11 @@ object QueryParsingErrors extends QueryErrorsBase {
 
   def partitionTransformNotExpectedError(
       name: String, describe: String, ctx: ApplyTransformContext): Throwable = 
{
-    new ParseException(s"Expected a column reference for transform $name: 
$describe", ctx)
+    new ParseException(
+      errorClass = "INVALID_SQL_SYNTAX",
+      messageParameters =
+        Array(s"Expected a column reference for transform ${toSQLId(name)}: 
$describe"),
+      ctx)
   }
 
   def tooManyArgumentsForTransformError(name: String, ctx: 
ApplyTransformContext): Throwable = {
@@ -298,12 +306,18 @@ object QueryParsingErrors extends QueryErrorsBase {
   }
 
   def descColumnForPartitionUnsupportedError(ctx: DescribeRelationContext): 
Throwable = {
-    new ParseException("DESC TABLE COLUMN for a specific partition is not 
supported", ctx)
+    new ParseException(
+      errorClass = "UNSUPPORTED_FEATURE",
+      messageParameters = Array("DESC_TABLE_COLUMN_PARTITION"),
+      ctx)
   }
 
   def incompletePartitionSpecificationError(
       key: String, ctx: DescribeRelationContext): Throwable = {
-    new ParseException(s"PARTITION specification is incomplete: `$key`", ctx)
+    new ParseException(
+      errorClass = "INVALID_SQL_SYNTAX",
+      messageParameters = Array(s"PARTITION specification is incomplete: 
${toSQLId(key)}"),
+      ctx)
   }
 
   def computeStatisticsNotExpectedError(ctx: IdentifierContext): Throwable = {
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index ade8c61f79f..af939b0aa69 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -1201,7 +1201,7 @@ class DDLParserSuite extends AnalysisTest {
     val caught = intercept[AnalysisException](
       parsePlan("DESCRIBE TABLE t PARTITION (ds='1970-01-01') col"))
     assert(caught.getMessage.contains(
-        "DESC TABLE COLUMN for a specific partition is not supported"))
+        "The feature is not supported: DESC TABLE COLUMN for a specific 
partition."))
   }
 
   test("SPARK-17328 Fix NPE with EXPLAIN DESCRIBE TABLE") {
diff --git a/sql/core/src/test/resources/sql-tests/results/describe.sql.out 
b/sql/core/src/test/resources/sql-tests/results/describe.sql.out
index 9d1dba8780d..1219e472556 100644
--- a/sql/core/src/test/resources/sql-tests/results/describe.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/describe.sql.out
@@ -382,7 +382,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-PARTITION specification is incomplete: `d`(line 1, pos 0)
+[INVALID_SQL_SYNTAX] Invalid SQL syntax: PARTITION specification is 
incomplete: `d`(line 1, pos 0)
 
 == SQL ==
 DESC t PARTITION (c='Us', d)
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryErrorsSuiteBase.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryErrorsSuiteBase.scala
index eb7871d5559..8ae5cf29923 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryErrorsSuiteBase.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryErrorsSuiteBase.scala
@@ -49,17 +49,25 @@ trait QueryErrorsSuiteBase extends SharedSparkSession {
       errorSubClass: Option[String] = None,
       sqlState: String,
       message: String): Unit = {
-    val e = intercept[ParseException] {
+    val exception = intercept[ParseException] {
       sql(sqlText)
     }
+    checkParsingError(exception, errorClass, errorSubClass, sqlState, message)
+  }
 
+  def checkParsingError(
+      exception: Exception with SparkThrowable,
+      errorClass: String,
+      errorSubClass: Option[String] = None,
+      sqlState: String,
+      message: String): Unit = {
     val fullErrorClass = if (errorSubClass.isDefined) {
       errorClass + "." + errorSubClass.get
     } else {
       errorClass
     }
-    assert(e.getErrorClass === errorClass)
-    assert(e.getSqlState === sqlState)
-    assert(e.getMessage === s"""\n[$fullErrorClass] """ + message)
+    assert(exception.getErrorClass === errorClass)
+    assert(exception.getSqlState === sqlState)
+    assert(exception.getMessage === s"""\n[$fullErrorClass] """ + message)
   }
 }
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
index 4eaf609a610..50966db2a21 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
@@ -582,4 +582,64 @@ class QueryParsingErrorsSuite extends QueryTest with 
QueryErrorsSuiteBase {
           |---------------------------^^^
           |""".stripMargin)
   }
+
+  test("INVALID_SQL_SYNTAX: show table partition key must set value") {
+    validateParsingError(
+      sqlText = "SHOW TABLE EXTENDED IN default LIKE 'employee' PARTITION 
(grade)",
+      errorClass = "INVALID_SQL_SYNTAX",
+      sqlState = "42000",
+      message =
+        """Invalid SQL syntax: Partition key `grade` must set value (can't be 
empty).(line 1, pos 47)
+          |
+          |== SQL ==
+          |SHOW TABLE EXTENDED IN default LIKE 'employee' PARTITION (grade)
+          |-----------------------------------------------^^^
+          |""".stripMargin)
+  }
+
+  test("INVALID_SQL_SYNTAX: expected a column reference for transform bucket") 
{
+    validateParsingError(
+      sqlText =
+        "CREATE TABLE my_tab(a INT, b STRING) USING parquet PARTITIONED BY 
(bucket(32, a, 66))",
+      errorClass = "INVALID_SQL_SYNTAX",
+      sqlState = "42000",
+      message =
+        """Invalid SQL syntax: Expected a column reference for transform 
`bucket`: 66(line 1, pos 67)
+          |
+          |== SQL ==
+          |CREATE TABLE my_tab(a INT, b STRING) USING parquet PARTITIONED BY 
(bucket(32, a, 66))
+          
|-------------------------------------------------------------------^^^
+          |""".stripMargin)
+  }
+
+  test("UNSUPPORTED_FEATURE: DESC TABLE COLUMN for a specific partition") {
+    validateParsingError(
+      sqlText = "DESCRIBE TABLE EXTENDED customer PARTITION (grade = 'A') 
customer.age",
+      errorClass = "UNSUPPORTED_FEATURE",
+      errorSubClass = Some("DESC_TABLE_COLUMN_PARTITION"),
+      sqlState = "0A000",
+      message =
+        """The feature is not supported: DESC TABLE COLUMN for a specific 
partition""" +
+        """.(line 1, pos 0)""" +
+        """|
+           |
+           |== SQL ==
+           |DESCRIBE TABLE EXTENDED customer PARTITION (grade = 'A') 
customer.age
+           |^^^
+           |""".stripMargin)
+  }
+
+  test("INVALID_SQL_SYNTAX: PARTITION specification is incomplete") {
+    validateParsingError(
+      sqlText = "DESCRIBE TABLE EXTENDED customer PARTITION (grade)",
+      errorClass = "INVALID_SQL_SYNTAX",
+      sqlState = "42000",
+      message =
+        """Invalid SQL syntax: PARTITION specification is incomplete: 
`grade`(line 1, pos 0)
+          |
+          |== SQL ==
+          |DESCRIBE TABLE EXTENDED customer PARTITION (grade)
+          |^^^
+          |""".stripMargin)
+  }
 }
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
index 1c7b1282fde..8f75f5df7b9 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
@@ -21,10 +21,10 @@ import 
org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedPartition
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.ShowPartitions
+import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 import org.apache.spark.sql.execution.SparkSqlParser
-import org.apache.spark.sql.test.SharedSparkSession
 
-class ShowPartitionsParserSuite extends AnalysisTest with SharedSparkSession {
+class ShowPartitionsParserSuite extends AnalysisTest with QueryErrorsSuiteBase 
{
   test("SHOW PARTITIONS") {
     val commandName = "SHOW PARTITIONS"
     Seq(
@@ -48,10 +48,18 @@ class ShowPartitionsParserSuite extends AnalysisTest with 
SharedSparkSession {
   }
 
   test("empty values in non-optional partition specs") {
-    val e = intercept[ParseException] {
-      new SparkSqlParser().parsePlan(
-        "SHOW PARTITIONS dbx.tab1 PARTITION (a='1', b)")
-    }.getMessage
-    assert(e.contains("Found an empty partition key 'b'"))
+    checkParsingError(
+      exception = intercept[ParseException] {
+        new SparkSqlParser().parsePlan("SHOW PARTITIONS dbx.tab1 PARTITION 
(a='1', b)")
+      },
+      errorClass = "INVALID_SQL_SYNTAX",
+      sqlState = "42000",
+      message =
+        """Invalid SQL syntax: Partition key `b` must set value (can't be 
empty).(line 1, pos 25)
+          |
+          |== SQL ==
+          |SHOW PARTITIONS dbx.tab1 PARTITION (a='1', b)
+          |-------------------------^^^
+          |""".stripMargin)
   }
 }
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableParserSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableParserSuite.scala
index 7f4a48023c1..761e4222792 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableParserSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableParserSuite.scala
@@ -21,9 +21,9 @@ import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, 
UnresolvedPartition
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.{TruncatePartition, 
TruncateTable}
-import org.apache.spark.sql.test.SharedSparkSession
+import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 
-class TruncateTableParserSuite extends AnalysisTest with SharedSparkSession {
+class TruncateTableParserSuite extends AnalysisTest with QueryErrorsSuiteBase {
   test("truncate table") {
     comparePlans(
       parsePlan("TRUNCATE TABLE a.b.c"),
@@ -47,9 +47,18 @@ class TruncateTableParserSuite extends AnalysisTest with 
SharedSparkSession {
   }
 
   test("empty values in non-optional partition specs") {
-    val errMsg = intercept[ParseException] {
-      parsePlan("TRUNCATE TABLE dbx.tab1 PARTITION (a='1', b)")
-    }.getMessage
-    assert(errMsg.contains("Found an empty partition key 'b'"))
+    checkParsingError(
+      exception = intercept[ParseException] {
+        parsePlan("TRUNCATE TABLE dbx.tab1 PARTITION (a='1', b)")
+      },
+      errorClass = "INVALID_SQL_SYNTAX",
+      sqlState = "42000",
+      message =
+        """Invalid SQL syntax: Partition key `b` must set value (can't be 
empty).(line 1, pos 24)
+          |
+          |== SQL ==
+          |TRUNCATE TABLE dbx.tab1 PARTITION (a='1', b)
+          |------------------------^^^
+          |""".stripMargin)
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to