This is an automated email from the ASF dual-hosted git repository.

beliefer pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new e955a5979cd3 [SPARK-46406][SQL] Assign a name to the error class _LEGACY_ERROR_TEMP_1023
e955a5979cd3 is described below

commit e955a5979cd3623c92e57df0c1bfc341043ee754
Author: Jiaan Geng <belie...@163.com>
AuthorDate: Sun Dec 17 09:19:46 2023 +0800

    [SPARK-46406][SQL] Assign a name to the error class _LEGACY_ERROR_TEMP_1023
    
    ### What changes were proposed in this pull request?
    Based on the suggestion at https://github.com/apache/spark/pull/43910#discussion_r1412089938,
    this PR assigns the name `FUNCTION_WITH_UNSUPPORTED_SYNTAX` (under `INVALID_SQL_SYNTAX`) to
    the error class `_LEGACY_ERROR_TEMP_1023`.
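
    For example (illustrative; the wording follows the updated error template and golden files
    in this patch), `SELECT percent_rank(DISTINCT a) OVER () FROM t` now fails with the error
    class `INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX` and the message
    "The function `percent_rank` does not support DISTINCT." instead of the unnamed
    `_LEGACY_ERROR_TEMP_1023`.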
    
    ### Why are the changes needed?
    Replacing the temporary error class `_LEGACY_ERROR_TEMP_1023` with the named class
    `INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX` gives users a stable, documented
    error condition instead of an internal placeholder.
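
    As a sketch of why a stable name matters (hypothetical caller code; assumes an active
    `SparkSession` named `spark` and a table `t` with a column `a`), callers can match on the
    error class instead of parsing message text:

        import org.apache.spark.sql.AnalysisException

        try {
          // hex() does not support DISTINCT, so analysis fails with the new error class.
          spark.sql("SELECT hex(DISTINCT a) FROM t").collect()
        } catch {
          case e: AnalysisException
              if e.getErrorClass == "INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX" =>
            // Handle the "function does not support this syntax" case explicitly.
            println(e.getMessage)
        }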
    
    ### Does this PR introduce _any_ user-facing change?
    'No'.
    
    ### How was this patch tested?
    Updated the existing test cases in `AnalysisErrorSuite` and the affected SQL golden files
    (`percentiles.sql.out`, `udaf/udaf-group-by.sql.out`).
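
    The SQL golden files can typically be regenerated with
    `SPARK_GENERATE_GOLDEN_FILES=1 build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite"`
    (standard workflow for these tests, not specific to this patch).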
    
    ### Was this patch authored or co-authored using generative AI tooling?
    'No'.
    
    Closes #44355 from beliefer/SPARK-46406.
    
    Authored-by: Jiaan Geng <belie...@163.com>
    Signed-off-by: Jiaan Geng <belie...@163.com>
---
 .../src/main/resources/error/error-classes.json    |  10 +-
 ...or-conditions-invalid-sql-syntax-error-class.md |   4 +
 .../spark/sql/errors/QueryCompilationErrors.scala  |   4 +-
 .../sql/catalyst/analysis/AnalysisErrorSuite.scala | 126 ++++++++++++++-------
 .../sql-tests/analyzer-results/percentiles.sql.out |  20 ++--
 .../sql-tests/results/percentiles.sql.out          |  20 ++--
 .../sql-tests/results/udaf/udaf-group-by.sql.out   |   5 +-
 7 files changed, 126 insertions(+), 63 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-classes.json b/common/utils/src/main/resources/error/error-classes.json
index 2aa5420eb22c..b4a3031c06c9 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -2229,6 +2229,11 @@
           "Partition key <partKey> must set value."
         ]
       },
+      "FUNCTION_WITH_UNSUPPORTED_SYNTAX" : {
+        "message" : [
+          "The function <prettyName> does not support <syntax>."
+        ]
+      },
       "INVALID_COLUMN_REFERENCE" : {
         "message" : [
           "Expected a column reference for transform <transform>: <expr>."
@@ -4320,11 +4325,6 @@
       "count(<targetString>.*) is not allowed. Please use count(*) or expand 
the columns manually, e.g. count(col1, col2)."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1023" : {
-    "message" : [
-      "Function <prettyName> does not support <syntax>."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1024" : {
     "message" : [
       "FILTER expression is non-deterministic, it cannot be used in aggregate 
functions."
diff --git a/docs/sql-error-conditions-invalid-sql-syntax-error-class.md b/docs/sql-error-conditions-invalid-sql-syntax-error-class.md
index d9be7bad1032..93bd5c24c9d3 100644
--- a/docs/sql-error-conditions-invalid-sql-syntax-error-class.md
+++ b/docs/sql-error-conditions-invalid-sql-syntax-error-class.md
@@ -45,6 +45,10 @@ CREATE TEMPORARY FUNCTION with IF NOT EXISTS is not allowed.
 
 Partition key `<partKey>` must set value.
 
+## FUNCTION_WITH_UNSUPPORTED_SYNTAX
+
+The function `<prettyName>` does not support `<syntax>`.
+
 ## INVALID_COLUMN_REFERENCE
 
 Expected a column reference for transform `<transform>`: `<expr>`.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 5f49fe03cba7..a2ce6cc16393 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -611,8 +611,8 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
 
  def functionWithUnsupportedSyntaxError(prettyName: String, syntax: String): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1023",
-      messageParameters = Map("prettyName" -> prettyName, "syntax" -> syntax))
+      errorClass = "INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+      messageParameters = Map("prettyName" -> toSQLId(prettyName), "syntax" -> toSQLStmt(syntax)))
   }
 
   def nonDeterministicFilterInAggregateError(): Throwable = {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
index 0676d3834794..ac263230f127 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
@@ -29,6 +29,7 @@ import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
 import org.apache.spark.sql.catalyst.plans.{AsOfJoinDirection, Cross, Inner, LeftOuter, RightOuter}
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, GenericArrayData, MapData}
+import org.apache.spark.sql.errors.DataTypeErrorsBase
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.types.UTF8String
@@ -118,7 +119,7 @@ case class TestFunctionWithTypeCheckFailure(
 
 case class UnresolvedTestPlan() extends UnresolvedLeafNode
 
-class AnalysisErrorSuite extends AnalysisTest {
+class AnalysisErrorSuite extends AnalysisTest with DataTypeErrorsBase {
   import TestRelations._
 
   def errorTest(
@@ -241,56 +242,105 @@ class AnalysisErrorSuite extends AnalysisTest {
     "window aggregate function with filter predicate is not supported" :: Nil
   )
 
-  errorTest(
-    "distinct function",
-    CatalystSqlParser.parsePlan("SELECT hex(DISTINCT a) FROM TaBlE"),
-    "Function hex does not support DISTINCT" :: Nil)
+  test("distinct function") {
+    assertAnalysisErrorClass(
+      CatalystSqlParser.parsePlan("SELECT hex(DISTINCT a) FROM TaBlE"),
+      expectedErrorClass = "INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+      expectedMessageParameters = Map(
+        "prettyName" -> toSQLId("hex"),
+        "syntax" -> toSQLStmt("DISTINCT")),
+      Array(ExpectedContext("hex(DISTINCT a)", 7, 21)))
+  }
 
-  errorTest(
-    "non aggregate function with filter predicate",
-    CatalystSqlParser.parsePlan("SELECT hex(a) FILTER (WHERE c = 1) FROM 
TaBlE2"),
-    "Function hex does not support FILTER clause" :: Nil)
+  test("non aggregate function with filter predicate") {
+    assertAnalysisErrorClass(
+      CatalystSqlParser.parsePlan("SELECT hex(a) FILTER (WHERE c = 1) FROM 
TaBlE2"),
+      expectedErrorClass = 
"INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+      expectedMessageParameters = Map(
+        "prettyName" -> toSQLId("hex"),
+        "syntax" -> toSQLStmt("FILTER CLAUSE")),
+      Array(ExpectedContext("hex(a) FILTER (WHERE c = 1)", 7, 33)))
+  }
 
-  errorTest(
-    "distinct window function",
-    CatalystSqlParser.parsePlan("SELECT percent_rank(DISTINCT a) OVER () FROM 
TaBlE"),
-    "Function percent_rank does not support DISTINCT" :: Nil)
+  test("distinct window function") {
+    assertAnalysisErrorClass(
+      CatalystSqlParser.parsePlan("SELECT percent_rank(DISTINCT a) OVER () 
FROM TaBlE"),
+      expectedErrorClass = 
"INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+      expectedMessageParameters = Map(
+        "prettyName" -> toSQLId("percent_rank"),
+        "syntax" -> toSQLStmt("DISTINCT")),
+      Array(ExpectedContext("percent_rank(DISTINCT a) OVER ()", 7, 38)))
+  }
 
-  errorTest(
-    "window function with filter predicate",
-    CatalystSqlParser.parsePlan("SELECT percent_rank(a) FILTER (WHERE c > 1) 
OVER () FROM TaBlE2"),
-    "Function percent_rank does not support FILTER clause" :: Nil)
+  test("window function with filter predicate") {
+    assertAnalysisErrorClass(
+      CatalystSqlParser.parsePlan(
+        "SELECT percent_rank(a) FILTER (WHERE c > 1) OVER () FROM TaBlE2"),
+      expectedErrorClass = "INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+      expectedMessageParameters = Map(
+        "prettyName" -> toSQLId("percent_rank"),
+        "syntax" -> toSQLStmt("FILTER CLAUSE")),
+      Array(ExpectedContext("percent_rank(a) FILTER (WHERE c > 1) OVER ()", 7, 
50)))
+  }
 
-  errorTest(
-    "higher order function with filter predicate",
-    CatalystSqlParser.parsePlan("SELECT aggregate(array(1, 2, 3), 0, (acc, x) 
-> acc + x) " +
-      "FILTER (WHERE c > 1)"),
-    "Function aggregate does not support FILTER clause" :: Nil)
+  test("higher order function with filter predicate") {
+    assertAnalysisErrorClass(
+      CatalystSqlParser.parsePlan("SELECT aggregate(array(1, 2, 3), 0, (acc, 
x) -> acc + x) " +
+        "FILTER (WHERE c > 1)"),
+      expectedErrorClass = 
"INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+      expectedMessageParameters = Map(
+        "prettyName" -> toSQLId("aggregate"),
+        "syntax" -> toSQLStmt("FILTER CLAUSE")),
+      Array(ExpectedContext(
+        "aggregate(array(1, 2, 3), 0, (acc, x) -> acc + x) FILTER (WHERE c > 
1)", 7, 76)))
+  }
 
   errorTest(
     "non-deterministic filter predicate in aggregate functions",
     CatalystSqlParser.parsePlan("SELECT count(a) FILTER (WHERE rand(int(c)) > 
1) FROM TaBlE2"),
     "FILTER expression is non-deterministic, it cannot be used in aggregate 
functions" :: Nil)
 
-  errorTest(
-    "function don't support ignore nulls",
-    CatalystSqlParser.parsePlan("SELECT hex(a) IGNORE NULLS FROM TaBlE2"),
-    "Function hex does not support IGNORE NULLS" :: Nil)
+  test("function don't support ignore nulls") {
+    assertAnalysisErrorClass(
+      CatalystSqlParser.parsePlan("SELECT hex(a) IGNORE NULLS FROM TaBlE2"),
+      expectedErrorClass = "INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+      expectedMessageParameters = Map(
+        "prettyName" -> toSQLId("hex"),
+        "syntax" -> toSQLStmt("IGNORE NULLS")),
+      Array(ExpectedContext("hex(a) IGNORE NULLS", 7, 25)))
+  }
 
-  errorTest(
-    "some window function don't support ignore nulls",
-    CatalystSqlParser.parsePlan("SELECT percent_rank(a) IGNORE NULLS FROM 
TaBlE2"),
-    "Function percent_rank does not support IGNORE NULLS" :: Nil)
+  test("some window function don't support ignore nulls") {
+    assertAnalysisErrorClass(
+      CatalystSqlParser.parsePlan("SELECT percent_rank(a) IGNORE NULLS FROM 
TaBlE2"),
+      expectedErrorClass = 
"INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+      expectedMessageParameters = Map(
+        "prettyName" -> toSQLId("percent_rank"),
+        "syntax" -> toSQLStmt("IGNORE NULLS")),
+      Array(ExpectedContext("percent_rank(a) IGNORE NULLS", 7, 34)))
+  }
 
-  errorTest(
-    "aggregate function don't support ignore nulls",
-    CatalystSqlParser.parsePlan("SELECT count(a) IGNORE NULLS FROM TaBlE2"),
-    "Function count does not support IGNORE NULLS" :: Nil)
+  test("aggregate function don't support ignore nulls") {
+    assertAnalysisErrorClass(
+      CatalystSqlParser.parsePlan("SELECT count(a) IGNORE NULLS FROM TaBlE2"),
+      expectedErrorClass = "INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+      expectedMessageParameters = Map(
+        "prettyName" -> toSQLId("count"),
+        "syntax" -> toSQLStmt("IGNORE NULLS")),
+      Array(ExpectedContext("count(a) IGNORE NULLS", 7, 27)))
+  }
 
-  errorTest(
-    "higher order function don't support ignore nulls",
-    CatalystSqlParser.parsePlan("SELECT aggregate(array(1, 2, 3), 0, (acc, x) 
-> acc + x) " +
-      "IGNORE NULLS"), "Function aggregate does not support IGNORE NULLS" :: 
Nil)
+  test("higher order function don't support ignore nulls") {
+    assertAnalysisErrorClass(
+      CatalystSqlParser.parsePlan(
+        "SELECT aggregate(array(1, 2, 3), 0, (acc, x) -> acc + x) IGNORE 
NULLS"),
+      expectedErrorClass = 
"INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+      expectedMessageParameters = Map(
+        "prettyName" -> toSQLId("aggregate"),
+        "syntax" -> toSQLStmt("IGNORE NULLS")),
+      Array(ExpectedContext(
+        "aggregate(array(1, 2, 3), 0, (acc, x) -> acc + x) IGNORE NULLS", 7, 
68)))
+  }
 
   errorClassTest(
     name = "nested aggregate functions",
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/percentiles.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/percentiles.sql.out
index 90ef5ac35a1e..0e63ed9e126e 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/percentiles.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/percentiles.sql.out
@@ -156,9 +156,10 @@ FROM aggr
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1023",
+  "errorClass" : "INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "prettyName" : "round",
+    "prettyName" : "`round`",
     "syntax" : "WITHIN GROUP (ORDER BY ...)"
   },
   "queryContext" : [ {
@@ -178,9 +179,10 @@ FROM aggr
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1023",
+  "errorClass" : "INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "prettyName" : "round",
+    "prettyName" : "`round`",
     "syntax" : "WITHIN GROUP (ORDER BY ...)"
   },
   "queryContext" : [ {
@@ -200,9 +202,10 @@ FROM aggr
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1023",
+  "errorClass" : "INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "prettyName" : "percentile",
+    "prettyName" : "`percentile`",
     "syntax" : "WITHIN GROUP (ORDER BY ...)"
   },
   "queryContext" : [ {
@@ -222,9 +225,10 @@ FROM aggr
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1023",
+  "errorClass" : "INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "prettyName" : "percentile",
+    "prettyName" : "`percentile`",
     "syntax" : "WITHIN GROUP (ORDER BY ...)"
   },
   "queryContext" : [ {
diff --git a/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out b/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out
index 5dc430522e51..fc951308563f 100644
--- a/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out
@@ -122,9 +122,10 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1023",
+  "errorClass" : "INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "prettyName" : "round",
+    "prettyName" : "`round`",
     "syntax" : "WITHIN GROUP (ORDER BY ...)"
   },
   "queryContext" : [ {
@@ -146,9 +147,10 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1023",
+  "errorClass" : "INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "prettyName" : "round",
+    "prettyName" : "`round`",
     "syntax" : "WITHIN GROUP (ORDER BY ...)"
   },
   "queryContext" : [ {
@@ -170,9 +172,10 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1023",
+  "errorClass" : "INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "prettyName" : "percentile",
+    "prettyName" : "`percentile`",
     "syntax" : "WITHIN GROUP (ORDER BY ...)"
   },
   "queryContext" : [ {
@@ -194,9 +197,10 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1023",
+  "errorClass" : "INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "prettyName" : "percentile",
+    "prettyName" : "`percentile`",
     "syntax" : "WITHIN GROUP (ORDER BY ...)"
   },
   "queryContext" : [ {
diff --git a/sql/core/src/test/resources/sql-tests/results/udaf/udaf-group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/udaf/udaf-group-by.sql.out
index 84040b6d142c..1caeac58ab0b 100644
--- a/sql/core/src/test/resources/sql-tests/results/udaf/udaf-group-by.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udaf/udaf-group-by.sql.out
@@ -160,9 +160,10 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1023",
+  "errorClass" : "INVALID_SQL_SYNTAX.FUNCTION_WITH_UNSUPPORTED_SYNTAX",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "prettyName" : "pythonudaf",
+    "prettyName" : "`pythonudaf`",
     "syntax" : "DISTINCT"
   },
   "queryContext" : [ {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
