This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 1996a94b09f [SPARK-41389][CORE][SQL] Reuse `WRONG_NUM_ARGS` instead of `_LEGACY_ERROR_TEMP_1044`
1996a94b09f is described below

commit 1996a94b09fe1f450eb33ddb23b16af090bc4d1b
Author: yangjie01 <yangji...@baidu.com>
AuthorDate: Mon Dec 5 18:04:51 2022 +0300

    [SPARK-41389][CORE][SQL] Reuse `WRONG_NUM_ARGS` instead of `_LEGACY_ERROR_TEMP_1044`
    
    ### What changes were proposed in this pull request?
    This PR aims to reuse the error class `WRONG_NUM_ARGS` instead of `_LEGACY_ERROR_TEMP_1044`.
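    
    For illustration, a minimal repro sketch, assuming a spark-shell session bound to `spark` (the literal arguments are invented; only the arity matters). The `string` cast alias accepts exactly one argument, so a two-argument call now reports `WRONG_NUM_ARGS`, matching the golden-file diff below:
    
        // Assumed spark-shell session; string(...) is the one-argument cast alias.
        spark.sql("SELECT string('a', 'b')")
        // Analysis now fails with an AnalysisException carrying error class
        // WRONG_NUM_ARGS and parameters functionName = `string`, expectedNum = 1,
        // actualNum = 2, instead of the former _LEGACY_ERROR_TEMP_1044.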
    
    ### Why are the changes needed?
    Using proper names for error classes improves the user experience with Spark SQL.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Pass GitHub Actions.
    
    Closes #38913 from LuciferYang/SPARK-41389.
    
    Authored-by: yangjie01 <yangji...@baidu.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json                    | 5 -----
 .../org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala   | 5 +++--
 .../scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala  | 6 ------
 .../resources/sql-tests/results/sql-compatibility-functions.sql.out | 6 ++++--
 4 files changed, 7 insertions(+), 15 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 7d5c272a77f..19ab5ada2b5 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -2011,11 +2011,6 @@
       "Invalid arguments for function <name>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1044" : {
-    "message" : [
-      "Function <name> accepts only one argument."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1045" : {
     "message" : [
       "ALTER TABLE SET LOCATION does not support partition for v2 tables."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index 3817f00d09d..be16eaec6ac 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -896,8 +896,9 @@ object FunctionRegistry {
       name: String,
       dataType: DataType): (String, (ExpressionInfo, FunctionBuilder)) = {
     val builder = (args: Seq[Expression]) => {
-      if (args.size != 1) {
-        throw QueryCompilationErrors.functionAcceptsOnlyOneArgumentError(name)
+      val argSize = args.size
+      if (argSize != 1) {
+        throw QueryCompilationErrors.invalidFunctionArgumentsError(name, "1", argSize)
       }
       Cast(args.head, dataType)
     }
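
For context, the hunk above is in the builder that FunctionRegistry uses for single-argument cast aliases such as string(expr), which is why the golden-file diff further below exercises `string` with two arguments. A minimal sketch of the builder as it reads after this change, assuming it stays inside FunctionRegistry where the private[sql] error helpers are visible (surrounding names are taken from the hunk's context lines):

    // Sketch only, not the verbatim Spark method: builds a one-argument cast.
    // `name` and `dataType` are the parameters of the enclosing registration helper.
    val builder = (args: Seq[Expression]) => {
      val argSize = args.size
      if (argSize != 1) {
        // Reuses the shared helper, which raises WRONG_NUM_ARGS,
        // instead of the removed _LEGACY_ERROR_TEMP_1044 path.
        throw QueryCompilationErrors.invalidFunctionArgumentsError(name, "1", argSize)
      }
      Cast(args.head, dataType)
    }
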
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 2e20d7aec8d..ed08e33829e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -663,12 +663,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
     }
   }
 
-  def functionAcceptsOnlyOneArgumentError(name: String): Throwable = {
-    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1044",
-      messageParameters = Map("name" -> name))
-  }
-
   def alterV2TableSetLocationWithPartitionNotSupportedError(): Throwable = {
     new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1045",
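
For reference, the call sites now go through invalidFunctionArgumentsError, a helper that already exists in this file. A plausible sketch of that helper, inferred from the message parameters in the golden-file output below; the exact implementation in QueryCompilationErrors.scala may differ:

    // Sketch, not the verbatim Spark code: raises WRONG_NUM_ARGS with the quoted
    // function name plus the expected and actual argument counts.
    def invalidFunctionArgumentsError(
        name: String, expectedNum: String, actualNum: Int): Throwable = {
      new AnalysisException(
        errorClass = "WRONG_NUM_ARGS",
        messageParameters = Map(
          "functionName" -> toSQLId(name),  // quoted as `string` in the test output
          "expectedNum" -> expectedNum,
          "actualNum" -> actualNum.toString))
    }
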
diff --git a/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out
index e0d5874d058..319ac059385 100644
--- a/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out
@@ -94,9 +94,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1044",
+  "errorClass" : "WRONG_NUM_ARGS",
   "messageParameters" : {
-    "name" : "string"
+    "actualNum" : "2",
+    "expectedNum" : "1",
+    "functionName" : "`string`"
   },
   "queryContext" : [ {
     "objectType" : "",


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
