This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new e72ce91250a [SPARK-44987][SQL] Assign a name to the error class 
`_LEGACY_ERROR_TEMP_1100`
e72ce91250a is described below

commit e72ce91250a9a2c40fd5ed55a50dbc46e4e7e46d
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Thu Aug 31 22:50:21 2023 +0300

    [SPARK-44987][SQL] Assign a name to the error class 
`_LEGACY_ERROR_TEMP_1100`
    
    ### What changes were proposed in this pull request?
    In the PR, I propose to assign the name `NON_FOLDABLE_ARGUMENT` to the 
legacy error class `_LEGACY_ERROR_TEMP_1100`, and improve the error message 
format: make it less restrictive.
    
    ### Why are the changes needed?
    1. To avoid confusing users with a slightly restrictive error message about 
literals.
    2. To assign a proper name as a part of the activity in SPARK-37935.
    
    ### Does this PR introduce _any_ user-facing change?
    No, unless the user's code depends on the error class name and message parameters.
    
    ### How was this patch tested?
    By running the modified and affected tests:
    ```
    $ build/sbt "test:testOnly *.StringFunctionsSuite"
    $ PYSPARK_PYTHON=python3 build/sbt "sql/testOnly 
org.apache.spark.sql.SQLQueryTestSuite"
    $ build/sbt "core/testOnly *SparkThrowableSuite"
    ```
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #42737 from MaxGekk/assign-name-_LEGACY_ERROR_TEMP_1100.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../src/main/resources/error/error-classes.json    | 11 ++++---
 docs/sql-error-conditions.md                       |  6 ++++
 .../catalyst/expressions/datetimeExpressions.scala |  2 +-
 .../sql/catalyst/expressions/mathExpressions.scala |  4 +--
 .../expressions/numberFormatExpressions.scala      |  2 +-
 .../spark/sql/errors/QueryCompilationErrors.scala  | 14 +++++----
 .../ceil-floor-with-scale-param.sql.out            | 36 ++++++++++++----------
 .../sql-tests/analyzer-results/extract.sql.out     | 18 ++++++-----
 .../results/ceil-floor-with-scale-param.sql.out    | 36 ++++++++++++----------
 .../resources/sql-tests/results/extract.sql.out    | 18 ++++++-----
 .../apache/spark/sql/StringFunctionsSuite.scala    |  8 ++---
 11 files changed, 88 insertions(+), 67 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-classes.json 
b/common/utils/src/main/resources/error/error-classes.json
index 3b537cc3d9f..af78dd2f9f8 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -2215,6 +2215,12 @@
     ],
     "sqlState" : "42607"
   },
+  "NON_FOLDABLE_ARGUMENT" : {
+    "message" : [
+      "The function <funcName> requires the parameter <paramName> to be a 
foldable expression of the type <paramType>, but the actual argument is a 
non-foldable."
+    ],
+    "sqlState" : "22024"
+  },
   "NON_LAST_MATCHED_CLAUSE_OMIT_CONDITION" : {
     "message" : [
       "When there are more than one MATCHED clauses in a MERGE statement, only 
the last MATCHED clause can omit the condition."
@@ -4029,11 +4035,6 @@
       "<funcName>() doesn't support the <mode> mode. Acceptable modes are 
<permissiveMode> and <failFastMode>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1100" : {
-    "message" : [
-      "The '<argName>' parameter of function '<funcName>' needs to be a 
<requiredType> literal."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1103" : {
     "message" : [
       "Unsupported component type <clz> in arrays."
diff --git a/docs/sql-error-conditions.md b/docs/sql-error-conditions.md
index 89c27f72ea0..33072f6c440 100644
--- a/docs/sql-error-conditions.md
+++ b/docs/sql-error-conditions.md
@@ -1305,6 +1305,12 @@ Cannot call function `<functionName>` because named 
argument references are not
 
 It is not allowed to use an aggregate function in the argument of another 
aggregate function. Please use the inner aggregate function in a sub-query.
 
+### NON_FOLDABLE_ARGUMENT
+
+[SQLSTATE: 22024](sql-error-conditions-sqlstates.html#class-22-data-exception)
+
+The function `<funcName>` requires the parameter `<paramName>` to be a 
foldable expression of the type `<paramType>`, but the actual argument is a 
non-foldable.
+
 ### NON_LAST_MATCHED_CLAUSE_OMIT_CONDITION
 
 [SQLSTATE: 
42613](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 51ddf2b85f8..30a6bec1868 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -2934,7 +2934,7 @@ object Extract {
         }
       }
     } else {
-      throw QueryCompilationErrors.requireLiteralParameter(funcName, "field", 
"string")
+      throw QueryCompilationErrors.nonFoldableArgumentError(funcName, "field", 
StringType)
     }
   }
 }
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
index add59a38b72..89f354db5a9 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
@@ -283,10 +283,10 @@ trait CeilFloorExpressionBuilderBase extends 
ExpressionBuilder {
     } else if (numArgs == 2) {
       val scale = expressions(1)
       if (!(scale.foldable && scale.dataType == IntegerType)) {
-        throw QueryCompilationErrors.requireLiteralParameter(funcName, 
"scale", "int")
+        throw QueryCompilationErrors.nonFoldableArgumentError(funcName, 
"scale", IntegerType)
       }
       if (scale.eval() == null) {
-        throw QueryCompilationErrors.requireLiteralParameter(funcName, 
"scale", "int")
+        throw QueryCompilationErrors.nonFoldableArgumentError(funcName, 
"scale", IntegerType)
       }
       buildWithTwoParams(expressions(0), scale)
     } else {
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/numberFormatExpressions.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/numberFormatExpressions.scala
index 7875ed8fe20..38abcc41cbf 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/numberFormatExpressions.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/numberFormatExpressions.scala
@@ -247,7 +247,7 @@ object ToCharacterBuilder extends ExpressionBuilder {
         case _: DatetimeType => DateFormatClass(inputExpr, format)
         case _: BinaryType =>
           if (!(format.dataType == StringType && format.foldable)) {
-            throw QueryCompilationErrors.requireLiteralParameter(funcName, 
"format", "string")
+            throw QueryCompilationErrors.nonFoldableArgumentError(funcName, 
"format", StringType)
           }
           
format.eval().asInstanceOf[UTF8String].toString.toLowerCase(Locale.ROOT).trim 
match {
             case "base64" => Base64(inputExpr)
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index e579e5cf565..a97abf89434 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -1207,14 +1207,16 @@ private[sql] object QueryCompilationErrors extends 
QueryErrorsBase with Compilat
         "failFastMode" -> FailFastMode.name))
   }
 
-  def requireLiteralParameter(
-      funcName: String, argName: String, requiredType: String): Throwable = {
+  def nonFoldableArgumentError(
+      funcName: String,
+      paramName: String,
+      paramType: DataType): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1100",
+      errorClass = "NON_FOLDABLE_ARGUMENT",
       messageParameters = Map(
-        "argName" -> argName,
-        "funcName" -> funcName,
-        "requiredType" -> requiredType))
+        "funcName" -> toSQLId(funcName),
+        "paramName" -> toSQLId(paramName),
+        "paramType" -> toSQLType(paramType)))
   }
 
   def literalTypeUnsupportedForSourceTypeError(field: String, source: 
Expression): Throwable = {
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/ceil-floor-with-scale-param.sql.out
 
b/sql/core/src/test/resources/sql-tests/analyzer-results/ceil-floor-with-scale-param.sql.out
index c76b2e5284a..950584caa81 100644
--- 
a/sql/core/src/test/resources/sql-tests/analyzer-results/ceil-floor-with-scale-param.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/analyzer-results/ceil-floor-with-scale-param.sql.out
@@ -81,11 +81,12 @@ SELECT CEIL(2.5, null)
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1100",
+  "errorClass" : "NON_FOLDABLE_ARGUMENT",
+  "sqlState" : "22024",
   "messageParameters" : {
-    "argName" : "scale",
-    "funcName" : "ceil",
-    "requiredType" : "int"
+    "funcName" : "`ceil`",
+    "paramName" : "`scale`",
+    "paramType" : "\"INT\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -102,11 +103,12 @@ SELECT CEIL(2.5, 'a')
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1100",
+  "errorClass" : "NON_FOLDABLE_ARGUMENT",
+  "sqlState" : "22024",
   "messageParameters" : {
-    "argName" : "scale",
-    "funcName" : "ceil",
-    "requiredType" : "int"
+    "funcName" : "`ceil`",
+    "paramName" : "`scale`",
+    "paramType" : "\"INT\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -223,11 +225,12 @@ SELECT FLOOR(2.5, null)
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1100",
+  "errorClass" : "NON_FOLDABLE_ARGUMENT",
+  "sqlState" : "22024",
   "messageParameters" : {
-    "argName" : "scale",
-    "funcName" : "floor",
-    "requiredType" : "int"
+    "funcName" : "`floor`",
+    "paramName" : "`scale`",
+    "paramType" : "\"INT\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -244,11 +247,12 @@ SELECT FLOOR(2.5, 'a')
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1100",
+  "errorClass" : "NON_FOLDABLE_ARGUMENT",
+  "sqlState" : "22024",
   "messageParameters" : {
-    "argName" : "scale",
-    "funcName" : "floor",
-    "requiredType" : "int"
+    "funcName" : "`floor`",
+    "paramName" : "`scale`",
+    "paramType" : "\"INT\""
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/extract.sql.out 
b/sql/core/src/test/resources/sql-tests/analyzer-results/extract.sql.out
index 6085457deaa..eabe92ab12d 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/extract.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/extract.sql.out
@@ -932,11 +932,12 @@ select date_part(c, c) from t
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1100",
+  "errorClass" : "NON_FOLDABLE_ARGUMENT",
+  "sqlState" : "22024",
   "messageParameters" : {
-    "argName" : "field",
-    "funcName" : "date_part",
-    "requiredType" : "string"
+    "funcName" : "`date_part`",
+    "paramName" : "`field`",
+    "paramType" : "\"STRING\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -964,11 +965,12 @@ select date_part(i, i) from t
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1100",
+  "errorClass" : "NON_FOLDABLE_ARGUMENT",
+  "sqlState" : "22024",
   "messageParameters" : {
-    "argName" : "field",
-    "funcName" : "date_part",
-    "requiredType" : "string"
+    "funcName" : "`date_part`",
+    "paramName" : "`field`",
+    "paramType" : "\"STRING\""
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git 
a/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out
index d55e665a2a1..b15682b0a51 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out
@@ -94,11 +94,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1100",
+  "errorClass" : "NON_FOLDABLE_ARGUMENT",
+  "sqlState" : "22024",
   "messageParameters" : {
-    "argName" : "scale",
-    "funcName" : "ceil",
-    "requiredType" : "int"
+    "funcName" : "`ceil`",
+    "paramName" : "`scale`",
+    "paramType" : "\"INT\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -117,11 +118,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1100",
+  "errorClass" : "NON_FOLDABLE_ARGUMENT",
+  "sqlState" : "22024",
   "messageParameters" : {
-    "argName" : "scale",
-    "funcName" : "ceil",
-    "requiredType" : "int"
+    "funcName" : "`ceil`",
+    "paramName" : "`scale`",
+    "paramType" : "\"INT\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -253,11 +255,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1100",
+  "errorClass" : "NON_FOLDABLE_ARGUMENT",
+  "sqlState" : "22024",
   "messageParameters" : {
-    "argName" : "scale",
-    "funcName" : "floor",
-    "requiredType" : "int"
+    "funcName" : "`floor`",
+    "paramName" : "`scale`",
+    "paramType" : "\"INT\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -276,11 +279,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1100",
+  "errorClass" : "NON_FOLDABLE_ARGUMENT",
+  "sqlState" : "22024",
   "messageParameters" : {
-    "argName" : "scale",
-    "funcName" : "floor",
-    "requiredType" : "int"
+    "funcName" : "`floor`",
+    "paramName" : "`scale`",
+    "paramType" : "\"INT\""
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/extract.sql.out 
b/sql/core/src/test/resources/sql-tests/results/extract.sql.out
index cc6e8bcb36c..8416327ef31 100644
--- a/sql/core/src/test/resources/sql-tests/results/extract.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/extract.sql.out
@@ -714,11 +714,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1100",
+  "errorClass" : "NON_FOLDABLE_ARGUMENT",
+  "sqlState" : "22024",
   "messageParameters" : {
-    "argName" : "field",
-    "funcName" : "date_part",
-    "requiredType" : "string"
+    "funcName" : "`date_part`",
+    "paramName" : "`field`",
+    "paramType" : "\"STRING\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -745,11 +746,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1100",
+  "errorClass" : "NON_FOLDABLE_ARGUMENT",
+  "sqlState" : "22024",
   "messageParameters" : {
-    "argName" : "field",
-    "funcName" : "date_part",
-    "requiredType" : "string"
+    "funcName" : "`date_part`",
+    "paramName" : "`field`",
+    "paramType" : "\"STRING\""
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala
index 03b9053c71a..c61a62f293f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala
@@ -875,11 +875,11 @@ class StringFunctionsSuite extends QueryTest with 
SharedSparkSession {
         exception = intercept[AnalysisException] {
           df2.select(func(col("input"), col("format"))).collect()
         },
-        errorClass = "_LEGACY_ERROR_TEMP_1100",
+        errorClass = "NON_FOLDABLE_ARGUMENT",
         parameters = Map(
-          "argName" -> "format",
-          "funcName" -> funcName,
-          "requiredType" -> "string"))
+          "funcName" -> s"`$funcName`",
+          "paramName" -> "`format`",
+          "paramType" -> "\"STRING\""))
       checkError(
         exception = intercept[AnalysisException] {
           df2.select(func(col("input"), lit("invalid_format"))).collect()


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to