This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 09b37114c9c [SPARK-43598][SQL] Assign a name to the error class 
_LEGACY_ERROR_TEMP_2400
09b37114c9c is described below

commit 09b37114c9c110f5e24b71f55ff40b26392a7d41
Author: Jiaan Geng <belie...@163.com>
AuthorDate: Mon May 22 09:47:35 2023 +0300

    [SPARK-43598][SQL] Assign a name to the error class _LEGACY_ERROR_TEMP_2400
    
    ### What changes were proposed in this pull request?
    The PR aims to assign a name to the error class _LEGACY_ERROR_TEMP_2400.
    
    ### Why are the changes needed?
    Improve the error framework.
    
    ### Does this PR introduce _any_ user-facing change?
    'No'.
    
    ### How was this patch tested?
    Existing test cases.
    
    Closes #41242 from beliefer/SPARK-43598.
    
    Authored-by: Jiaan Geng <belie...@163.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json               | 10 +++++-----
 .../org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala |  4 ++--
 .../test/resources/sql-tests/analyzer-results/limit.sql.out    |  4 ++--
 .../sql-tests/analyzer-results/postgreSQL/limit.sql.out        |  8 ++++----
 sql/core/src/test/resources/sql-tests/results/limit.sql.out    |  4 ++--
 .../test/resources/sql-tests/results/postgreSQL/limit.sql.out  |  8 ++++----
 6 files changed, 19 insertions(+), 19 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json 
b/core/src/main/resources/error/error-classes.json
index b130f6f6c93..8b8de042ccf 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -1213,6 +1213,11 @@
       }
     }
   },
+  "LIMIT_LIKE_EXPRESSION_IS_UNFOLDABLE" : {
+    "message" : [
+      "The <name> expression must evaluate to a constant value, but got 
<limitExpr>."
+    ]
+  },
   "LOCATION_ALREADY_EXISTS" : {
     "message" : [
       "Cannot name the managed table as <identifier>, as its associated 
location <location> already exists. Please pick a different table name, or 
remove the existing location first."
@@ -5216,11 +5221,6 @@
       "failed to evaluate expression <sqlExpr>: <msg>"
     ]
   },
-  "_LEGACY_ERROR_TEMP_2400" : {
-    "message" : [
-      "The <name> expression must evaluate to a constant value, but got 
<limitExpr>."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2401" : {
     "message" : [
       "The <name> expression must be integer type, but got <dataType>."
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index b67b4ee9912..3240f9bee56 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -85,10 +85,10 @@ trait CheckAnalysis extends PredicateHelper with 
LookupCatalog with QueryErrorsB
   private def checkLimitLikeClause(name: String, limitExpr: Expression): Unit 
= {
     limitExpr match {
       case e if !e.foldable => limitExpr.failAnalysis(
-        errorClass = "_LEGACY_ERROR_TEMP_2400",
+        errorClass = "LIMIT_LIKE_EXPRESSION_IS_UNFOLDABLE",
         messageParameters = Map(
           "name" -> name,
-          "limitExpr" -> limitExpr.sql))
+          "limitExpr" -> toSQLExpr(limitExpr)))
       case e if e.dataType != IntegerType => limitExpr.failAnalysis(
         errorClass = "_LEGACY_ERROR_TEMP_2401",
         messageParameters = Map(
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/limit.sql.out 
b/sql/core/src/test/resources/sql-tests/analyzer-results/limit.sql.out
index c38e4696605..3b2ddb5dae1 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/limit.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/limit.sql.out
@@ -124,9 +124,9 @@ SELECT * FROM testdata LIMIT key > 3
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2400",
+  "errorClass" : "LIMIT_LIKE_EXPRESSION_IS_UNFOLDABLE",
   "messageParameters" : {
-    "limitExpr" : "(spark_catalog.default.testdata.key > 3)",
+    "limitExpr" : "\"(key > 3)\"",
     "name" : "limit"
   },
   "queryContext" : [ {
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/postgreSQL/limit.sql.out
 
b/sql/core/src/test/resources/sql-tests/analyzer-results/postgreSQL/limit.sql.out
index 8964e7f2340..7ef2912cef2 100644
--- 
a/sql/core/src/test/resources/sql-tests/analyzer-results/postgreSQL/limit.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/analyzer-results/postgreSQL/limit.sql.out
@@ -141,9 +141,9 @@ select * from int8_tbl limit (case when random() < 0.5 then 
bigint(null) end)
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2400",
+  "errorClass" : "LIMIT_LIKE_EXPRESSION_IS_UNFOLDABLE",
   "messageParameters" : {
-    "limitExpr" : "CASE WHEN (_nondeterministic < CAST(0.5BD AS DOUBLE)) THEN 
CAST(NULL AS BIGINT) END",
+    "limitExpr" : "\"CASE WHEN (_nondeterministic < 0.5) THEN NULL END\"",
     "name" : "limit"
   },
   "queryContext" : [ {
@@ -161,9 +161,9 @@ select * from int8_tbl offset (case when random() < 0.5 
then bigint(null) end)
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2400",
+  "errorClass" : "LIMIT_LIKE_EXPRESSION_IS_UNFOLDABLE",
   "messageParameters" : {
-    "limitExpr" : "CASE WHEN (_nondeterministic < CAST(0.5BD AS DOUBLE)) THEN 
CAST(NULL AS BIGINT) END",
+    "limitExpr" : "\"CASE WHEN (_nondeterministic < 0.5) THEN NULL END\"",
     "name" : "offset"
   },
   "queryContext" : [ {
diff --git a/sql/core/src/test/resources/sql-tests/results/limit.sql.out 
b/sql/core/src/test/resources/sql-tests/results/limit.sql.out
index 1e21a88fe8a..9a288681526 100644
--- a/sql/core/src/test/resources/sql-tests/results/limit.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/limit.sql.out
@@ -125,9 +125,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2400",
+  "errorClass" : "LIMIT_LIKE_EXPRESSION_IS_UNFOLDABLE",
   "messageParameters" : {
-    "limitExpr" : "(spark_catalog.default.testdata.key > 3)",
+    "limitExpr" : "\"(key > 3)\"",
     "name" : "limit"
   },
   "queryContext" : [ {
diff --git 
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out 
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out
index f4f62be010a..1489399c782 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out
@@ -132,9 +132,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2400",
+  "errorClass" : "LIMIT_LIKE_EXPRESSION_IS_UNFOLDABLE",
   "messageParameters" : {
-    "limitExpr" : "CASE WHEN (_nondeterministic < CAST(0.5BD AS DOUBLE)) THEN 
CAST(NULL AS BIGINT) END",
+    "limitExpr" : "\"CASE WHEN (_nondeterministic < 0.5) THEN NULL END\"",
     "name" : "limit"
   },
   "queryContext" : [ {
@@ -154,9 +154,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2400",
+  "errorClass" : "LIMIT_LIKE_EXPRESSION_IS_UNFOLDABLE",
   "messageParameters" : {
-    "limitExpr" : "CASE WHEN (_nondeterministic < CAST(0.5BD AS DOUBLE)) THEN 
CAST(NULL AS BIGINT) END",
+    "limitExpr" : "\"CASE WHEN (_nondeterministic < 0.5) THEN NULL END\"",
     "name" : "offset"
   },
   "queryContext" : [ {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to