This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 45a51d6311a [SPARK-42316][SQL] Assign name to _LEGACY_ERROR_TEMP_2044
45a51d6311a is described below

commit 45a51d6311a041eb7480199a6da19f67d56367b7
Author: Hisoka <fanjiaemi...@qq.com>
AuthorDate: Wed Apr 5 10:57:58 2023 +0300

    [SPARK-42316][SQL] Assign name to _LEGACY_ERROR_TEMP_2044
    
    ### What changes were proposed in this pull request?
    This PR proposes to assign the name "BINARY_ARITHMETIC_OVERFLOW" to the error class _LEGACY_ERROR_TEMP_2044.
    
    ### Why are the changes needed?
    Assign a proper name to the legacy error class _LEGACY_ERROR_TEMP_2044.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
     ./build/sbt "testOnly org.apache.spark.sql.errors.QueryExecutionErrorsSuite"
    
    Closes #40609 from Hisoka-X/legacy_error_2044_.
    
    Authored-by: Hisoka <fanjiaemi...@qq.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json          | 11 ++++++-----
 .../apache/spark/sql/errors/QueryExecutionErrors.scala    |  6 +++---
 .../spark/sql/errors/QueryExecutionErrorsSuite.scala      | 15 +++++++++++++++
 3 files changed, 24 insertions(+), 8 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index d330ea09f30..d43996f6a49 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -35,6 +35,12 @@
     ],
     "sqlState" : "22003"
   },
+  "BINARY_ARITHMETIC_OVERFLOW" : {
+    "message" : [
+      "<value1> <symbol> <value2> caused overflow."
+    ],
+    "sqlState" : "22003"
+  },
   "CANNOT_CAST_DATATYPE" : {
     "message" : [
       "Cannot cast <sourceType> to <targetType>."
@@ -3880,11 +3886,6 @@
       "- <sqlValue> caused overflow."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2044" : {
-    "message" : [
-      "<sqlValue1> <symbol> <sqlValue2> caused overflow."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2045" : {
     "message" : [
       "Unsupported table change: <message>"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 7ec9f41af36..ad0796e4eb1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -654,11 +654,11 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
   def binaryArithmeticCauseOverflowError(
       eval1: Short, symbol: String, eval2: Short): SparkArithmeticException = {
     new SparkArithmeticException(
-      errorClass = "_LEGACY_ERROR_TEMP_2044",
+      errorClass = "BINARY_ARITHMETIC_OVERFLOW",
       messageParameters = Map(
-        "sqlValue1" -> toSQLValue(eval1, ShortType),
+        "value1" -> toSQLValue(eval1, ShortType),
         "symbol" -> symbol,
-        "sqlValue2" -> toSQLValue(eval2, ShortType)),
+        "value2" -> toSQLValue(eval2, ShortType)),
       context = Array.empty,
       summary = "")
   }
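
As a sketch (not part of this patch) of how the renamed error class surfaces
to user code, assuming an ANSI-enabled session on a build that includes this
change:

    import org.apache.spark.SparkArithmeticException

    spark.conf.set("spark.sql.ansi.enabled", "true")
    try {
      spark.sql("SELECT 127Y + 5Y").collect()
    } catch {
      case e: SparkArithmeticException =>
        // Expected per the new entry in error-classes.json:
        //   e.getErrorClass == "BINARY_ARITHMETIC_OVERFLOW", e.getSqlState == "22003"
        println(s"${e.getErrorClass} (${e.getSqlState}): ${e.getMessage}")
    }
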
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index 5f94840a16a..6cdbbc4c1a3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -625,6 +625,21 @@ class QueryExecutionErrorsSuite
     }
   }
 
+  test("BINARY_ARITHMETIC_OVERFLOW: byte plus byte result overflow") {
+    withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
+      checkError(
+        exception = intercept[SparkArithmeticException] {
+          sql(s"select 127Y + 5Y").collect()
+        },
+        errorClass = "BINARY_ARITHMETIC_OVERFLOW",
+        parameters = Map(
+          "value1" -> "127S",
+          "symbol" -> "+",
+          "value2" -> "5S"),
+        sqlState = "22003")
+    }
+  }
+
   test("UNSUPPORTED_DATATYPE: invalid StructType raw format") {
     checkError(
       exception = intercept[SparkIllegalArgumentException] {


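To run only the new test, a sketch using ScalaTest's substring filter (the
exact sbt project prefix may vary with your build setup):

     ./build/sbt "sql/testOnly org.apache.spark.sql.errors.QueryExecutionErrorsSuite -- -z BINARY_ARITHMETIC_OVERFLOW"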