This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new a12d121159d [SPARK-40183][SQL] Use error class NUMERIC_VALUE_OUT_OF_RANGE for overflow in decimal conversion
a12d121159d is described below

commit a12d121159d0ab8293f70b819cb489cf6126224d
Author: Gengliang Wang <gengli...@apache.org>
AuthorDate: Tue Aug 23 10:01:15 2022 -0700

    [SPARK-40183][SQL] Use error class NUMERIC_VALUE_OUT_OF_RANGE for overflow in decimal conversion
    
    ### What changes were proposed in this pull request?
    
    Use the error class NUMERIC_VALUE_OUT_OF_RANGE for overflow in decimal
    conversion, instead of the confusing error class
    `CANNOT_CHANGE_DECIMAL_PRECISION`.
    Also, use `decimal.toPlainString` instead of `decimal.toDebugString` in
    these error messages.
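
    For illustration, a minimal sketch of the difference between the two
    renderings (assuming `org.apache.spark.sql.types.Decimal`; the debug
    format matches the test expectations below):

        import org.apache.spark.sql.types.Decimal

        val d = Decimal("123.45")
        // Internal representation, including the storage mode -- what the
        // old messages exposed:
        d.toDebugString   // "Decimal(expanded, 123.45, 5, 2)"
        // Plain numeric value -- what the revised messages show:
        d.toPlainString   // "123.45"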
    
    ### Why are the changes needed?
    
    * The error class `CANNOT_CHANGE_DECIMAL_PRECISION` is confusing.
    * The output of `decimal.toDebugString` contains internal details that
      users don't need to know.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, but the change is minor: it improves the error message of the
    overflow exception thrown in decimal conversions, as sketched below.
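
    For example, a rough sketch of the before/after behavior in a Spark
    shell (assuming `spark` is a SparkSession and ANSI mode is enabled;
    messages abridged):

        import org.apache.spark.SparkArithmeticException

        spark.conf.set("spark.sql.ansi.enabled", "true")
        try {
          spark.sql("SELECT CAST('123.45' AS DECIMAL(4, 2))").collect()
        } catch {
          case e: SparkArithmeticException =>
            // Before: CANNOT_CHANGE_DECIMAL_PRECISION with
            //   value = "Decimal(expanded, 123.45, 5, 2)"
            // After:  NUMERIC_VALUE_OUT_OF_RANGE with value = "123.45"
            println(e.getErrorClass)
        }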
    
    ### How was this patch tested?
    
    Existing UT
    
    Closes #37620 from gengliangwang/reviseDecimalError.
    
    Authored-by: Gengliang Wang <gengli...@apache.org>
    Signed-off-by: Gengliang Wang <gengli...@apache.org>
---
 core/src/main/resources/error/error-classes.json   | 12 +++----
 .../spark/sql/errors/QueryExecutionErrors.scala    |  4 +--
 .../resources/sql-tests/results/ansi/cast.sql.out  |  8 ++---
 .../ansi/decimalArithmeticOperations.sql.out       | 40 +++++++++++-----------
 .../sql-tests/results/ansi/interval.sql.out        |  4 +--
 .../test/resources/sql-tests/results/cast.sql.out  |  4 +--
 .../sql/errors/QueryExecutionAnsiErrorsSuite.scala |  6 ++--
 7 files changed, 39 insertions(+), 39 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 3f6c1ca0362..d13849a6c7c 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -17,12 +17,6 @@
     ],
     "sqlState" : "22005"
   },
-  "CANNOT_CHANGE_DECIMAL_PRECISION" : {
-    "message" : [
-      "<value> cannot be represented as Decimal(<precision>, <scale>). If 
necessary set <config> to \"false\" to bypass this error."
-    ],
-    "sqlState" : "22005"
-  },
   "CANNOT_INFER_DATE" : {
     "message" : [
       "Cannot infer date in schema inference when LegacyTimeParserPolicy is 
\"LEGACY\". Legacy Date formatter does not support strict date format matching 
which is required to avoid inferring timestamps and other non-date entries to 
date."
@@ -342,6 +336,12 @@
       "The comparison result is null. If you want to handle null as 0 (equal), 
you can set \"spark.sql.legacy.allowNullComparisonResultInArraySort\" to 
\"true\"."
     ]
   },
+  "NUMERIC_VALUE_OUT_OF_RANGE" : {
+    "message" : [
+      "<value> cannot be represented as Decimal(<precision>, <scale>). If 
necessary set <config> to \"false\" to bypass this error."
+    ],
+    "sqlState" : "22005"
+  },
   "PARSE_CHAR_MISSING_LENGTH" : {
     "message" : [
       "DataType <type> requires a length parameter, for example <type>(10). 
Please specify the length."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index e4481a4c783..19e7a371f8f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -114,9 +114,9 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
       decimalScale: Int,
       context: SQLQueryContext = null): ArithmeticException = {
     new SparkArithmeticException(
-      errorClass = "CANNOT_CHANGE_DECIMAL_PRECISION",
+      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE",
       messageParameters = Array(
-        value.toDebugString,
+        value.toPlainString,
         decimalPrecision.toString,
         decimalScale.toString,
         toSQLConf(SQLConf.ANSI_ENABLED.key)),
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
index 95b2e0ef42b..8f53e557b59 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
@@ -1027,10 +1027,10 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "CANNOT_CHANGE_DECIMAL_PRECISION",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
   "sqlState" : "22005",
   "messageParameters" : {
-    "value" : "Decimal(expanded, 123.45, 5, 2)",
+    "value" : "123.45",
     "precision" : "4",
     "scale" : "2",
     "config" : "\"spark.sql.ansi.enabled\""
@@ -1492,10 +1492,10 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "CANNOT_CHANGE_DECIMAL_PRECISION",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
   "sqlState" : "22005",
   "messageParameters" : {
-    "value" : "Decimal(compact, 10, 18, 6)",
+    "value" : "0.000010",
     "precision" : "1",
     "scale" : "0",
     "config" : "\"spark.sql.ansi.enabled\""
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out
index 388b01bfb62..ff799d199d9 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out
@@ -74,10 +74,10 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "CANNOT_CHANGE_DECIMAL_PRECISION",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
   "sqlState" : "22005",
   "messageParameters" : {
-    "value" : "Decimal(expanded, 10000000000000000000000000000000000000.1, 39, 
1)",
+    "value" : "10000000000000000000000000000000000000.1",
     "precision" : "38",
     "scale" : "1",
     "config" : "\"spark.sql.ansi.enabled\""
@@ -99,10 +99,10 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "CANNOT_CHANGE_DECIMAL_PRECISION",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
   "sqlState" : "22005",
   "messageParameters" : {
-    "value" : "Decimal(expanded, -11000000000000000000000000000000000000.1, 
39, 1)",
+    "value" : "-11000000000000000000000000000000000000.1",
     "precision" : "38",
     "scale" : "1",
     "config" : "\"spark.sql.ansi.enabled\""
@@ -124,10 +124,10 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "CANNOT_CHANGE_DECIMAL_PRECISION",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
   "sqlState" : "22005",
   "messageParameters" : {
-    "value" : "Decimal(expanded, 152415787532388367501905199875019052100, 39, 
0)",
+    "value" : "152415787532388367501905199875019052100",
     "precision" : "38",
     "scale" : "2",
     "config" : "\"spark.sql.ansi.enabled\""
@@ -149,10 +149,10 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "CANNOT_CHANGE_DECIMAL_PRECISION",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
   "sqlState" : "22005",
   "messageParameters" : {
-    "value" : "Decimal(expanded, 
1000000000000000000000000000000000000.00000000000000000000000000000000000000, 
75, 38)",
+    "value" : 
"1000000000000000000000000000000000000.00000000000000000000000000000000000000",
     "precision" : "38",
     "scale" : "6",
     "config" : "\"spark.sql.ansi.enabled\""
@@ -198,10 +198,10 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "CANNOT_CHANGE_DECIMAL_PRECISION",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
   "sqlState" : "22005",
   "messageParameters" : {
-    "value" : "Decimal(expanded, 
10123456789012345678901234567890123456.00000000000000000000000000000000000000, 
76, 38)",
+    "value" : 
"10123456789012345678901234567890123456.00000000000000000000000000000000000000",
     "precision" : "38",
     "scale" : "6",
     "config" : "\"spark.sql.ansi.enabled\""
@@ -223,10 +223,10 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "CANNOT_CHANGE_DECIMAL_PRECISION",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
   "sqlState" : "22005",
   "messageParameters" : {
-    "value" : "Decimal(expanded, 
101234567890123456789012345678901234.56000000000000000000000000000000000000, 
74, 38)",
+    "value" : 
"101234567890123456789012345678901234.56000000000000000000000000000000000000",
     "precision" : "38",
     "scale" : "6",
     "config" : "\"spark.sql.ansi.enabled\""
@@ -248,10 +248,10 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "CANNOT_CHANGE_DECIMAL_PRECISION",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
   "sqlState" : "22005",
   "messageParameters" : {
-    "value" : "Decimal(expanded, 
10123456789012345678901234567890123.45600000000000000000000000000000000000, 73, 
38)",
+    "value" : 
"10123456789012345678901234567890123.45600000000000000000000000000000000000",
     "precision" : "38",
     "scale" : "6",
     "config" : "\"spark.sql.ansi.enabled\""
@@ -273,10 +273,10 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "CANNOT_CHANGE_DECIMAL_PRECISION",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
   "sqlState" : "22005",
   "messageParameters" : {
-    "value" : "Decimal(expanded, 
1012345678901234567890123456789012.34560000000000000000000000000000000000, 72, 
38)",
+    "value" : 
"1012345678901234567890123456789012.34560000000000000000000000000000000000",
     "precision" : "38",
     "scale" : "6",
     "config" : "\"spark.sql.ansi.enabled\""
@@ -298,10 +298,10 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "CANNOT_CHANGE_DECIMAL_PRECISION",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
   "sqlState" : "22005",
   "messageParameters" : {
-    "value" : "Decimal(expanded, 
101234567890123456789012345678901.23456000000000000000000000000000000000, 71, 
38)",
+    "value" : 
"101234567890123456789012345678901.23456000000000000000000000000000000000",
     "precision" : "38",
     "scale" : "6",
     "config" : "\"spark.sql.ansi.enabled\""
@@ -331,10 +331,10 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "CANNOT_CHANGE_DECIMAL_PRECISION",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
   "sqlState" : "22005",
   "messageParameters" : {
-    "value" : "Decimal(expanded, 
101234567890123456789012345678901.23456000000000000000000000000000000000, 71, 
38)",
+    "value" : 
"101234567890123456789012345678901.23456000000000000000000000000000000000",
     "precision" : "38",
     "scale" : "6",
     "config" : "\"spark.sql.ansi.enabled\""
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index 197370f5a15..c5b14c878cd 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -756,10 +756,10 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "CANNOT_CHANGE_DECIMAL_PRECISION",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
   "sqlState" : "22005",
   "messageParameters" : {
-    "value" : "Decimal(expanded, 1234567890123456789, 20, 0)",
+    "value" : "1234567890123456789",
     "precision" : "18",
     "scale" : "6",
     "config" : "\"spark.sql.ansi.enabled\""
diff --git a/sql/core/src/test/resources/sql-tests/results/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
index b0f7c304f23..087dd831195 100644
--- a/sql/core/src/test/resources/sql-tests/results/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
@@ -866,10 +866,10 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "CANNOT_CHANGE_DECIMAL_PRECISION",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
   "sqlState" : "22005",
   "messageParameters" : {
-    "value" : "Decimal(compact, 10, 18, 6)",
+    "value" : "0.000010",
     "precision" : "1",
     "scale" : "0",
     "config" : "\"spark.sql.ansi.enabled\""
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
index 837880a1d6d..d2d908c8fcd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
@@ -73,15 +73,15 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest with QueryErrorsSuiteBase
       parameters = Map("ansiConfig" -> ansiConf))
   }
 
-  test("CANNOT_CHANGE_DECIMAL_PRECISION: cast string to decimal") {
+  test("NUMERIC_VALUE_OUT_OF_RANGE: cast string to decimal") {
     checkError(
       exception = intercept[SparkArithmeticException] {
         sql("select CAST('66666666666666.666' AS DECIMAL(8, 1))").collect()
       },
-      errorClass = "CANNOT_CHANGE_DECIMAL_PRECISION",
+      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE",
       sqlState = "22005",
       parameters = Map(
-        "value" -> "Decimal(expanded, 66666666666666.666, 17, 3)",
+        "value" -> "66666666666666.666",
         "precision" -> "8",
         "scale" -> "1",
         "config" -> ansiConf),

