This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 165ce4eb7d6 [SPARK-39060][SQL] Typo in error messages of decimal overflow
165ce4eb7d6 is described below

commit 165ce4eb7d6d75201beb1bff879efa99fde24f94
Author: Vitalii Li <vitalii...@databricks.com>
AuthorDate: Wed May 4 09:41:53 2022 +0300

    [SPARK-39060][SQL] Typo in error messages of decimal overflow
    
    ### What changes were proposed in this pull request?
    
    This PR removes a stray closing curly bracket from the debug string of the Decimal type in SQL and adds spaces after the separators for readability.
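    
    For illustration, a minimal sketch of the before/after output (assuming a spark-shell session; the snippet itself is not part of this patch, and the printed values are taken from the updated golden files):
    ```
    import org.apache.spark.sql.types.Decimal
    
    // A value built from a string is stored as a BigDecimal, so
    // toDebugString should take the "expanded" branch.
    val d = Decimal("123.45")
    
    // Before this patch: Decimal(expanded,123.45,5,2})   <- stray '}'
    // After this patch:  Decimal(expanded, 123.45, 5, 2)
    println(d.toDebugString)
    ```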
    
    ### Why are the changes needed?
    
    There is a typo (a stray closing curly bracket) in the error messages for decimal overflow.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    By running updated test:
    ```
    $ build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite -- -z decimalArithmeticOperations.sql"
    ```
    
    Closes #36397 from vli-databricks/SPARK-39060.
    
    Authored-by: Vitalii Li <vitalii...@databricks.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../src/main/scala/org/apache/spark/sql/types/Decimal.scala       | 4 ++--
 sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out   | 2 +-
 .../sql-tests/results/ansi/decimalArithmeticOperations.sql.out    | 8 ++++----
 .../src/test/resources/sql-tests/results/ansi/interval.sql.out    | 2 +-
 .../apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala   | 2 +-
 5 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index 12ce7a30601..1eeaa46736e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -227,9 +227,9 @@ final class Decimal extends Ordered[Decimal] with Serializable {
 
   def toDebugString: String = {
     if (decimalVal.ne(null)) {
-      s"Decimal(expanded,$decimalVal,$precision,$scale})"
+      s"Decimal(expanded, $decimalVal, $precision, $scale)"
     } else {
-      s"Decimal(compact,$longVal,$precision,$scale})"
+      s"Decimal(compact, $longVal, $precision, $scale)"
     }
   }
 
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
index 566e27a0e20..476ec158f1f 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
@@ -666,7 +666,7 @@ select cast('123.45' as decimal(4, 2))
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,123.45,5,2}) cannot be represented as Decimal(4, 2). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 123.45, 5, 2) cannot be represented as Decimal(4, 2). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
 == SQL(line 1, position 7) ==
 select cast('123.45' as decimal(4, 2))
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out
index 1640875973e..d4b15d92952 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out
@@ -76,7 +76,7 @@ select (5e36BD + 0.1) + 5e36BD
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,10000000000000000000000000000000000000.1,39,1}) cannot be represented as Decimal(38, 1). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 10000000000000000000000000000000000000.1, 39, 1) cannot be represented as Decimal(38, 1). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
 == SQL(line 1, position 7) ==
 select (5e36BD + 0.1) + 5e36BD
        ^^^^^^^^^^^^^^^^^^^^^^^
@@ -88,7 +88,7 @@ select (-4e36BD - 0.1) - 7e36BD
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,-11000000000000000000000000000000000000.1,39,1}) cannot be represented as Decimal(38, 1). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, -11000000000000000000000000000000000000.1, 39, 1) cannot be represented as Decimal(38, 1). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
 == SQL(line 1, position 7) ==
 select (-4e36BD - 0.1) - 7e36BD
        ^^^^^^^^^^^^^^^^^^^^^^^^
@@ -100,7 +100,7 @@ select 12345678901234567890.0 * 12345678901234567890.0
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,152415787532388367501905199875019052100,39,0}) cannot be represented as Decimal(38, 2). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 152415787532388367501905199875019052100, 39, 0) cannot be represented as Decimal(38, 2). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
 == SQL(line 1, position 7) ==
 select 12345678901234567890.0 * 12345678901234567890.0
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -112,7 +112,7 @@ select 1e35BD / 0.1
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,1000000000000000000000000000000000000,37,0}) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 1000000000000000000000000000000000000, 37, 0) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
 == SQL(line 1, position 7) ==
 select 1e35BD / 0.1
        ^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index f67931e6122..f0a608cbedd 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -664,7 +664,7 @@ select make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789)
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,1234567890123456789,20,0}) cannot be represented as Decimal(18, 6). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 1234567890123456789, 20, 0) cannot be represented as Decimal(18, 6). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
 == SQL(line 1, position 7) ==
 select make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789)
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
index ba9858bc988..78b78f99ab0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
@@ -75,7 +75,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest with QueryErrorsSuiteBase
       },
       errorClass = "CANNOT_CHANGE_DECIMAL_PRECISION",
       msg =
-        "Decimal(expanded,66666666666666.666,17,3}) cannot be represented as 
Decimal(8, 1). " +
+        "Decimal(expanded, 66666666666666.666, 17, 3) cannot be represented as 
Decimal(8, 1). " +
         s"If necessary set $ansiConf to false to bypass this error." +
         """
           |== SQL(line 1, position 7) ==

