This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new dbadb5f275c [SPARK-43518][SQL] Convert `_LEGACY_ERROR_TEMP_2029` to 
INTERNAL_ERROR
dbadb5f275c is described below

commit dbadb5f275cf0519b8b1ed78decfe4ce83934825
Author: panbingkun <pbk1...@gmail.com>
AuthorDate: Tue May 16 12:47:15 2023 +0300

    [SPARK-43518][SQL] Convert `_LEGACY_ERROR_TEMP_2029` to INTERNAL_ERROR
    
    ### What changes were proposed in this pull request?
    The PR aims to convert _LEGACY_ERROR_TEMP_2029 to INTERNAL_ERROR.
    
    ### Why are the changes needed?
    1. I found that it can only be triggered with the parameter values: 
UP, DOWN, HALF_DOWN, or UNNECESSARY; but from a user's perspective, triggering it is impossible 
(the internal code limits its value to only: HALF_UP, HALF_EVEN, CEILING, FLOOR), 
so we should convert it to an internal error.
    2. The changes improve the error framework.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    1. Updated the existing UT.
    2. Pass GA.
    
    Closes #41179 from panbingkun/SPARK-43518.
    
    Authored-by: panbingkun <pbk1...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json              |  5 -----
 .../org/apache/spark/sql/errors/QueryExecutionErrors.scala    |  6 ++----
 .../test/scala/org/apache/spark/sql/types/DecimalSuite.scala  | 11 +++++++++++
 3 files changed, 13 insertions(+), 9 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json 
b/core/src/main/resources/error/error-classes.json
index fa838a6da76..edc5a5a66e5 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -3873,11 +3873,6 @@
       "This line should be unreachable<err>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2029" : {
-    "message" : [
-      "Not supported rounding mode: <roundMode>."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2030" : {
     "message" : [
       "Can not handle nested schema yet...  plan <plan>."
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 1f3ee517dd2..52e8c7df91e 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -514,10 +514,8 @@ private[sql] object QueryExecutionErrors extends 
QueryErrorsBase {
       messageParameters = Map("err" -> err))
   }
 
-  def unsupportedRoundingMode(roundMode: BigDecimal.RoundingMode.Value): 
SparkRuntimeException = {
-    new SparkRuntimeException(
-      errorClass = "_LEGACY_ERROR_TEMP_2029",
-      messageParameters = Map("roundMode" -> roundMode.toString()))
+  def unsupportedRoundingMode(roundMode: BigDecimal.RoundingMode.Value): 
SparkException = {
+    SparkException.internalError(s"Not supported rounding mode: 
${roundMode.toString}.")
   }
 
   def resolveCannotHandleNestedSchema(plan: LogicalPlan): 
SparkRuntimeException = {
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
index 465c25118fa..ab3f831fbcb 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
@@ -303,6 +303,17 @@ class DecimalSuite extends SparkFunSuite with 
PrivateMethodTester with SQLHelper
     }
   }
 
+  test("Not supported rounding mode: HALF_DOWN") {
+    val d = Decimal(10000L, 100, 80)
+    checkError(
+      exception = intercept[SparkException] {
+        d.toPrecision(5, 50, BigDecimal.RoundingMode.HALF_DOWN)
+      },
+      errorClass = "INTERNAL_ERROR",
+      parameters = Map("message" -> "Not supported rounding mode: HALF_DOWN.")
+    )
+  }
+
   test("SPARK-20341: support BigInt's value does not fit in long value range") 
{
     val bigInt = scala.math.BigInt("9223372036854775808")
     val decimal = Decimal.apply(bigInt)


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to