This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 93e9f6e4e4a4 [SPARK-51084][SQL] Assign appropriate error class for `negativeScaleNotAllowedError`
93e9f6e4e4a4 is described below

commit 93e9f6e4e4a40625fc91e59251802cd6871e9818
Author: Amanda Liu <amanda....@databricks.com>
AuthorDate: Thu Feb 6 10:39:52 2025 +0900

    [SPARK-51084][SQL] Assign appropriate error class for `negativeScaleNotAllowedError`
    
    ### What changes were proposed in this pull request?
    
    Clarify the error message for `negativeScaleNotAllowedError` by assigning it a user-facing error class.
    
    ### Why are the changes needed?
    
    Raise a more user-friendly error message when users attempt to use a
    negative scale for DecimalType while the Spark config
    `spark.sql.legacy.allowNegativeScaleOfDecimal` is not set. Previously
    an internal error was raised, which is incorrect: internal errors
    signal Spark bugs, whereas this failure is caused by user input.
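    
    A minimal sketch of the new default behavior (adapted from the test
    added below in `DecimalSuite.scala`):
    
        import org.apache.spark.sql.types.Decimal
    
        // Now throws AnalysisException with error condition
        // NEGATIVE_SCALE_DISALLOWED (previously: SparkException with
        // INTERNAL_ERROR):
        Decimal(BigDecimal("98765"), 5, -3)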
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, it changes the error class raised when users attempt to use a
    negative scale for DecimalType while the Spark config
    `spark.sql.legacy.allowNegativeScaleOfDecimal` is not set.
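    
    Interpolating the new message template, the error should render
    roughly as follows (a sketch; exact formatting follows Spark's
    standard error rendering):
    
        [NEGATIVE_SCALE_DISALLOWED] Negative scale is not allowed: '-3'.
        Set the config "spark.sql.legacy.allowNegativeScaleOfDecimal" to
        "true" to allow it. SQLSTATE: 0A000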
    
    ### How was this patch tested?
    
    Added test in `DecimalSuite.scala`
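    
    The suite can be run locally with, e.g. (assuming the usual sbt
    project name for sql/catalyst):
    
        build/sbt "catalyst/testOnly org.apache.spark.sql.types.DecimalSuite"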
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #49807 from asl3/asl3/negativescalenotallowederror.
    
    Authored-by: Amanda Liu <amanda....@databricks.com>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
---
 .../src/main/resources/error/error-conditions.json |  6 ++++++
 .../apache/spark/sql/errors/DataTypeErrors.scala   |  7 +++----
 .../org/apache/spark/sql/types/DecimalSuite.scala  | 22 +++++++++++++++-------
 3 files changed, 24 insertions(+), 11 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index b5232375c8e8..b9257cb56fa2 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -3923,6 +3923,12 @@
     ],
     "sqlState" : "0A000"
   },
+  "NEGATIVE_SCALE_DISALLOWED" : {
+    "message" : [
+      "Negative scale is not allowed: '<scale>'. Set the config <sqlConf> to 
\"true\" to allow it."
+    ],
+    "sqlState" : "0A000"
+  },
   "NEGATIVE_VALUES_IN_FREQUENCY_EXPRESSION" : {
     "message" : [
       "Found the negative value in <frequencyExpression>: <negativeValue>, but 
expected a positive integral value."
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala b/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
index 3492421b4378..c69c5bfb5261 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
@@ -124,10 +124,9 @@ private[sql] object DataTypeErrors extends DataTypeErrorsBase {
 
   def negativeScaleNotAllowedError(scale: Int): Throwable = {
     val sqlConf = QuotingUtils.toSQLConf("spark.sql.legacy.allowNegativeScaleOfDecimal")
-    SparkException.internalError(
-      s"Negative scale is not allowed: ${scale.toString}." +
-        s" Set the config ${sqlConf}" +
-        " to \"true\" to allow it.")
+    new AnalysisException(
+      errorClass = "NEGATIVE_SCALE_DISALLOWED",
+      messageParameters = Map("scale" -> toSQLValue(scale), "sqlConf" -> sqlConf))
   }
 
   def attributeNameSyntaxError(name: String): Throwable = {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
index 794112db5502..821d59796753 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql.types
 import org.scalatest.PrivateMethodTester
 
 import org.apache.spark.{SparkArithmeticException, SparkException, SparkFunSuite, SparkNumberFormatException}
+import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.plans.SQLHelper
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.Decimal._
@@ -103,6 +104,13 @@ class DecimalSuite extends SparkFunSuite with PrivateMethodTester with SQLHelper
         "precision" -> "17",
         "scale" -> "0",
         "config" -> "\"spark.sql.ansi.enabled\""))
+    checkError(
+      exception = intercept[AnalysisException](Decimal(BigDecimal("10"), 2, -5)),
+      condition = "NEGATIVE_SCALE_DISALLOWED",
+      parameters = Map(
+        "scale" -> "-5",
+        "sqlConf" -> "\"spark.sql.legacy.allowNegativeScaleOfDecimal\""
+      ))
   }
 
   test("creating decimals with negative scale under legacy mode") {
@@ -116,15 +124,15 @@ class DecimalSuite extends SparkFunSuite with PrivateMethodTester with SQLHelper
     }
   }
 
-  test("SPARK-30252: Negative scale is not allowed by default") {
+  test("SPARK-30252, SPARK-51084: Negative scale is not allowed by default") {
     def checkNegativeScaleDecimal(d: => Decimal): Unit = {
       checkError(
-        exception = intercept[SparkException] (d),
-        condition = "INTERNAL_ERROR",
-        parameters = Map("message" -> ("Negative scale is not allowed: -3. " +
-          "Set the config \"spark.sql.legacy.allowNegativeScaleOfDecimal\" " +
-          "to \"true\" to allow it."))
-      )
+        exception = intercept[AnalysisException](d),
+        condition = "NEGATIVE_SCALE_DISALLOWED",
+        parameters = Map(
+          "scale" -> "-3",
+          "sqlConf" -> "\"spark.sql.legacy.allowNegativeScaleOfDecimal\""
+        ))
     }
     checkNegativeScaleDecimal(Decimal(BigDecimal("98765"), 5, -3))
     checkNegativeScaleDecimal(Decimal(BigDecimal("98765").underlying(), 5, -3))
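
A note on the escape hatch (a sketch, not part of this patch): per the new
message template, the old behavior can be restored by setting the config,
assuming it is runtime-settable:

    // Scala, e.g. in spark-shell (hypothetical session):
    spark.conf.set("spark.sql.legacy.allowNegativeScaleOfDecimal", "true")
    org.apache.spark.sql.types.Decimal(BigDecimal("98765"), 5, -3)  // now succeeds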

