This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
     new f8e06c1e1bb [SPARK-42305][SQL] Integrate `_LEGACY_ERROR_TEMP_1229` into `DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION`
f8e06c1e1bb is described below

commit f8e06c1e1bb335180ce01ba5c8a079687ebcecf1
Author: itholic <haejoon....@databricks.com>
AuthorDate: Wed Feb 8 19:23:39 2023 +0500

    [SPARK-42305][SQL] Integrate `_LEGACY_ERROR_TEMP_1229` into `DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION`
    
    ### What changes were proposed in this pull request?
    
    This PR proposes to integrate `_LEGACY_ERROR_TEMP_1229` into `DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION`.
    
    **_LEGACY_ERROR_TEMP_1229**
    ```json
      "_LEGACY_ERROR_TEMP_1229" : {
        "message" : [
          "<decimalType> can only support precision up to <precision>."
        ]
      },
    ```
    
    **DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION**
    ```json
      "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION" : {
        "message" : [
          "Decimal precision <precision> exceeds max precision <maxPrecision>."
        ],
        "sqlState" : "22003"
      },
    ```
    
    ### Why are the changes needed?
    
    We should assign a proper name to `_LEGACY_ERROR_TEMP_*`.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    `./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite*"`
    
    Closes #39875 from itholic/LEGACY_1229.
    
    Authored-by: itholic <haejoon....@databricks.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json     |  5 -----
 .../spark/sql/catalyst/parser/AstBuilder.scala        |  5 ++---
 .../spark/sql/errors/QueryCompilationErrors.scala     |  8 --------
 .../org/apache/spark/sql/types/DecimalType.scala      |  6 +++---
 .../sql/catalyst/parser/ExpressionParserSuite.scala   |  3 ++-
 .../sql-tests/results/ansi/literals.sql.out           | 20 +++++++++++---------
 .../resources/sql-tests/results/literals.sql.out      | 20 +++++++++++---------
 .../sql-tests/results/postgreSQL/numeric.sql.out      |  9 +++++----
 8 files changed, 34 insertions(+), 42 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 3b8914c7f93..481f6f84af5 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -3100,11 +3100,6 @@
         "Decimal scale (<scale>) cannot be greater than precision (<precision>)."
       ]
     },
-    "_LEGACY_ERROR_TEMP_1229" : {
-      "message" : [
-        "<decimalType> can only support precision up to <precision>."
-      ]
-    },
     "_LEGACY_ERROR_TEMP_1231" : {
       "message" : [
         "<key> is not a valid partition column in table <tblName>."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index dfc6e21d4a0..aea496b872b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -29,9 +29,8 @@ import org.antlr.v4.runtime.tree.{ParseTree, RuleNode, TerminalNode}
 import org.apache.commons.codec.DecoderException
 import org.apache.commons.codec.binary.Hex
 
-import org.apache.spark.SparkException
+import org.apache.spark.{SparkArithmeticException, SparkException}
 import org.apache.spark.internal.Logging
-import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.{FunctionIdentifier, SQLConfHelper, TableIdentifier}
 import org.apache.spark.sql.catalyst.analysis._
 import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogStorageFormat}
@@ -2604,7 +2603,7 @@ class AstBuilder extends SqlBaseParserBaseVisitor[AnyRef] with SQLConfHelper wit
     try {
       Literal(BigDecimal(raw).underlying())
     } catch {
-      case e: AnalysisException =>
+      case e: SparkArithmeticException =>
         throw new ParseException(
           errorClass = "_LEGACY_ERROR_TEMP_0061",
           messageParameters = Map("msg" -> e.getMessage),
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index a08f72e8313..4b0e914f279 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -2253,14 +2253,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
         "precision" -> precision.toString))
   }
 
-  def decimalOnlySupportPrecisionUptoError(decimalType: String, precision: Int): Throwable = {
-    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1229",
-      messageParameters = Map(
-        "decimalType" -> decimalType,
-        "precision" -> precision.toString))
-  }
-
   def negativeScaleNotAllowedError(scale: Int): Throwable = {
     SparkException.internalError(s"Negative scale is not allowed: ${scale.toString}." +
       s" Set the config ${toSQLConf(LEGACY_ALLOW_NEGATIVE_SCALE_OF_DECIMAL_ENABLED.key)}" +
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
index 7d0b4a09047..2813771ce48 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
@@ -25,7 +25,7 @@ import scala.reflect.runtime.universe.typeTag
 import org.apache.spark.annotation.Stable
 import org.apache.spark.sql.catalyst.expressions.{Expression, Literal}
 import org.apache.spark.sql.catalyst.types.{PhysicalDataType, PhysicalDecimalType}
-import org.apache.spark.sql.errors.QueryCompilationErrors
+import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
 import org.apache.spark.sql.internal.SQLConf
 
 /**
@@ -51,8 +51,8 @@ case class DecimalType(precision: Int, scale: Int) extends FractionalType {
   }
 
   if (precision > DecimalType.MAX_PRECISION) {
-    throw QueryCompilationErrors.decimalOnlySupportPrecisionUptoError(
-      DecimalType.simpleString, DecimalType.MAX_PRECISION)
+    throw QueryExecutionErrors.decimalPrecisionExceedsMaxPrecisionError(
+      precision, DecimalType.MAX_PRECISION)
   }
 
   // default constructor for Java
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
index 9767f54740b..d0fc7199378 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
@@ -807,7 +807,8 @@ class ExpressionParserSuite extends AnalysisTest {
     checkError(
       exception = parseException("1.20E-38BD"),
       errorClass = "_LEGACY_ERROR_TEMP_0061",
-      parameters = Map("msg" -> "decimal can only support precision up to 38."),
+      parameters = Map("msg" ->
+        "[DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION] Decimal precision 40 exceeds max precision 38."),
       context = ExpectedContext(
         fragment = "1.20E-38BD",
         start = 0,
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out
index 25557a56c82..402b51c1fdc 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out
@@ -172,12 +172,13 @@ select 1234567890123456789012345678901234567890
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.sql.catalyst.parser.ParseException
+org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1229",
+  "errorClass" : "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION",
+  "sqlState" : "22003",
   "messageParameters" : {
-    "decimalType" : "decimal",
-    "precision" : "38"
+    "maxPrecision" : "38",
+    "precision" : "40"
   }
 }
 
@@ -187,12 +187,13 @@ select 1234567890123456789012345678901234567890.0
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.sql.catalyst.parser.ParseException
+org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1229",
+  "errorClass" : "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION",
+  "sqlState" : "22003",
   "messageParameters" : {
-    "decimalType" : "decimal",
-    "precision" : "38"
+    "maxPrecision" : "38",
+    "precision" : "41"
   }
 }
 
@@ -477,7 +479,7 @@ org.apache.spark.sql.catalyst.parser.ParseException
 {
   "errorClass" : "_LEGACY_ERROR_TEMP_0061",
"messageParameters" : { - "msg" : "decimal can only support precision up to 38." + "msg" : "[DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION] Decimal precision 40 exceeds max precision 38." }, "queryContext" : [ { "objectType" : "", diff --git a/sql/core/src/test/resources/sql-tests/results/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/literals.sql.out index 25557a56c82..402b51c1fdc 100644 --- a/sql/core/src/test/resources/sql-tests/results/literals.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/literals.sql.out @@ -172,12 +172,13 @@ select 1234567890123456789012345678901234567890 -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.parser.ParseException +org.apache.spark.SparkArithmeticException { - "errorClass" : "_LEGACY_ERROR_TEMP_1229", + "errorClass" : "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION", + "sqlState" : "22003", "messageParameters" : { - "decimalType" : "decimal", - "precision" : "38" + "maxPrecision" : "38", + "precision" : "40" } } @@ -187,12 +188,13 @@ select 1234567890123456789012345678901234567890.0 -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.parser.ParseException +org.apache.spark.SparkArithmeticException { - "errorClass" : "_LEGACY_ERROR_TEMP_1229", + "errorClass" : "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION", + "sqlState" : "22003", "messageParameters" : { - "decimalType" : "decimal", - "precision" : "38" + "maxPrecision" : "38", + "precision" : "41" } } @@ -477,7 +479,7 @@ org.apache.spark.sql.catalyst.parser.ParseException { "errorClass" : "_LEGACY_ERROR_TEMP_0061", "messageParameters" : { - "msg" : "decimal can only support precision up to 38." + "msg" : "[DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION] Decimal precision 40 exceeds max precision 38." }, "queryContext" : [ { "objectType" : "", diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out index dcea503a66a..8e63191a803 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out @@ -3580,12 +3580,13 @@ INSERT INTO num_exp_power_10_ln VALUES (7,1716699575118597095.423308199106402476 -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.parser.ParseException +org.apache.spark.SparkArithmeticException { - "errorClass" : "_LEGACY_ERROR_TEMP_1229", + "errorClass" : "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION", + "sqlState" : "22003", "messageParameters" : { - "decimalType" : "decimal", - "precision" : "38" + "maxPrecision" : "38", + "precision" : "39" } } --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org