This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 3ef0362174ce [SPARK-55714][SQL] JDK might throw ArithmeticException 
without message
3ef0362174ce is described below

commit 3ef0362174ce60e6495096b76468ea96a4a212bd
Author: Cheng Pan <[email protected]>
AuthorDate: Mon Mar 2 08:31:49 2026 +0800

    [SPARK-55714][SQL] JDK might throw ArithmeticException without message
    
    ### What changes were proposed in this pull request?
    
    Canonicalize the error message of `SparkArithmeticException`:
    - null => "overflow"
    - "byte overflow", "long overflow", etc., to "overflow".
    - leave others as-is.
    
    Update tests to accept a null message from `ArithmeticException`.
    
    ### Why are the changes needed?
    
    During testing, I found that JDK 25 might throw `ArithmeticException` with a
`null` message, and the behavior varies across platforms / CPU models. For
example, it differs between Apple M1 and x86 servers, and even between Intel
Skylake and AMD Zen4 (both are x86, but with different instruction support).
    
    https://bugs.openjdk.org/browse/JDK-8367990
    
    > This is expected behavior. For "hot throws", the JIT will produce 
compiled code that does not go through the interpreter via deoptimization to 
throw an exception but throws a pre-allocated exception object without a stack 
trace or message. ... this optimization can be disabled with 
-XX:-OmitStackTraceInFastThrow.
    
    In other words, the error message is not something in the API contract.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Some `ARITHMETIC_OVERFLOW` error condition messages change from "byte
overflow", "long overflow", etc., to "overflow".
    
    ### How was this patch tested?
    
    Pass GHA for JDK 17. JDK 25 will be covered by daily tests.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No, the patch is crafted by hand.
    
    Closes #54514 from pan3793/SPARK-55714.
    
    Authored-by: Cheng Pan <[email protected]>
    Signed-off-by: Cheng Pan <[email protected]>
---
 .../org/apache/spark/sql/errors/ExecutionErrors.scala    | 11 ++++++++++-
 .../spark/sql/catalyst/CatalystTypeConvertersSuite.scala |  4 ++--
 .../catalyst/expressions/ArithmeticExpressionSuite.scala |  8 ++++----
 .../sql/catalyst/expressions/DateExpressionsSuite.scala  |  6 +++---
 .../catalyst/expressions/IntervalExpressionsSuite.scala  |  4 ++--
 .../spark/sql/catalyst/util/DateTimeUtilsSuite.scala     |  4 ++--
 .../spark/sql/catalyst/util/IntervalUtilsSuite.scala     |  2 +-
 .../resources/sql-tests/results/postgreSQL/int4.sql.out  | 12 ++++++------
 .../resources/sql-tests/results/postgreSQL/int8.sql.out  |  8 ++++----
 .../sql-tests/results/postgreSQL/int8.sql.out.java21     |  8 ++++----
 .../sql-tests/results/postgreSQL/window_part2.sql.out    |  4 ++--
 .../src/test/resources/sql-tests/results/sql-udf.sql.out |  2 +-
 .../resources/sql-tests/results/try_aggregates.sql.out   |  4 ++--
 .../sql-tests/results/try_aggregates.sql.out.java21      |  4 ++--
 .../resources/sql-tests/results/try_arithmetic.sql.out   | 16 ++++++++--------
 .../org/apache/spark/sql/ColumnExpressionSuite.scala     | 14 +++++++-------
 .../spark/sql/errors/QueryExecutionErrorsSuite.scala     |  2 +-
 17 files changed, 61 insertions(+), 52 deletions(-)

diff --git 
a/sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala 
b/sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala
index 1a4369b172f8..7954576f351d 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala
@@ -119,13 +119,22 @@ private[sql] trait ExecutionErrors extends 
DataTypeErrorsBase {
       message: String,
       suggestedFunc: String = "",
       context: QueryContext = null): ArithmeticException = {
+    val canonicalMessage = message match {
+      // For "hot throws", the JIT will produce compiled code that does not
+      // go through the interpreter via deoptimization to throw an exception
+      // but throws a pre-allocated exception object without a stack trace
+      // or message. See https://bugs.openjdk.org/browse/JDK-8367990
+      case null => "overflow"
+      case m if m.contains("overflow") => "overflow"
+      case m => m
+    }
     val alternative = if (suggestedFunc.nonEmpty) {
       s" Use '$suggestedFunc' to tolerate overflow and return NULL instead."
     } else ""
     new SparkArithmeticException(
       errorClass = "ARITHMETIC_OVERFLOW",
       messageParameters = Map(
-        "message" -> message,
+        "message" -> canonicalMessage,
         "alternative" -> alternative,
         "config" -> toSQLConf(SqlApiConf.ANSI_ENABLED_KEY)),
       context = getQueryContext(context),
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala
index 1b46825b3414..9f4ce6797cf8 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala
@@ -302,7 +302,7 @@ class CatalystTypeConvertersSuite extends SparkFunSuite 
with SQLHelper {
     val errMsg = intercept[ArithmeticException] {
       IntervalUtils.durationToMicros(Duration.ofSeconds(Long.MaxValue, 
Long.MaxValue))
     }.getMessage
-    assert(errMsg.contains("long overflow"))
+    assert(errMsg == null || errMsg.contains("overflow"))
   }
 
   test("SPARK-35726: Truncate java.time.Duration by fields of day-time 
interval type") {
@@ -357,7 +357,7 @@ class CatalystTypeConvertersSuite extends SparkFunSuite 
with SQLHelper {
     val errMsg = intercept[ArithmeticException] {
       IntervalUtils.periodToMonths(Period.of(Int.MaxValue, Int.MaxValue, 
Int.MaxValue))
     }.getMessage
-    assert(errMsg.contains("integer overflow"))
+    assert(errMsg == null || errMsg.contains("overflow"))
   }
 
   test("SPARK-35769: Truncate java.time.Period by fields of year-month 
interval type") {
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
index 72be3031ace6..7d659fca5df2 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
@@ -119,19 +119,19 @@ class ArithmeticExpressionSuite extends SparkFunSuite 
with ExpressionEvalHelper
     withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
       checkErrorInExpression[SparkArithmeticException](
         UnaryMinus(Literal(Long.MinValue)), "ARITHMETIC_OVERFLOW",
-        Map("message" -> "long overflow", "alternative" -> "",
+        Map("message" -> "overflow", "alternative" -> "",
           "config" -> toSQLConf(SqlApiConf.ANSI_ENABLED_KEY)))
       checkErrorInExpression[SparkArithmeticException](
         UnaryMinus(Literal(Int.MinValue)), "ARITHMETIC_OVERFLOW",
-        Map("message" -> "integer overflow", "alternative" -> "",
+        Map("message" -> "overflow", "alternative" -> "",
           "config" -> toSQLConf(SqlApiConf.ANSI_ENABLED_KEY)))
       checkErrorInExpression[SparkArithmeticException](
         UnaryMinus(Literal(Short.MinValue)), "ARITHMETIC_OVERFLOW",
-        Map("message" -> "short overflow", "alternative" -> "",
+        Map("message" -> "overflow", "alternative" -> "",
           "config" -> toSQLConf(SqlApiConf.ANSI_ENABLED_KEY)))
       checkErrorInExpression[SparkArithmeticException](
         UnaryMinus(Literal(Byte.MinValue)), "ARITHMETIC_OVERFLOW",
-        Map("message" -> "byte overflow", "alternative" -> "",
+        Map("message" -> "overflow", "alternative" -> "",
           "config" -> toSQLConf(SqlApiConf.ANSI_ENABLED_KEY)))
       checkEvaluation(UnaryMinus(positiveShortLit), (- positiveShort).toShort)
       checkEvaluation(UnaryMinus(negativeShortLit), (- negativeShort).toShort)
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index 4e942b8b3e51..540c9830deb4 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -1307,7 +1307,7 @@ class DateExpressionsSuite extends SparkFunSuite with 
ExpressionEvalHelper {
             timeZoneId = Some(tz)),
           Duration.ZERO)
       }.getMessage
-      assert(errMsg.contains("overflow"))
+      assert(errMsg == null || errMsg.contains("overflow"))
 
       Seq(false, true).foreach { legacy =>
         checkConsistencyBetweenInterpretedAndCodegen(
@@ -1372,7 +1372,7 @@ class DateExpressionsSuite extends SparkFunSuite with 
ExpressionEvalHelper {
             timeZoneId = Some(tz)),
           Duration.ZERO)
       }.getMessage
-      assert(errMsg.contains("overflow"))
+      assert(errMsg == null || errMsg.contains("overflow"))
 
       Seq(false, true).foreach { legacy =>
         checkConsistencyBetweenInterpretedAndCodegen(
@@ -1822,7 +1822,7 @@ class DateExpressionsSuite extends SparkFunSuite with 
ExpressionEvalHelper {
             null)
         }.getCause
         assert(e.isInstanceOf[ArithmeticException])
-        assert(e.getMessage.contains("long overflow"))
+        assert(e.getMessage == null || e.getMessage.contains("overflow"))
 
         checkEvaluation(
           TimestampAddInterval(
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/IntervalExpressionsSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/IntervalExpressionsSuite.scala
index 8fb72ad53062..e7d8620d8c61 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/IntervalExpressionsSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/IntervalExpressionsSuite.scala
@@ -155,7 +155,7 @@ class IntervalExpressionsSuite extends SparkFunSuite with 
ExpressionEvalHelper {
     check("-100 years -1 millisecond", 0.5, "-50 years -500 microseconds")
     check("2 months 4 seconds", -0.5, "-1 months -2 seconds")
     check("1 month 2 microseconds", 1.5, "1 months 3 microseconds")
-    check("2 months", Int.MaxValue, "integer overflow", Some(true))
+    check("2 months", Int.MaxValue, "overflow", Some(true))
     check("2 months", Int.MaxValue, s"${Int.MaxValue} months", Some(false))
   }
 
@@ -188,7 +188,7 @@ class IntervalExpressionsSuite extends SparkFunSuite with 
ExpressionEvalHelper {
     check("1 month 3 microsecond", 1.5, "2 microseconds")
     check("1 second", 0, "Division by zero", Some(true))
     check("1 second", 0, null, Some(false))
-    check(s"${Int.MaxValue} months", 0.9, "integer overflow", Some(true))
+    check(s"${Int.MaxValue} months", 0.9, "overflow", Some(true))
     check(s"${Int.MaxValue} months", 0.9, s"${Int.MaxValue} months", 
Some(false))
   }
 
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index b5b69a834037..605a6ffafe8c 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -832,7 +832,7 @@ class DateTimeUtilsSuite extends SparkFunSuite with 
Matchers with SQLHelper {
       val msg = intercept[ArithmeticException] {
         DateTimeUtils.localDateTimeToMicros(dt)
       }.getMessage
-      assert(msg == "long overflow")
+      assert(msg == null || msg.contains("overflow"))
     }
   }
 
@@ -1445,7 +1445,7 @@ class DateTimeUtilsSuite extends SparkFunSuite with 
Matchers with SQLHelper {
       },
       condition = "ARITHMETIC_OVERFLOW",
       parameters = Map(
-        "message" -> "long overflow",
+        "message" -> "overflow",
         "alternative" -> "",
         "config" -> toSQLConf(SqlApiConf.ANSI_ENABLED_KEY))
     )
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
index a87d599711cf..e29f306cd8bb 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
@@ -481,7 +481,7 @@ class IntervalUtilsSuite extends SparkFunSuite with 
SQLHelper {
     assert(divide(interval, 0.9) === new CalendarInterval(Int.MaxValue, 
Int.MaxValue,
       ((Int.MaxValue / 9.0) * MICROS_PER_DAY).round))
     val e1 = intercept[ArithmeticException](divideExact(interval, 0.9))
-    assert(e1.getMessage.contains("integer overflow"))
+    assert(e1.getMessage.contains("overflow"))
 
     interval = new CalendarInterval(123, 456, 789)
     assert(divide(interval, 0) === null)
diff --git 
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out 
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out
index 16c18c86f291..b66808fe9c7e 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out
@@ -203,7 +203,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_multiply' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "integer overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -238,7 +238,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_multiply' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "integer overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -273,7 +273,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "integer overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -309,7 +309,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "integer overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -345,7 +345,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_subtract' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "integer overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -381,7 +381,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_subtract' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "integer overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git 
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out 
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
index f6e4bd8bd7e0..3c54a87babe3 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
@@ -395,7 +395,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_multiply' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -918,7 +918,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_multiply' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -958,7 +958,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_multiply' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -998,7 +998,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_multiply' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git 
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out.java21 
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out.java21
index ee3f8625da8a..e02bb04eed09 100755
--- 
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out.java21
+++ 
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out.java21
@@ -395,7 +395,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_multiply' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -918,7 +918,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_multiply' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -958,7 +958,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_multiply' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -998,7 +998,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_multiply' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git 
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out 
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
index 6cf5e69758d2..6737b290275a 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
@@ -228,7 +228,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -251,7 +251,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/sql-udf.sql.out 
b/sql/core/src/test/resources/sql-tests/results/sql-udf.sql.out
index de65b20b1408..a5e7965c10f4 100644
--- a/sql/core/src/test/resources/sql-tests/results/sql-udf.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/sql-udf.sql.out
@@ -1111,7 +1111,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "integer overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git 
a/sql/core/src/test/resources/sql-tests/results/try_aggregates.sql.out 
b/sql/core/src/test/resources/sql-tests/results/try_aggregates.sql.out
index 94048ac8897b..1b1ba7ba2c17 100644
--- a/sql/core/src/test/resources/sql-tests/results/try_aggregates.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/try_aggregates.sql.out
@@ -157,7 +157,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -365,7 +365,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git 
a/sql/core/src/test/resources/sql-tests/results/try_aggregates.sql.out.java21 
b/sql/core/src/test/resources/sql-tests/results/try_aggregates.sql.out.java21
index 9d3c97baecab..7822a26f35a9 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/try_aggregates.sql.out.java21
+++ 
b/sql/core/src/test/resources/sql-tests/results/try_aggregates.sql.out.java21
@@ -157,7 +157,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -365,7 +365,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git 
a/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out 
b/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out
index b6e154c34f2a..266af5a4fe34 100644
--- a/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out
@@ -67,7 +67,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "integer overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -91,7 +91,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -307,7 +307,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "integer overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -331,7 +331,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -505,7 +505,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "integer overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -529,7 +529,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -671,7 +671,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "integer overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -695,7 +695,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
     "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
+    "message" : "overflow"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
index 2e749818cbf3..27d378ccd2bf 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
@@ -2656,7 +2656,7 @@ class ColumnExpressionSuite extends QueryTest with 
SharedSparkSession {
           .select($"date" + $"interval")
           .collect()
       }
-      assert(e.getMessage.contains("integer overflow"))
+      assert(e.getMessage.contains("overflow"))
     }
   }
 
@@ -2685,7 +2685,7 @@ class ColumnExpressionSuite extends QueryTest with 
SharedSparkSession {
           .select($"date" - $"interval")
           .collect()
       }
-      assert(e.getMessage.contains("integer overflow"))
+      assert(e.getMessage.contains("overflow"))
     }
   }
 
@@ -2723,7 +2723,7 @@ class ColumnExpressionSuite extends QueryTest with 
SharedSparkSession {
           .collect()
       }.getCause
       assert(e.isInstanceOf[ArithmeticException])
-      assert(e.getMessage.contains("long overflow"))
+      assert(e.getMessage == null || e.getMessage.contains("overflow"))
     }
   }
 
@@ -2760,7 +2760,7 @@ class ColumnExpressionSuite extends QueryTest with 
SharedSparkSession {
           .collect()
       }.getCause
       assert(e.isInstanceOf[ArithmeticException])
-      assert(e.getMessage.contains("long overflow"))
+      assert(e.getMessage == null || e.getMessage.contains("overflow"))
     }
   }
 
@@ -2807,7 +2807,7 @@ class ColumnExpressionSuite extends QueryTest with 
SharedSparkSession {
             .collect()
         }.getCause
         assert(e.isInstanceOf[ArithmeticException])
-        assert(e.getMessage.contains("long overflow"))
+        assert(e.getMessage == null || e.getMessage.contains("overflow"))
       }
     }
   }
@@ -3002,7 +3002,7 @@ class ColumnExpressionSuite extends QueryTest with 
SharedSparkSession {
           .collect()
       }.getCause
       assert(e.isInstanceOf[ArithmeticException])
-      assert(e.getMessage.contains("long overflow"))
+      assert(e.getMessage == null || e.getMessage.contains("overflow"))
     }
   }
 
@@ -3074,7 +3074,7 @@ class ColumnExpressionSuite extends QueryTest with 
SharedSparkSession {
             .collect()
         }.getCause
         assert(e.isInstanceOf[ArithmeticException])
-        assert(e.getMessage.contains("long overflow"))
+        assert(e.getMessage == null || e.getMessage.contains("overflow"))
       }
     }
   }
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index fcecaf25d4ca..125e38dccfd5 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -708,7 +708,7 @@ class QueryExecutionErrorsSuite
       ),
       condition = "ARITHMETIC_OVERFLOW",
       parameters = Map(
-        "message" -> "integer overflow",
+        "message" -> "overflow",
         "alternative" -> "",
         "config" -> s""""${SQLConf.ANSI_ENABLED.key}""""))
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to