This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new bc8c2648514 [SPARK-38791][SQL] Output parameter values of error classes in the SQL style
bc8c2648514 is described below

commit bc8c264851457d8ef59f5b332c79296651ec5d1e
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Mon Apr 11 10:39:22 2022 +0300

    [SPARK-38791][SQL] Output parameter values of error classes in the SQL style
    
    ### What changes were proposed in this pull request?
    In the PR, I propose a new trait `QueryErrorsBase`, which is supposed to
    be used by `Query.*Errors`, and a new method `toSQLValue()`. The method
    converts a parameter value of an error class to its SQL representation.
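
    As a rough sketch of the intended behavior (a hypothetical demo, not part
    of the patch, that just exercises the new trait):
    ```scala
    import org.apache.spark.sql.errors.QueryErrorsBase

    // Minimal driver showing the SQL-style rendering of error parameters.
    object ToSQLValueDemo extends QueryErrorsBase {
      def main(args: Array[String]): Unit = {
        println(toSQLValue("1.23"))               // '1.23' (strings are quoted)
        println(toSQLValue(2147483648L))          // 2147483648L (longs get an L suffix)
        println(toSQLValue(-9.22337203685478e18)) // -9.22337203685478E18D (doubles get a D suffix)
        println(toSQLValue(Float.NaN))            // NaN
        println(toSQLValue(null))                 // NULL
      }
    }
    ```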
    
    ### Why are the changes needed?
    To improve the user experience with Spark SQL: users should see values in
    error messages in a unified SQL style.
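
    For example (before and after, taken from the updated test outputs in the
    diff below):
    ```
    Casting 2147483648 to int causes overflow        -- before
    Casting 2147483648L to int causes overflow       -- after
    invalid input syntax for type numeric: 1.23      -- before
    invalid input syntax for type numeric: '1.23'    -- after
    ```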
    
    ### Does this PR introduce _any_ user-facing change?
    Yes. Error messages now show parameter values in the SQL style, e.g.
    quoted string literals and `L`/`D` suffixes on long/double literals.
    
    ### How was this patch tested?
    By running the modified test suites:
    ```
    $ build/sbt "test:testOnly *QueryExecutionErrorsSuite"
    ```
    
    Closes #36074 from MaxGekk/cleanup-error-classes.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../org/apache/spark/unsafe/types/UTF8String.java  |  8 ++--
 .../spark/sql/catalyst/util/IntervalUtils.scala    | 15 ++++---
 .../apache/spark/sql/errors/QueryErrorsBase.scala  | 47 ++++++++++++++++++++++
 .../spark/sql/errors/QueryExecutionErrors.scala    | 36 +++++++++--------
 .../catalyst/expressions/AnsiCastSuiteBase.scala   | 16 ++++----
 .../spark/sql/catalyst/expressions/CastSuite.scala | 24 ++++++++---
 .../resources/sql-tests/results/ansi/cast.sql.out  | 42 +++++++++----------
 .../resources/sql-tests/results/ansi/date.sql.out  |  4 +-
 .../sql-tests/results/ansi/interval.sql.out        | 12 +++---
 .../results/ansi/string-functions.sql.out          |  8 ++--
 .../sql-tests/results/postgreSQL/float4.sql.out    |  8 ++--
 .../sql-tests/results/postgreSQL/float8.sql.out    | 10 ++---
 .../sql-tests/results/postgreSQL/int8.sql.out      |  8 ++--
 .../sql-tests/results/postgreSQL/text.sql.out      |  4 +-
 .../results/postgreSQL/window_part2.sql.out        |  2 +-
 .../results/postgreSQL/window_part4.sql.out        |  2 +-
 .../org/apache/spark/sql/SQLInsertTestSuite.scala  |  2 +-
 .../sql/errors/QueryExecutionErrorsSuite.scala     |  2 +-
 .../org/apache/spark/sql/sources/InsertSuite.scala |  8 ++--
 19 files changed, 164 insertions(+), 94 deletions(-)

diff --git a/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java b/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
index 0f9d653a0eb..bf11814c981 100644
--- a/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
+++ b/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
@@ -1315,7 +1315,7 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
     if (toLong(result, false)) {
       return result.value;
     }
-    throw new NumberFormatException("invalid input syntax for type numeric: " + this);
+    throw new NumberFormatException("invalid input syntax for type numeric: '" + this + "'");
   }
 
   /**
@@ -1329,7 +1329,7 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
     if (toInt(result, false)) {
       return result.value;
     }
-    throw new NumberFormatException("invalid input syntax for type numeric: " + this);
+    throw new NumberFormatException("invalid input syntax for type numeric: '" + this + "'");
   }
 
   public short toShortExact() {
@@ -1338,7 +1338,7 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
     if (result == value) {
       return result;
     }
-    throw new NumberFormatException("invalid input syntax for type numeric: " + this);
+    throw new NumberFormatException("invalid input syntax for type numeric: '" + this + "'");
   }
 
   public byte toByteExact() {
@@ -1347,7 +1347,7 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
     if (result == value) {
       return result;
     }
-    throw new NumberFormatException("invalid input syntax for type numeric: " + this);
+    throw new NumberFormatException("invalid input syntax for type numeric: '" + this + "'");
   }
 
   @Override
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
index f05e3203c07..a90a6a798cd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
@@ -1289,7 +1289,8 @@ object IntervalUtils {
     val vShort = vInt.toShort
     if (vInt != vShort) {
       throw QueryExecutionErrors.castingCauseOverflowError(
-        toYearMonthIntervalString(v, ANSI_STYLE, startField, endField), ShortType)
+        Literal(v, YearMonthIntervalType(startField, endField)),
+        ShortType)
     }
     vShort
   }
@@ -1299,7 +1300,8 @@ object IntervalUtils {
     val vByte = vInt.toByte
     if (vInt != vByte) {
       throw QueryExecutionErrors.castingCauseOverflowError(
-        toYearMonthIntervalString(v, ANSI_STYLE, startField, endField), ByteType)
+        Literal(v, YearMonthIntervalType(startField, endField)),
+        ByteType)
     }
     vByte
   }
@@ -1347,7 +1349,8 @@ object IntervalUtils {
     val vInt = vLong.toInt
     if (vLong != vInt) {
       throw QueryExecutionErrors.castingCauseOverflowError(
-        toDayTimeIntervalString(v, ANSI_STYLE, startField, endField), IntegerType)
+        Literal(v, DayTimeIntervalType(startField, endField)),
+        IntegerType)
     }
     vInt
   }
@@ -1357,7 +1360,8 @@ object IntervalUtils {
     val vShort = vLong.toShort
     if (vLong != vShort) {
       throw QueryExecutionErrors.castingCauseOverflowError(
-        toDayTimeIntervalString(v, ANSI_STYLE, startField, endField), ShortType)
+        Literal(v, DayTimeIntervalType(startField, endField)),
+        ShortType)
     }
     vShort
   }
@@ -1367,7 +1371,8 @@ object IntervalUtils {
     val vByte = vLong.toByte
     if (vLong != vByte) {
       throw QueryExecutionErrors.castingCauseOverflowError(
-        toDayTimeIntervalString(v, ANSI_STYLE, startField, endField), ByteType)
+        Literal(v, DayTimeIntervalType(startField, endField)),
+        ByteType)
     }
     vByte
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala
new file mode 100644
index 00000000000..e69e1382ecf
--- /dev/null
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.errors
+
+import org.apache.spark.sql.catalyst.expressions.Literal
+import org.apache.spark.sql.types.{DataType, DoubleType, FloatType}
+
+trait QueryErrorsBase {
+  private def litToErrorValue(l: Literal): String = l match {
+    case Literal(null, _) => "NULL"
+    case Literal(v: Float, FloatType) =>
+      if (v.isNaN) "NaN"
+      else if (v.isPosInfinity) "Infinity"
+      else if (v.isNegInfinity) "-Infinity"
+      else v.toString
+    case Literal(v: Double, DoubleType) =>
+      if (v.isNaN) "NaN"
+      else if (v.isPosInfinity) "Infinity"
+      else if (v.isNegInfinity) "-Infinity"
+      else l.sql
+    case l => l.sql
+  }
+
+  // Converts an error class parameter to its SQL representation
+  def toSQLValue(v: Any): String = {
+    litToErrorValue(Literal(v))
+  }
+
+  def toSQLValue(v: Any, t: DataType): String = {
+    litToErrorValue(Literal.create(v, t))
+  }
+}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index dc893ee342c..1dcbe82126d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -66,7 +66,7 @@ import org.apache.spark.util.CircularBuffer
 * This does not include exceptions thrown during the eager execution of commands, which are
  * grouped into [[QueryCompilationErrors]].
  */
-object QueryExecutionErrors {
+object QueryExecutionErrors extends QueryErrorsBase {
 
   def logicalHintOperatorNotRemovedDuringAnalysisError(): Throwable = {
     new SparkIllegalStateException(errorClass = "INTERNAL_ERROR",
@@ -91,7 +91,7 @@ object QueryExecutionErrors {
 
  def castingCauseOverflowError(t: Any, dataType: DataType): ArithmeticException = {
     new SparkArithmeticException(errorClass = "CAST_CAUSES_OVERFLOW",
-      messageParameters = Array(t.toString, dataType.catalogString, SQLConf.ANSI_ENABLED.key))
+      messageParameters = Array(toSQLValue(t), dataType.catalogString, SQLConf.ANSI_ENABLED.key))
   }
 
   def cannotChangeDecimalPrecisionError(
@@ -111,7 +111,7 @@ object QueryExecutionErrors {
 
  def invalidInputSyntaxForNumericError(s: UTF8String): NumberFormatException = {
    new SparkNumberFormatException(errorClass = "INVALID_INPUT_SYNTAX_FOR_NUMERIC_TYPE",
-      messageParameters = Array(s.toString, SQLConf.ANSI_ENABLED.key))
+      messageParameters = Array(toSQLValue(s, StringType), SQLConf.ANSI_ENABLED.key))
   }
 
   def cannotCastFromNullTypeError(to: DataType): Throwable = {
@@ -158,14 +158,16 @@ object QueryExecutionErrors {
       numElements: Int,
       key: String): ArrayIndexOutOfBoundsException = {
     new SparkArrayIndexOutOfBoundsException(errorClass = "INVALID_ARRAY_INDEX",
-      messageParameters = Array(index.toString, numElements.toString, key))
+      messageParameters = Array(toSQLValue(index), toSQLValue(numElements), key))
   }
 
   def invalidElementAtIndexError(
        index: Int,
        numElements: Int): ArrayIndexOutOfBoundsException = {
-    new SparkArrayIndexOutOfBoundsException(errorClass = "INVALID_ARRAY_INDEX_IN_ELEMENT_AT",
-      messageParameters = Array(index.toString, numElements.toString, SQLConf.ANSI_ENABLED.key))
+    new SparkArrayIndexOutOfBoundsException(
+      errorClass = "INVALID_ARRAY_INDEX_IN_ELEMENT_AT",
+      messageParameters =
+        Array(toSQLValue(index), toSQLValue(numElements), SQLConf.ANSI_ENABLED.key))
   }
 
   def mapKeyNotExistError(
@@ -174,10 +176,10 @@ object QueryExecutionErrors {
       context: String): NoSuchElementException = {
     if (isElementAtFunction) {
      new SparkNoSuchElementException(errorClass = "MAP_KEY_DOES_NOT_EXIST_IN_ELEMENT_AT",
-        messageParameters = Array(key.toString, SQLConf.ANSI_ENABLED.key, context))
+        messageParameters = Array(toSQLValue(key), SQLConf.ANSI_ENABLED.key, context))
     } else {
       new SparkNoSuchElementException(errorClass = "MAP_KEY_DOES_NOT_EXIST",
-        messageParameters = Array(key.toString, SQLConf.ANSI_STRICT_INDEX_OPERATOR.key, context))
+        messageParameters = Array(toSQLValue(key), SQLConf.ANSI_STRICT_INDEX_OPERATOR.key, context))
     }
   }
 
@@ -441,12 +443,12 @@ object QueryExecutionErrors {
   }
 
  def unaryMinusCauseOverflowError(originValue: AnyVal): ArithmeticException = {
-    arithmeticOverflowError(s"- $originValue caused overflow")
+    arithmeticOverflowError(s"- ${toSQLValue(originValue)} caused overflow")
   }
 
   def binaryArithmeticCauseOverflowError(
       eval1: Short, symbol: String, eval2: Short): ArithmeticException = {
-    arithmeticOverflowError(s"$eval1 $symbol $eval2 caused overflow")
+    arithmeticOverflowError(s"${toSQLValue(eval1)} $symbol 
${toSQLValue(eval2)} caused overflow")
   }
 
   def failedToCompileMsg(e: Exception): String = {
@@ -1033,7 +1035,7 @@ object QueryExecutionErrors {
  def cannotParseStringAsDataTypeError(pattern: String, value: String, dataType: DataType)
   : Throwable = {
     new RuntimeException(
-      s"Cannot parse field value ${value} for pattern ${pattern} " +
+      s"Cannot parse field value ${toSQLValue(value)} for pattern 
${toSQLValue(pattern)} " +
         s"as target spark data type [$dataType].")
   }
 
@@ -1098,7 +1100,7 @@ object QueryExecutionErrors {
   }
 
   def paramIsNotIntegerError(paramName: String, value: String): Throwable = {
-    new RuntimeException(s"$paramName should be an integer. Found $value")
+    new RuntimeException(s"$paramName should be an integer. Found 
${toSQLValue(value)}")
   }
 
   def paramIsNotBooleanValueError(paramName: String): Throwable = {
@@ -1298,7 +1300,7 @@ object QueryExecutionErrors {
   }
 
   def indexOutOfBoundsOfArrayDataError(idx: Int): Throwable = {
-    new SparkIndexOutOfBoundsException(errorClass = "INDEX_OUT_OF_BOUNDS", Array(idx.toString))
+    new SparkIndexOutOfBoundsException(errorClass = "INDEX_OUT_OF_BOUNDS", Array(toSQLValue(idx)))
   }
 
  def malformedRecordsDetectedInRecordParsingError(e: BadRecordException): Throwable = {
@@ -1335,7 +1337,8 @@ object QueryExecutionErrors {
   }
 
  def dynamicPartitionKeyNotAmongWrittenPartitionPathsError(key: String): Throwable = {
-    new SparkException(s"Dynamic partition key $key is not among written partition paths.")
+    new SparkException(
+      s"Dynamic partition key ${toSQLValue(key)} is not among written 
partition paths.")
   }
 
   def cannotRemovePartitionDirError(partitionPath: Path): Throwable = {
@@ -1618,7 +1621,7 @@ object QueryExecutionErrors {
   }
 
   def valueIsNullError(index: Int): Throwable = {
-    new NullPointerException(s"Value at index $index is null")
+    new NullPointerException(s"Value at index ${toSQLValue(index)} is null")
   }
 
  def onlySupportDataSourcesProvidingFileFormatError(providingClass: String): Throwable = {
@@ -1962,6 +1965,7 @@ object QueryExecutionErrors {
  def timestampAddOverflowError(micros: Long, amount: Int, unit: String): ArithmeticException = {
     new SparkArithmeticException(
       errorClass = "DATETIME_OVERFLOW",
-      messageParameters = Array(s"add $amount $unit to 
'${DateTimeUtils.microsToInstant(micros)}'"))
+      messageParameters = Array(
+        s"add ${toSQLValue(amount)} $unit to 
${toSQLValue(DateTimeUtils.microsToInstant(micros))}"))
   }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/AnsiCastSuiteBase.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/AnsiCastSuiteBase.scala
index 7fb04fe8b7f..6494fb29fda 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/AnsiCastSuiteBase.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/AnsiCastSuiteBase.scala
@@ -175,28 +175,28 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
     // cast to IntegerType
     Seq(IntegerType, ShortType, ByteType, LongType).foreach { dataType =>
       checkExceptionInExpression[NumberFormatException](
-        cast("string", dataType), "invalid input syntax for type numeric: 
string")
+        cast("string", dataType), "invalid input syntax for type numeric: 
'string'")
       checkExceptionInExpression[NumberFormatException](
-        cast("123-string", dataType), "invalid input syntax for type numeric: 
123-string")
+        cast("123-string", dataType), "invalid input syntax for type numeric: 
'123-string'")
       checkExceptionInExpression[NumberFormatException](
-        cast("2020-07-19", dataType), "invalid input syntax for type numeric: 
2020-07-19")
+        cast("2020-07-19", dataType), "invalid input syntax for type numeric: 
'2020-07-19'")
       checkExceptionInExpression[NumberFormatException](
-        cast("1.23", dataType), "invalid input syntax for type numeric: 1.23")
+        cast("1.23", dataType), "invalid input syntax for type numeric: 
'1.23'")
     }
 
     Seq(DoubleType, FloatType, DecimalType.USER_DEFAULT).foreach { dataType =>
       checkExceptionInExpression[NumberFormatException](
-        cast("string", dataType), "invalid input syntax for type numeric: 
string")
+        cast("string", dataType), "invalid input syntax for type numeric: 
'string'")
       checkExceptionInExpression[NumberFormatException](
-        cast("123.000.00", dataType), "invalid input syntax for type numeric: 
123.000.00")
+        cast("123.000.00", dataType), "invalid input syntax for type numeric: 
'123.000.00'")
       checkExceptionInExpression[NumberFormatException](
-        cast("abc.com", dataType), "invalid input syntax for type numeric: 
abc.com")
+        cast("abc.com", dataType), "invalid input syntax for type numeric: 
'abc.com'")
     }
   }
 
  protected def checkCastToNumericError(l: Literal, to: DataType, tryCastResult: Any): Unit = {
     checkExceptionInExpression[NumberFormatException](
-      cast(l, to), "invalid input syntax for type numeric: true")
+      cast(l, to), "invalid input syntax for type numeric: 'true'")
   }
 
   test("cast from invalid string array to numeric array should throw 
NumberFormatException") {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
index ca110502c6b..b6c347cfedb 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
@@ -600,7 +600,7 @@ class CastSuite extends CastSuiteBase {
       val e3 = intercept[ArithmeticException] {
         Cast(Literal(Int.MaxValue + 1L), IntegerType).eval()
       }.getMessage
-      assert(e3.contains("Casting 2147483648 to int causes overflow"))
+      assert(e3.contains("Casting 2147483648L to int causes overflow"))
     }
   }
 
@@ -773,7 +773,14 @@ class CastSuite extends CastSuiteBase {
 
     Seq(
       (Int.MaxValue, DayTimeIntervalType(DAY)),
-      (Int.MinValue, DayTimeIntervalType(DAY)),
+      (Int.MinValue, DayTimeIntervalType(DAY))
+    ).foreach {
+      case (v, toType) =>
+        checkExceptionInExpression[ArithmeticException](cast(v, toType),
+          s"Casting $v to ${toType.catalogString} causes overflow")
+    }
+
+    Seq(
       (Long.MaxValue, DayTimeIntervalType(DAY)),
       (Long.MinValue, DayTimeIntervalType(DAY)),
       (Long.MaxValue, DayTimeIntervalType(HOUR)),
@@ -785,7 +792,7 @@ class CastSuite extends CastSuiteBase {
     ).foreach {
       case (v, toType) =>
         checkExceptionInExpression[ArithmeticException](cast(v, toType),
-          s"Casting $v to ${toType.catalogString} causes overflow")
+          s"Casting ${v}L to ${toType.catalogString} causes overflow")
     }
   }
 
@@ -876,7 +883,14 @@ class CastSuite extends CastSuiteBase {
 
     Seq(
       (Int.MaxValue, YearMonthIntervalType(YEAR)),
-      (Int.MinValue, YearMonthIntervalType(YEAR)),
+      (Int.MinValue, YearMonthIntervalType(YEAR))
+    ).foreach {
+      case (v, toType) =>
+        checkExceptionInExpression[ArithmeticException](cast(v, toType),
+          s"Casting $v to ${toType.catalogString} causes overflow")
+    }
+
+    Seq(
       (Long.MaxValue, YearMonthIntervalType(YEAR)),
       (Long.MinValue, YearMonthIntervalType(YEAR)),
       (Long.MaxValue, YearMonthIntervalType(MONTH)),
@@ -884,7 +898,7 @@ class CastSuite extends CastSuiteBase {
     ).foreach {
       case (v, toType) =>
         checkExceptionInExpression[ArithmeticException](cast(v, toType),
-          s"Casting $v to ${toType.catalogString} causes overflow")
+          s"Casting ${v}L to ${toType.catalogString} causes overflow")
     }
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
index 6e45fe8dce9..6b705274dc8 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
@@ -8,7 +8,7 @@ SELECT CAST('1.23' AS int)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: 1.23. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '1.23'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -17,7 +17,7 @@ SELECT CAST('1.23' AS long)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: 1.23. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '1.23'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -26,7 +26,7 @@ SELECT CAST('-4.56' AS int)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: -4.56. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '-4.56'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -35,7 +35,7 @@ SELECT CAST('-4.56' AS long)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: -4.56. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '-4.56'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -44,7 +44,7 @@ SELECT CAST('abc' AS int)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: abc. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'abc'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -53,7 +53,7 @@ SELECT CAST('abc' AS long)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: abc. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'abc'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -62,7 +62,7 @@ SELECT CAST('1234567890123' AS int)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: 1234567890123. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '1234567890123'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -71,7 +71,7 @@ SELECT CAST('12345678901234567890123' AS long)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: 12345678901234567890123. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '12345678901234567890123'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -80,7 +80,7 @@ SELECT CAST('' AS int)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: . To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: ''. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -89,7 +89,7 @@ SELECT CAST('' AS long)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: . To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: ''. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -114,7 +114,7 @@ SELECT CAST('123.a' AS int)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: 123.a. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '123.a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -123,7 +123,7 @@ SELECT CAST('123.a' AS long)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: 123.a. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '123.a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -140,7 +140,7 @@ SELECT CAST('-2147483649' AS int)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: -2147483649. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '-2147483649'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -157,7 +157,7 @@ SELECT CAST('2147483648' AS int)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: 2147483648. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '2147483648'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -174,7 +174,7 @@ SELECT CAST('-9223372036854775809' AS long)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: -9223372036854775809. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '-9223372036854775809'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -191,7 +191,7 @@ SELECT CAST('9223372036854775808' AS long)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: 9223372036854775808. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '9223372036854775808'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -447,7 +447,7 @@ select cast('1中文' as tinyint)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: 1中文. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '1中文'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -456,7 +456,7 @@ select cast('1中文' as smallint)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: 1中文. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '1中文'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -465,7 +465,7 @@ select cast('1中文' as INT)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: 1中文. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '1中文'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -474,7 +474,7 @@ select cast('中文1' as bigint)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: 中文1. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '中文1'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -483,7 +483,7 @@ select cast('1中文' as bigint)
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: 1中文. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '1中文'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index 98b1ec42a79..c7058cd7e3b 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -324,7 +324,7 @@ select date_add('2011-11-11', '1.2')
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: 1.2. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '1.2'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -432,7 +432,7 @@ select date_sub(date'2011-11-11', '1.2')
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: 1.2. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: '1.2'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index 1f822638432..8f88727f66f 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -122,7 +122,7 @@ select interval 2 second * 'a'
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: a. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -131,7 +131,7 @@ select interval 2 second / 'a'
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: a. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -140,7 +140,7 @@ select interval 2 year * 'a'
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: a. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -149,7 +149,7 @@ select interval 2 year / 'a'
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: a. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -174,7 +174,7 @@ select 'a' * interval 2 second
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: a. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -183,7 +183,7 @@ select 'a' * interval 2 year
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: a. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
index 52d70e22a44..76276340892 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
@@ -82,7 +82,7 @@ select left("abcd", -2), left("abcd", 0), left("abcd", 'a')
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: a. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -107,7 +107,7 @@ select right("abcd", -2), right("abcd", 0), right("abcd", 'a')
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: a. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -413,7 +413,7 @@ SELECT lpad('hi', 'invalid_length')
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: invalid_length. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'invalid_length'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -422,7 +422,7 @@ SELECT rpad('hi', 'invalid_length')
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: invalid_length. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'invalid_length'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
index 690fd7cd2cb..eccfdbae757 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
@@ -96,7 +96,7 @@ SELECT float('N A N')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: N A N. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'N A N'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -105,7 +105,7 @@ SELECT float('NaN x')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: NaN x. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'NaN x'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -114,7 +114,7 @@ SELECT float(' INFINITY    x')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric:  INFINITY    x. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: ' INFINITY    x'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -147,7 +147,7 @@ SELECT float(decimal('nan'))
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: nan. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'nan'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
index 2b71be5a5d9..d143e1f1c59 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
@@ -128,7 +128,7 @@ SELECT double('N A N')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: N A N. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'N A N'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -137,7 +137,7 @@ SELECT double('NaN x')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: NaN x. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'NaN x'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -146,7 +146,7 @@ SELECT double(' INFINITY    x')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric:  INFINITY    x. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: ' INFINITY    x'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -179,7 +179,7 @@ SELECT double(decimal('nan'))
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: nan. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'nan'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -833,7 +833,7 @@ SELECT bigint(double('-9223372036854780000'))
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting -9.22337203685478E18 to bigint causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Casting -9.22337203685478E18D to bigint causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
index cc524b575d3..b7185fcbf1f 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
@@ -619,7 +619,7 @@ SELECT CAST(q1 AS int) FROM int8_tbl WHERE q2 <> 456
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting 4567890123456789 to int causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Casting 4567890123456789L to int causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -636,7 +636,7 @@ SELECT CAST(q1 AS smallint) FROM int8_tbl WHERE q2 <> 456
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting 4567890123456789 to smallint causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Casting 4567890123456789L to smallint causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -673,7 +673,7 @@ SELECT CAST(double('922337203685477580700.0') AS bigint)
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting 9.223372036854776E20 to bigint causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Casting 9.223372036854776E20D to bigint causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -745,7 +745,7 @@ SELECT string(int(shiftleft(bigint(-1), 63))+1)
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting -9223372036854775808 to int causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Casting -9223372036854775808L to int causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
index 56f50ec3a1d..9f9f212c731 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
@@ -65,7 +65,7 @@ select string('four: ') || 2+2
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: four: 2. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'four: 2'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -74,7 +74,7 @@ select 'four: ' || 2+2
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: four: 2. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'four: 2'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
index 75c40ce92d2..158196e7c82 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
@@ -462,7 +462,7 @@ window w as (order by f_numeric range between
 struct<>
 -- !query output
 java.lang.NumberFormatException
-invalid input syntax for type numeric: NaN. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+invalid input syntax for type numeric: 'NaN'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out
index 691df3c45a2..c937d663771 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out
@@ -501,4 +501,4 @@ FROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-failed to evaluate expression CAST('nan' AS INT): invalid input syntax for type numeric: nan. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.; line 3 pos 6
+failed to evaluate expression CAST('nan' AS INT): invalid input syntax for type numeric: 'nan'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.; line 3 pos 6
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
index 274da57fb7b..ab5c66dfec7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
@@ -314,7 +314,7 @@ trait SQLInsertTestSuite extends QueryTest with SQLTestUtils {
             val errorMsg = intercept[NumberFormatException] {
               sql("insert into t partition(a='ansi') values('ansi')")
             }.getMessage
-            assert(errorMsg.contains("invalid input syntax for type numeric: ansi"))
+            assert(errorMsg.contains("invalid input syntax for type numeric: 'ansi'"))
           } else {
             sql("insert into t partition(a='ansi') values('ansi')")
             checkAnswer(sql("select * from t"), Row("ansi", null) :: Nil)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index 9268be43ba4..a7625e17b4a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -276,6 +276,6 @@ class QueryExecutionErrorsSuite extends QueryTest
     assert(e.getErrorClass === "DATETIME_OVERFLOW")
     assert(e.getSqlState === "22008")
     assert(e.getMessage ===
-      "Datetime operation overflow: add 1000000 YEAR to 
'2022-03-09T09:02:03Z'.")
+      "Datetime operation overflow: add 1000000 YEAR to TIMESTAMP '2022-03-09 
01:02:03'.")
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
index 222e195719d..8d921d47e83 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
@@ -721,13 +721,13 @@ class InsertSuite extends DataSourceTest with SharedSparkSession {
         var msg = intercept[SparkException] {
           sql(s"insert into t values($outOfRangeValue1)")
         }.getCause.getMessage
-        assert(msg.contains(s"Casting $outOfRangeValue1 to int causes 
overflow"))
+        assert(msg.contains(s"Casting ${outOfRangeValue1}L to int causes 
overflow"))
 
         val outOfRangeValue2 = (Int.MinValue - 1L).toString
         msg = intercept[SparkException] {
           sql(s"insert into t values($outOfRangeValue2)")
         }.getCause.getMessage
-        assert(msg.contains(s"Casting $outOfRangeValue2 to int causes 
overflow"))
+        assert(msg.contains(s"Casting ${outOfRangeValue2}L to int causes 
overflow"))
       }
     }
   }
@@ -741,13 +741,13 @@ class InsertSuite extends DataSourceTest with SharedSparkSession {
         var msg = intercept[SparkException] {
           sql(s"insert into t values(${outOfRangeValue1}D)")
         }.getCause.getMessage
-        assert(msg.contains(s"Casting $outOfRangeValue1 to bigint causes 
overflow"))
+        assert(msg.contains(s"Casting ${outOfRangeValue1}D to bigint causes 
overflow"))
 
         val outOfRangeValue2 = Math.nextDown(Long.MinValue)
         msg = intercept[SparkException] {
           sql(s"insert into t values(${outOfRangeValue2}D)")
         }.getCause.getMessage
-        assert(msg.contains(s"Casting $outOfRangeValue2 to bigint causes 
overflow"))
+        assert(msg.contains(s"Casting ${outOfRangeValue2}D to bigint causes 
overflow"))
       }
     }
   }

