This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new f76b3e766f7 [SPARK-38929][SQL] Improve error messages for cast
failures in ANSI
f76b3e766f7 is described below
commit f76b3e766f79b4c2d4f1ecffaad25aeb962336b7
Author: Xinyi Yu <[email protected]>
AuthorDate: Tue Apr 19 22:25:09 2022 +0300
[SPARK-38929][SQL] Improve error messages for cast failures in ANSI
### What changes were proposed in this pull request?
Improve the error messages for cast failures in ANSI.
As mentioned in https://issues.apache.org/jira/browse/SPARK-38929, this PR
targets two cast-to types: numeric types and date types.
* For numeric (`int`, `smallint`, `double`, `float`, `decimal` ..) types, it
embeds the cast-to types in the error message. For example,
```
Invalid input syntax for type INT: '1.0'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
```
It uses the `toSQLType` and `toSQLValue` to wrap the corresponding types
and literals.
* For date types, it does similarly as above. For example,
```
Invalid input syntax for type TIMESTAMP: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
```
### Why are the changes needed?
To improve the error message in general.
### Does this PR introduce _any_ user-facing change?
It changes the error messages.
### How was this patch tested?
The related unit tests are updated.
Closes #36241 from anchovYu/ansi-error-improve.
Authored-by: Xinyi Yu <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
core/src/main/resources/error/error-classes.json | 8 +-
.../spark/sql/catalyst/expressions/Cast.scala | 17 ++-
.../spark/sql/catalyst/util/UTF8StringUtils.scala | 13 +-
.../spark/sql/errors/QueryExecutionErrors.scala | 16 +-
.../scala/org/apache/spark/sql/types/Decimal.scala | 7 +-
.../catalyst/expressions/AnsiCastSuiteBase.scala | 58 +++----
.../sql/catalyst/expressions/TryCastSuite.scala | 3 +-
.../sql/catalyst/util/DateFormatterSuite.scala | 2 +-
.../catalyst/util/TimestampFormatterSuite.scala | 2 +-
.../org/apache/spark/sql/types/DecimalSuite.scala | 3 +-
.../src/test/resources/sql-tests/inputs/cast.sql | 10 +-
.../resources/sql-tests/results/ansi/cast.sql.out | 170 +++++++++++++++------
.../resources/sql-tests/results/ansi/date.sql.out | 10 +-
.../results/ansi/datetime-parsing-invalid.sql.out | 4 +-
.../sql-tests/results/ansi/interval.sql.out | 20 +--
.../results/ansi/string-functions.sql.out | 16 +-
.../test/resources/sql-tests/results/cast.sql.out | 50 +++++-
.../sql-tests/results/postgreSQL/float4.sql.out | 8 +-
.../sql-tests/results/postgreSQL/float8.sql.out | 8 +-
.../sql-tests/results/postgreSQL/text.sql.out | 8 +-
.../results/postgreSQL/window_part2.sql.out | 4 +-
.../results/postgreSQL/window_part3.sql.out | 2 +-
.../results/postgreSQL/window_part4.sql.out | 2 +-
.../sql-tests/results/string-functions.sql.out | 2 +-
.../results/timestampNTZ/timestamp-ansi.sql.out | 2 +-
.../org/apache/spark/sql/SQLInsertTestSuite.scala | 2 +-
26 files changed, 297 insertions(+), 150 deletions(-)
diff --git a/core/src/main/resources/error/error-classes.json
b/core/src/main/resources/error/error-classes.json
index 26d75fa675e..23c1cee1c72 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -105,10 +105,6 @@
"message" : [ "The fraction of sec must be zero. Valid range is [0, 60].
If necessary set %s to false to bypass this error. " ],
"sqlState" : "22023"
},
- "INVALID_INPUT_SYNTAX_FOR_NUMERIC_TYPE" : {
- "message" : [ "invalid input syntax for type numeric: %s. To return NULL
instead, use 'try_cast'. If necessary set %s to false to bypass this error.%s"
],
- "sqlState" : "42000"
- },
"INVALID_JSON_SCHEMA_MAPTYPE" : {
"message" : [ "Input schema %s can only contain StringType as a key type
for a MapType." ]
},
@@ -123,6 +119,10 @@
"message" : [ "Invalid SQL syntax: %s" ],
"sqlState" : "42000"
},
+ "INVALID_SYNTAX_FOR_CAST" : {
+ "message" : [ "Invalid input syntax for type %s: %s. To return NULL
instead, use 'try_cast'. If necessary set %s to false to bypass this error.%s"
],
+ "sqlState" : "42000"
+ },
"MAP_KEY_DOES_NOT_EXIST" : {
"message" : [ "Key %s does not exist. If necessary set %s to false to
bypass this error.%s" ]
},
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index e522c211cb2..865202caa5f 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -816,7 +816,7 @@ abstract class CastBase extends UnaryExpression with
TimeZoneAwareExpression wit
})
case StringType if ansiEnabled =>
buildCast[UTF8String](_,
- s => changePrecision(Decimal.fromStringANSI(s, origin.context),
target))
+ s => changePrecision(Decimal.fromStringANSI(s, target,
origin.context), target))
case BooleanType =>
buildCast[Boolean](_, b => toPrecision(if (b) Decimal.ONE else
Decimal.ZERO, target))
case DateType =>
@@ -845,7 +845,8 @@ abstract class CastBase extends UnaryExpression with
TimeZoneAwareExpression wit
case _: NumberFormatException =>
val d = Cast.processFloatingPointSpecialLiterals(doubleStr, false)
if(ansiEnabled && d == null) {
- throw QueryExecutionErrors.invalidInputSyntaxForNumericError(s,
origin.context)
+ throw QueryExecutionErrors.invalidInputSyntaxForNumericError(
+ DoubleType, s, origin.context)
} else {
d
}
@@ -870,7 +871,8 @@ abstract class CastBase extends UnaryExpression with
TimeZoneAwareExpression wit
case _: NumberFormatException =>
val f = Cast.processFloatingPointSpecialLiterals(floatStr, true)
if (ansiEnabled && f == null) {
- throw QueryExecutionErrors.invalidInputSyntaxForNumericError(s,
origin.context)
+ throw QueryExecutionErrors.invalidInputSyntaxForNumericError(
+ FloatType, s, origin.context)
} else {
f
}
@@ -1376,9 +1378,10 @@ abstract class CastBase extends UnaryExpression with
TimeZoneAwareExpression wit
"""
case StringType if ansiEnabled =>
val errorContext = ctx.addReferenceObj("errCtx", origin.context)
+ val toType = ctx.addReferenceObj("toType", target)
(c, evPrim, evNull) =>
code"""
- Decimal $tmp = Decimal.fromStringANSI($c, $errorContext);
+ Decimal $tmp = Decimal.fromStringANSI($c, $toType,
$errorContext);
${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast,
ctx)}
"""
case BooleanType =>
@@ -1899,7 +1902,8 @@ abstract class CastBase extends UnaryExpression with
TimeZoneAwareExpression wit
(c, evPrim, evNull) =>
val handleNull = if (ansiEnabled) {
val errorContext = ctx.addReferenceObj("errCtx", origin.context)
- s"throw QueryExecutionErrors.invalidInputSyntaxForNumericError($c,
$errorContext);"
+ s"throw QueryExecutionErrors.invalidInputSyntaxForNumericError(" +
+ s"org.apache.spark.sql.types.FloatType$$.MODULE$$,$c,
$errorContext);"
} else {
s"$evNull = true;"
}
@@ -1936,7 +1940,8 @@ abstract class CastBase extends UnaryExpression with
TimeZoneAwareExpression wit
(c, evPrim, evNull) =>
val handleNull = if (ansiEnabled) {
val errorContext = ctx.addReferenceObj("errCtx", origin.context)
- s"throw QueryExecutionErrors.invalidInputSyntaxForNumericError($c,
$errorContext);"
+ s"throw QueryExecutionErrors.invalidInputSyntaxForNumericError(" +
+ s"org.apache.spark.sql.types.DoubleType$$.MODULE$$, $c,
$errorContext);"
} else {
s"$evNull = true;"
}
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/UTF8StringUtils.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/UTF8StringUtils.scala
index 9589cf3774e..c01fcbe6ca2 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/UTF8StringUtils.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/UTF8StringUtils.scala
@@ -18,6 +18,7 @@
package org.apache.spark.sql.catalyst.util
import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.types.{ByteType, DataType, IntegerType, LongType,
ShortType}
import org.apache.spark.unsafe.types.UTF8String
/**
@@ -26,23 +27,23 @@ import org.apache.spark.unsafe.types.UTF8String
object UTF8StringUtils {
def toLongExact(s: UTF8String, errorContext: String): Long =
- withException(s.toLongExact, errorContext)
+ withException(s.toLongExact, errorContext, LongType, s)
def toIntExact(s: UTF8String, errorContext: String): Int =
- withException(s.toIntExact, errorContext)
+ withException(s.toIntExact, errorContext, IntegerType, s)
def toShortExact(s: UTF8String, errorContext: String): Short =
- withException(s.toShortExact, errorContext)
+ withException(s.toShortExact, errorContext, ShortType, s)
def toByteExact(s: UTF8String, errorContext: String): Byte =
- withException(s.toByteExact, errorContext)
+ withException(s.toByteExact, errorContext, ByteType, s)
- private def withException[A](f: => A, errorContext: String): A = {
+ private def withException[A](f: => A, errorContext: String, to: DataType, s:
UTF8String): A = {
try {
f
} catch {
case e: NumberFormatException =>
- throw QueryExecutionErrors.invalidInputSyntaxForNumericError(e,
errorContext)
+ throw QueryExecutionErrors.invalidInputSyntaxForNumericError(to, s,
errorContext)
}
}
}
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 79e36cb485a..20be5de7bc6 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -112,10 +112,12 @@ object QueryExecutionErrors extends QueryErrorsBase {
}
def invalidInputSyntaxForNumericError(
+ to: DataType,
s: UTF8String,
errorContext: String): NumberFormatException = {
- new SparkNumberFormatException(errorClass =
"INVALID_INPUT_SYNTAX_FOR_NUMERIC_TYPE",
- messageParameters = Array(toSQLValue(s, StringType),
SQLConf.ANSI_ENABLED.key, errorContext))
+ new SparkNumberFormatException(errorClass = "INVALID_SYNTAX_FOR_CAST",
+ messageParameters = Array(toSQLType(to), toSQLValue(s, StringType),
+ SQLConf.ANSI_ENABLED.key, errorContext))
}
def cannotCastFromNullTypeError(to: DataType): Throwable = {
@@ -1011,8 +1013,14 @@ object QueryExecutionErrors extends QueryErrorsBase {
}
def cannotCastToDateTimeError(value: Any, to: DataType, errorContext:
String): Throwable = {
- new DateTimeException(s"Cannot cast $value to $to. To return NULL instead,
use 'try_cast'. " +
- s"If necessary set ${SQLConf.ANSI_ENABLED.key} to false to bypass this
error." + errorContext)
+ val valueString = if (value.isInstanceOf[UTF8String]) {
+ toSQLValue(value, StringType)
+ } else {
+ toSQLValue(value)
+ }
+ new DateTimeException(s"Invalid input syntax for type ${toSQLType(to)}:
$valueString. " +
+ s"To return NULL instead, use 'try_cast'. If necessary set
${SQLConf.ANSI_ENABLED.key} " +
+ s"to false to bypass this error." + errorContext)
}
def registeringStreamingQueryListenerError(e: Exception): Throwable = {
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index ac6ac33451c..22e57fae52d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -613,7 +613,10 @@ object Decimal {
}
}
- def fromStringANSI(str: UTF8String, errorContext: String = ""): Decimal = {
+ def fromStringANSI(
+ str: UTF8String,
+ to: DecimalType = DecimalType.USER_DEFAULT,
+ errorContext: String = ""): Decimal = {
try {
val bigDecimal = stringToJavaBigDecimal(str)
// We fast fail because constructing a very large JavaBigDecimal to
Decimal is very slow.
@@ -626,7 +629,7 @@ object Decimal {
}
} catch {
case _: NumberFormatException =>
- throw QueryExecutionErrors.invalidInputSyntaxForNumericError(str,
errorContext)
+ throw QueryExecutionErrors.invalidInputSyntaxForNumericError(to, str,
errorContext)
}
}
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/AnsiCastSuiteBase.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/AnsiCastSuiteBase.scala
index 785fd95692e..9be144efd77 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/AnsiCastSuiteBase.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/AnsiCastSuiteBase.scala
@@ -26,6 +26,7 @@ import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.util.DateTimeConstants.MILLIS_PER_SECOND
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils
import
org.apache.spark.sql.catalyst.util.DateTimeTestUtils.{withDefaultTimeZone, UTC}
+import org.apache.spark.sql.errors.QueryExecutionErrors.toSQLValue
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
@@ -174,42 +175,43 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
test("cast from invalid string to numeric should throw
NumberFormatException") {
// cast to IntegerType
Seq(IntegerType, ShortType, ByteType, LongType).foreach { dataType =>
- checkExceptionInExpression[NumberFormatException](
- cast("string", dataType), "invalid input syntax for type numeric:
'string'")
- checkExceptionInExpression[NumberFormatException](
- cast("123-string", dataType), "invalid input syntax for type numeric:
'123-string'")
- checkExceptionInExpression[NumberFormatException](
- cast("2020-07-19", dataType), "invalid input syntax for type numeric:
'2020-07-19'")
- checkExceptionInExpression[NumberFormatException](
- cast("1.23", dataType), "invalid input syntax for type numeric:
'1.23'")
+ checkExceptionInExpression[NumberFormatException](cast("string",
dataType),
+ s"Invalid input syntax for type ${dataType.sql}: 'string'")
+ checkExceptionInExpression[NumberFormatException](cast("123-string",
dataType),
+ s"Invalid input syntax for type ${dataType.sql}: '123-string'")
+ checkExceptionInExpression[NumberFormatException](cast("2020-07-19",
dataType),
+ s"Invalid input syntax for type ${dataType.sql}: '2020-07-19'")
+ checkExceptionInExpression[NumberFormatException](cast("1.23", dataType),
+ s"Invalid input syntax for type ${dataType.sql}: '1.23'")
}
Seq(DoubleType, FloatType, DecimalType.USER_DEFAULT).foreach { dataType =>
- checkExceptionInExpression[NumberFormatException](
- cast("string", dataType), "invalid input syntax for type numeric:
'string'")
- checkExceptionInExpression[NumberFormatException](
- cast("123.000.00", dataType), "invalid input syntax for type numeric:
'123.000.00'")
- checkExceptionInExpression[NumberFormatException](
- cast("abc.com", dataType), "invalid input syntax for type numeric:
'abc.com'")
+ checkExceptionInExpression[NumberFormatException](cast("string",
dataType),
+ s"Invalid input syntax for type ${dataType.sql}: 'string'")
+ checkExceptionInExpression[NumberFormatException](cast("123.000.00",
dataType),
+ s"Invalid input syntax for type ${dataType.sql}: '123.000.00'")
+ checkExceptionInExpression[NumberFormatException](cast("abc.com",
dataType),
+ s"Invalid input syntax for type ${dataType.sql}: 'abc.com'")
}
}
- protected def checkCastToNumericError(l: Literal, to: DataType,
tryCastResult: Any): Unit = {
+ protected def checkCastToNumericError(l: Literal, to: DataType,
+ expectedDataTypeInErrorMsg: DataType, tryCastResult: Any): Unit = {
checkExceptionInExpression[NumberFormatException](
- cast(l, to), "invalid input syntax for type numeric: 'true'")
+ cast(l, to), s"Invalid input syntax for type
${expectedDataTypeInErrorMsg.sql}: 'true'")
}
test("cast from invalid string array to numeric array should throw
NumberFormatException") {
val array = Literal.create(Seq("123", "true", "f", null),
ArrayType(StringType, containsNull = true))
- checkCastToNumericError(array, ArrayType(ByteType, containsNull = true),
+ checkCastToNumericError(array, ArrayType(ByteType, containsNull = true),
ByteType,
Seq(123.toByte, null, null, null))
- checkCastToNumericError(array, ArrayType(ShortType, containsNull = true),
+ checkCastToNumericError(array, ArrayType(ShortType, containsNull = true),
ShortType,
Seq(123.toShort, null, null, null))
- checkCastToNumericError(array, ArrayType(IntegerType, containsNull = true),
+ checkCastToNumericError(array, ArrayType(IntegerType, containsNull =
true), IntegerType,
Seq(123, null, null, null))
- checkCastToNumericError(array, ArrayType(LongType, containsNull = true),
+ checkCastToNumericError(array, ArrayType(LongType, containsNull = true),
LongType,
Seq(123L, null, null, null))
}
@@ -243,7 +245,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
checkExceptionInExpression[NumberFormatException](
cast("abcd", DecimalType(38, 1)),
- "invalid input syntax for type numeric")
+ s"Invalid input syntax for type ${DecimalType(38, 1).sql}: 'abcd'")
}
protected def checkCastToBooleanError(l: Literal, to: DataType,
tryCastResult: Any): Unit = {
@@ -258,7 +260,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
protected def checkCastToTimestampError(l: Literal, to: DataType): Unit = {
checkExceptionInExpression[DateTimeException](
- cast(l, to), s"Cannot cast $l to $to")
+ cast(l, to), s"Invalid input syntax for type TIMESTAMP:
${toSQLValue(l)}")
}
test("cast from timestamp II") {
@@ -369,7 +371,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
assert(ret.resolved == !isTryCast)
if (!isTryCast) {
checkExceptionInExpression[NumberFormatException](
- ret, "invalid input syntax for type numeric")
+ ret, s"Invalid input syntax for type ${IntegerType.sql}")
}
}
@@ -387,7 +389,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
assert(ret.resolved == !isTryCast)
if (!isTryCast) {
checkExceptionInExpression[NumberFormatException](
- ret, "invalid input syntax for type numeric")
+ ret, s"Invalid input syntax for type ${IntegerType.sql}")
}
}
}
@@ -512,7 +514,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
assert(ret.resolved === !isTryCast)
if (!isTryCast) {
checkExceptionInExpression[NumberFormatException](
- ret, "invalid input syntax for type numeric")
+ ret, s"Invalid input syntax for type ${IntegerType.sql}")
}
}
@@ -521,7 +523,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
def checkCastWithParseError(str: String): Unit = {
checkExceptionInExpression[DateTimeException](
cast(Literal(str), TimestampType, Option(zid.getId)),
- s"Cannot cast $str to TimestampType.")
+ s"Invalid input syntax for type TIMESTAMP: '$str'")
}
checkCastWithParseError("123")
@@ -542,7 +544,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
def checkCastWithParseError(str: String): Unit = {
checkExceptionInExpression[DateTimeException](
cast(Literal(str), DateType, Option(zid.getId)),
- s"Cannot cast $str to DateType.")
+ s"Invalid input syntax for type DATE: '$str'")
}
checkCastWithParseError("2015-13-18")
@@ -570,7 +572,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
"2021-06-17 00:00:00ABC").foreach { invalidInput =>
checkExceptionInExpression[DateTimeException](
cast(invalidInput, TimestampNTZType),
- s"Cannot cast $invalidInput to TimestampNTZType")
+ s"Invalid input syntax for type TIMESTAMP_NTZ: '$invalidInput'")
}
}
}
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TryCastSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TryCastSuite.scala
index 1394ec8c8e2..bb9ab888947 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TryCastSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TryCastSuite.scala
@@ -45,7 +45,8 @@ class TryCastSuite extends AnsiCastSuiteBase {
checkEvaluation(cast(l, to), tryCastResult, InternalRow(l.value))
}
- override def checkCastToNumericError(l: Literal, to: DataType,
tryCastResult: Any): Unit = {
+ override def checkCastToNumericError(l: Literal, to: DataType,
+ expectedDataTypeInErrorMsg: DataType, tryCastResult: Any): Unit = {
checkEvaluation(cast(l, to), tryCastResult, InternalRow(l.value))
}
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateFormatterSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateFormatterSuite.scala
index 44c90db7630..71351f6263f 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateFormatterSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateFormatterSuite.scala
@@ -208,6 +208,6 @@ class DateFormatterSuite extends DatetimeFormatterSuite {
val errMsg = intercept[DateTimeException] {
formatter.parse("x123")
}.getMessage
- assert(errMsg.contains("Cannot cast x123 to DateType"))
+ assert(errMsg.contains("Invalid input syntax for type DATE: 'x123'"))
}
}
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/TimestampFormatterSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/TimestampFormatterSuite.scala
index 661e624efa5..204fe93e2d1 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/TimestampFormatterSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/TimestampFormatterSuite.scala
@@ -453,7 +453,7 @@ class TimestampFormatterSuite extends
DatetimeFormatterSuite {
val errMsg = intercept[DateTimeException] {
formatter.parse("x123")
}.getMessage
- assert(errMsg.contains("Cannot cast x123 to TimestampType"))
+ assert(errMsg.contains("Invalid input syntax for type TIMESTAMP:
'x123'"))
}
}
}
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
index 5433c561a03..77b07ce533e 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
@@ -284,7 +284,8 @@ class DecimalSuite extends SparkFunSuite with
PrivateMethodTester with SQLHelper
assert(Decimal.fromString(UTF8String.fromString("str")) === null)
val e =
intercept[NumberFormatException](Decimal.fromStringANSI(UTF8String.fromString("str")))
- assert(e.getMessage.contains("invalid input syntax for type numeric"))
+ assert(e.getMessage.contains("Invalid input syntax for type " +
+ s"${DecimalType.USER_DEFAULT.sql}: 'str'"))
}
test("SPARK-35841: Casting string to decimal type doesn't work " +
diff --git a/sql/core/src/test/resources/sql-tests/inputs/cast.sql
b/sql/core/src/test/resources/sql-tests/inputs/cast.sql
index e391c31690f..4610716902e 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/cast.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/cast.sql
@@ -4,9 +4,11 @@ SELECT CAST('1.23' AS long);
SELECT CAST('-4.56' AS int);
SELECT CAST('-4.56' AS long);
--- cast string which are not numbers to integral should return null
+-- cast string which are not numbers to numeric types
SELECT CAST('abc' AS int);
SELECT CAST('abc' AS long);
+SELECT CAST('abc' AS float);
+SELECT CAST('abc' AS double);
-- cast string representing a very large number to integral should return null
SELECT CAST('1234567890123' AS int);
@@ -15,14 +17,18 @@ SELECT CAST('12345678901234567890123' AS long);
-- cast empty string to integral should return null
SELECT CAST('' AS int);
SELECT CAST('' AS long);
+SELECT CAST('' AS float);
+SELECT CAST('' AS double);
-- cast null to integral should return null
SELECT CAST(NULL AS int);
SELECT CAST(NULL AS long);
--- cast invalid decimal string to integral should return null
+-- cast invalid decimal string to numeric types
SELECT CAST('123.a' AS int);
SELECT CAST('123.a' AS long);
+SELECT CAST('123.a' AS float);
+SELECT CAST('123.a' AS double);
-- '-2147483648' is the smallest int value
SELECT CAST('-2147483648' AS int);
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
index 3de9c1f743d..a2cb4ca1125 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
--- Number of queries: 66
+-- Number of queries: 72
-- !query
@@ -7,8 +7,8 @@ SELECT CAST('1.23' AS int)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '1.23'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type INT: '1.23'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT CAST('1.23' AS int)
^^^^^^^^^^^^^^^^^^^
@@ -19,8 +19,8 @@ SELECT CAST('1.23' AS long)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '1.23'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type BIGINT: '1.23'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT CAST('1.23' AS long)
^^^^^^^^^^^^^^^^^^^^
@@ -31,8 +31,8 @@ SELECT CAST('-4.56' AS int)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '-4.56'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type INT: '-4.56'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT CAST('-4.56' AS int)
^^^^^^^^^^^^^^^^^^^^
@@ -43,8 +43,8 @@ SELECT CAST('-4.56' AS long)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '-4.56'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type BIGINT: '-4.56'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT CAST('-4.56' AS long)
^^^^^^^^^^^^^^^^^^^^^
@@ -55,8 +55,8 @@ SELECT CAST('abc' AS int)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: 'abc'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type INT: 'abc'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT CAST('abc' AS int)
^^^^^^^^^^^^^^^^^^
@@ -67,20 +67,44 @@ SELECT CAST('abc' AS long)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: 'abc'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type BIGINT: 'abc'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT CAST('abc' AS long)
^^^^^^^^^^^^^^^^^^^
+-- !query
+SELECT CAST('abc' AS float)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type FLOAT: 'abc'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+== SQL(line 1, position 7) ==
+SELECT CAST('abc' AS float)
+ ^^^^^^^^^^^^^^^^^^^^
+
+
+-- !query
+SELECT CAST('abc' AS double)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type DOUBLE: 'abc'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+== SQL(line 1, position 7) ==
+SELECT CAST('abc' AS double)
+ ^^^^^^^^^^^^^^^^^^^^^
+
+
-- !query
SELECT CAST('1234567890123' AS int)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '1234567890123'. To return NULL
instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to
bypass this error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type INT: '1234567890123'. To return NULL instead,
use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT CAST('1234567890123' AS int)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -91,8 +115,8 @@ SELECT CAST('12345678901234567890123' AS long)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '12345678901234567890123'. To return
NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false
to bypass this error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type BIGINT: '12345678901234567890123'. To return
NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false
to bypass this error.
== SQL(line 1, position 7) ==
SELECT CAST('12345678901234567890123' AS long)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -103,8 +127,8 @@ SELECT CAST('' AS int)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: ''. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type INT: ''. To return NULL instead, use 'try_cast'.
If necessary set spark.sql.ansi.enabled to false to bypass this error.
== SQL(line 1, position 7) ==
SELECT CAST('' AS int)
^^^^^^^^^^^^^^^
@@ -115,13 +139,37 @@ SELECT CAST('' AS long)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: ''. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type BIGINT: ''. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT CAST('' AS long)
^^^^^^^^^^^^^^^^
+-- !query
+SELECT CAST('' AS float)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type FLOAT: ''. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+== SQL(line 1, position 7) ==
+SELECT CAST('' AS float)
+ ^^^^^^^^^^^^^^^^^
+
+
+-- !query
+SELECT CAST('' AS double)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type DOUBLE: ''. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+== SQL(line 1, position 7) ==
+SELECT CAST('' AS double)
+ ^^^^^^^^^^^^^^^^^^
+
+
-- !query
SELECT CAST(NULL AS int)
-- !query schema
@@ -143,8 +191,8 @@ SELECT CAST('123.a' AS int)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '123.a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type INT: '123.a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT CAST('123.a' AS int)
^^^^^^^^^^^^^^^^^^^^
@@ -155,13 +203,37 @@ SELECT CAST('123.a' AS long)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '123.a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type BIGINT: '123.a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT CAST('123.a' AS long)
^^^^^^^^^^^^^^^^^^^^^
+-- !query
+SELECT CAST('123.a' AS float)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type FLOAT: '123.a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+== SQL(line 1, position 7) ==
+SELECT CAST('123.a' AS float)
+ ^^^^^^^^^^^^^^^^^^^^^^
+
+
+-- !query
+SELECT CAST('123.a' AS double)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type DOUBLE: '123.a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+== SQL(line 1, position 7) ==
+SELECT CAST('123.a' AS double)
+ ^^^^^^^^^^^^^^^^^^^^^^^
+
+
-- !query
SELECT CAST('-2147483648' AS int)
-- !query schema
@@ -175,8 +247,8 @@ SELECT CAST('-2147483649' AS int)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '-2147483649'. To return NULL instead,
use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type INT: '-2147483649'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT CAST('-2147483649' AS int)
^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -195,8 +267,8 @@ SELECT CAST('2147483648' AS int)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '2147483648'. To return NULL instead,
use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type INT: '2147483648'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT CAST('2147483648' AS int)
^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -215,8 +287,8 @@ SELECT CAST('-9223372036854775809' AS long)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '-9223372036854775809'. To return NULL
instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to
bypass this error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type BIGINT: '-9223372036854775809'. To return NULL
instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to
bypass this error.
== SQL(line 1, position 7) ==
SELECT CAST('-9223372036854775809' AS long)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -235,8 +307,8 @@ SELECT CAST('9223372036854775808' AS long)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '9223372036854775808'. To return NULL
instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to
bypass this error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type BIGINT: '9223372036854775808'. To return NULL
instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to
bypass this error.
== SQL(line 1, position 7) ==
SELECT CAST('9223372036854775808' AS long)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -494,8 +566,8 @@ select cast('1中文' as tinyint)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '1中文'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type TINYINT: '1中文'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select cast('1中文' as tinyint)
^^^^^^^^^^^^^^^^^^^^^^
@@ -506,8 +578,8 @@ select cast('1中文' as smallint)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '1中文'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type SMALLINT: '1中文'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select cast('1中文' as smallint)
^^^^^^^^^^^^^^^^^^^^^^^
@@ -518,8 +590,8 @@ select cast('1中文' as INT)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '1中文'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type INT: '1中文'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select cast('1中文' as INT)
^^^^^^^^^^^^^^^^^^
@@ -530,8 +602,8 @@ select cast('中文1' as bigint)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '中文1'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type BIGINT: '中文1'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select cast('中文1' as bigint)
^^^^^^^^^^^^^^^^^^^^^
@@ -542,8 +614,8 @@ select cast('1中文' as bigint)
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '1中文'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type BIGINT: '1中文'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select cast('1中文' as bigint)
^^^^^^^^^^^^^^^^^^^^^
@@ -606,7 +678,7 @@ select cast('xyz' as decimal(4, 2))
struct<>
-- !query output
org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: 'xyz'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+Invalid input syntax for type DECIMAL(4,2): 'xyz'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select cast('xyz' as decimal(4, 2))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -626,7 +698,7 @@ select cast('a' as date)
struct<>
-- !query output
java.time.DateTimeException
-Cannot cast a to DateType. To return NULL instead, use 'try_cast'. If
necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type DATE: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select cast('a' as date)
^^^^^^^^^^^^^^^^^
@@ -646,7 +718,7 @@ select cast('a' as timestamp)
struct<>
-- !query output
java.time.DateTimeException
-Cannot cast a to TimestampType. To return NULL instead, use 'try_cast'. If
necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type TIMESTAMP: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select cast('a' as timestamp)
^^^^^^^^^^^^^^^^^^^^^^
@@ -666,7 +738,7 @@ select cast('a' as timestamp_ntz)
struct<>
-- !query output
java.time.DateTimeException
-Cannot cast a to TimestampNTZType. To return NULL instead, use 'try_cast'. If
necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type TIMESTAMP_NTZ: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select cast('a' as timestamp_ntz)
^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -678,7 +750,7 @@ select cast(cast('inf' as double) as timestamp)
struct<>
-- !query output
java.time.DateTimeException
-Cannot cast Infinity to TimestampType. To return NULL instead, use 'try_cast'.
If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type TIMESTAMP: Infinity. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select cast(cast('inf' as double) as timestamp)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -690,7 +762,7 @@ select cast(cast('inf' as float) as timestamp)
struct<>
-- !query output
java.time.DateTimeException
-Cannot cast Infinity to TimestampType. To return NULL instead, use 'try_cast'.
If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type TIMESTAMP: Infinity. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select cast(cast('inf' as float) as timestamp)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index d9777b53d21..fa65b4dd071 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -232,7 +232,7 @@ select next_day("xx", "Mon")
struct<>
-- !query output
java.time.DateTimeException
-Cannot cast xx to DateType. To return NULL instead, use 'try_cast'. If
necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type DATE: 'xx'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select next_day("xx", "Mon")
^^^^^^^^^^^^^^^^^^^^^
@@ -326,8 +326,8 @@ select date_add('2011-11-11', '1.2')
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '1.2'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type INT: '1.2'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select date_add('2011-11-11', '1.2')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -437,8 +437,8 @@ select date_sub(date'2011-11-11', '1.2')
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: '1.2'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type INT: '1.2'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select date_sub(date'2011-11-11', '1.2')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git
a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
index 57e39bbfe3a..e30b592020d 100644
---
a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
@@ -242,7 +242,7 @@ select cast("Unparseable" as timestamp)
struct<>
-- !query output
java.time.DateTimeException
-Cannot cast Unparseable to TimestampType. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+Invalid input syntax for type TIMESTAMP: 'Unparseable'. To return NULL
instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to
bypass this error.
== SQL(line 1, position 7) ==
select cast("Unparseable" as timestamp)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -254,7 +254,7 @@ select cast("Unparseable" as date)
struct<>
-- !query output
java.time.DateTimeException
-Cannot cast Unparseable to DateType. To return NULL instead, use 'try_cast'.
If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type DATE: 'Unparseable'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select cast("Unparseable" as date)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git
a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index 3b8d95bca0a..d7975dfb58a 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -122,7 +122,7 @@ select interval 2 second * 'a'
struct<>
-- !query output
org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+Invalid input syntax for type DOUBLE: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select interval 2 second * 'a'
^^^^^^^^^^^^^^^^^^^^^^^
@@ -134,7 +134,7 @@ select interval 2 second / 'a'
struct<>
-- !query output
org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+Invalid input syntax for type DOUBLE: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select interval 2 second / 'a'
^^^^^^^^^^^^^^^^^^^^^^^
@@ -146,7 +146,7 @@ select interval 2 year * 'a'
struct<>
-- !query output
org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+Invalid input syntax for type DOUBLE: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select interval 2 year * 'a'
^^^^^^^^^^^^^^^^^^^^^
@@ -158,7 +158,7 @@ select interval 2 year / 'a'
struct<>
-- !query output
org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+Invalid input syntax for type DOUBLE: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select interval 2 year / 'a'
^^^^^^^^^^^^^^^^^^^^^
@@ -186,7 +186,7 @@ select 'a' * interval 2 second
struct<>
-- !query output
org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+Invalid input syntax for type DOUBLE: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select 'a' * interval 2 second
^^^^^^^^^^^^^^^^^^^^^^^
@@ -198,7 +198,7 @@ select 'a' * interval 2 year
struct<>
-- !query output
org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+Invalid input syntax for type DOUBLE: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select 'a' * interval 2 year
^^^^^^^^^^^^^^^^^^^^^
@@ -1516,7 +1516,7 @@ select '4 11:11' - interval '4 22:12' day to minute
struct<>
-- !query output
java.time.DateTimeException
-Cannot cast 4 11:11 to TimestampType. To return NULL instead, use 'try_cast'.
If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type TIMESTAMP: '4 11:11'. To return NULL instead,
use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select '4 11:11' - interval '4 22:12' day to minute
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1528,7 +1528,7 @@ select '4 12:12:12' + interval '4 22:12' day to minute
struct<>
-- !query output
java.time.DateTimeException
-Cannot cast 4 12:12:12 to TimestampType. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+Invalid input syntax for type TIMESTAMP: '4 12:12:12'. To return NULL instead,
use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select '4 12:12:12' + interval '4 22:12' day to minute
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1566,7 +1566,7 @@ select str - interval '4 22:12' day to minute from
interval_view
struct<>
-- !query output
java.time.DateTimeException
-Cannot cast 1 to TimestampType. To return NULL instead, use 'try_cast'. If
necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type TIMESTAMP: '1'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select str - interval '4 22:12' day to minute from interval_view
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1578,7 +1578,7 @@ select str + interval '4 22:12' day to minute from
interval_view
struct<>
-- !query output
java.time.DateTimeException
-Cannot cast 1 to TimestampType. To return NULL instead, use 'try_cast'. If
necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type TIMESTAMP: '1'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select str + interval '4 22:12' day to minute from interval_view
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git
a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
index 7d07282ab67..083471b15d4 100644
---
a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
@@ -81,8 +81,8 @@ select left("abcd", -2), left("abcd", 0), left("abcd", 'a')
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type INT: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 42) ==
...t("abcd", -2), left("abcd", 0), left("abcd", 'a')
^^^^^^^^^^^^^^^^^
@@ -109,8 +109,8 @@ select right("abcd", -2), right("abcd", 0), right("abcd",
'a')
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type INT: 'a'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 44) ==
...("abcd", -2), right("abcd", 0), right("abcd", 'a')
^^^^^^^^^^^^^^^^^^
@@ -418,8 +418,8 @@ SELECT lpad('hi', 'invalid_length')
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: 'invalid_length'. To return NULL
instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to
bypass this error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type INT: 'invalid_length'. To return NULL instead,
use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT lpad('hi', 'invalid_length')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -430,8 +430,8 @@ SELECT rpad('hi', 'invalid_length')
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: 'invalid_length'. To return NULL
instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to
bypass this error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type INT: 'invalid_length'. To return NULL instead,
use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT rpad('hi', 'invalid_length')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/cast.sql.out
b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
index 9ed02e3bed2..aaa82e43513 100644
--- a/sql/core/src/test/resources/sql-tests/results/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
--- Number of queries: 66
+-- Number of queries: 72
-- !query
@@ -50,6 +50,22 @@ struct<CAST(abc AS BIGINT):bigint>
NULL
+-- !query
+SELECT CAST('abc' AS float)
+-- !query schema
+struct<CAST(abc AS FLOAT):float>
+-- !query output
+NULL
+
+
+-- !query
+SELECT CAST('abc' AS double)
+-- !query schema
+struct<CAST(abc AS DOUBLE):double>
+-- !query output
+NULL
+
+
-- !query
SELECT CAST('1234567890123' AS int)
-- !query schema
@@ -82,6 +98,22 @@ struct<CAST( AS BIGINT):bigint>
NULL
+-- !query
+SELECT CAST('' AS float)
+-- !query schema
+struct<CAST( AS FLOAT):float>
+-- !query output
+NULL
+
+
+-- !query
+SELECT CAST('' AS double)
+-- !query schema
+struct<CAST( AS DOUBLE):double>
+-- !query output
+NULL
+
+
-- !query
SELECT CAST(NULL AS int)
-- !query schema
@@ -114,6 +146,22 @@ struct<CAST(123.a AS BIGINT):bigint>
NULL
+-- !query
+SELECT CAST('123.a' AS float)
+-- !query schema
+struct<CAST(123.a AS FLOAT):float>
+-- !query output
+NULL
+
+
+-- !query
+SELECT CAST('123.a' AS double)
+-- !query schema
+struct<CAST(123.a AS DOUBLE):double>
+-- !query output
+NULL
+
+
-- !query
SELECT CAST('-2147483648' AS int)
-- !query schema
diff --git
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
index a7ee7400e58..b63d2d1307e 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
@@ -96,7 +96,7 @@ SELECT float('N A N')
struct<>
-- !query output
org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: 'N A N'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+Invalid input syntax for type FLOAT: 'N A N'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT float('N A N')
^^^^^^^^^^^^^^
@@ -108,7 +108,7 @@ SELECT float('NaN x')
struct<>
-- !query output
org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: 'NaN x'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+Invalid input syntax for type FLOAT: 'NaN x'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT float('NaN x')
^^^^^^^^^^^^^^
@@ -120,7 +120,7 @@ SELECT float(' INFINITY x')
struct<>
-- !query output
org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: ' INFINITY x'. To return NULL
instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to
bypass this error.
+Invalid input syntax for type FLOAT: ' INFINITY x'. To return NULL instead,
use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT float(' INFINITY x')
^^^^^^^^^^^^^^^^^^^^^^^
@@ -156,7 +156,7 @@ SELECT float(decimal('nan'))
struct<>
-- !query output
org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: 'nan'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+Invalid input syntax for type DECIMAL(10,0): 'nan'. To return NULL instead,
use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 13) ==
SELECT float(decimal('nan'))
^^^^^^^^^^^^^^
diff --git
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
index 3237969ea87..b0582c09523 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
@@ -128,7 +128,7 @@ SELECT double('N A N')
struct<>
-- !query output
org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: 'N A N'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+Invalid input syntax for type DOUBLE: 'N A N'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT double('N A N')
^^^^^^^^^^^^^^^
@@ -140,7 +140,7 @@ SELECT double('NaN x')
struct<>
-- !query output
org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: 'NaN x'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+Invalid input syntax for type DOUBLE: 'NaN x'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
SELECT double('NaN x')
^^^^^^^^^^^^^^^
@@ -152,7 +152,7 @@ SELECT double(' INFINITY x')
struct<>
-- !query output
org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: ' INFINITY x'. To return NULL
instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to
bypass this error.
+Invalid input syntax for type DOUBLE: ' INFINITY x'. To return NULL
instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to
bypass this error.
== SQL(line 1, position 7) ==
SELECT double(' INFINITY x')
^^^^^^^^^^^^^^^^^^^^^^^^
@@ -188,7 +188,7 @@ SELECT double(decimal('nan'))
struct<>
-- !query output
org.apache.spark.SparkNumberFormatException
-invalid input syntax for type numeric: 'nan'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+Invalid input syntax for type DECIMAL(10,0): 'nan'. To return NULL instead,
use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 14) ==
SELECT double(decimal('nan'))
^^^^^^^^^^^^^^
diff --git
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
index 836370935f6..cff6bf28040 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
@@ -64,8 +64,8 @@ select string('four: ') || 2+2
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: 'four: 2'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type BIGINT: 'four: 2'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select string('four: ') || 2+2
^^^^^^^^^^^^^^^^^^^^^^^
@@ -76,8 +76,8 @@ select 'four: ' || 2+2
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: 'four: 2'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type BIGINT: 'four: 2'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 1, position 7) ==
select 'four: ' || 2+2
^^^^^^^^^^^^^^^
diff --git
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
index 1d48d7c7b92..c48d92a9900 100644
---
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
@@ -461,8 +461,8 @@ window w as (order by f_numeric range between
-- !query schema
struct<>
-- !query output
-java.lang.NumberFormatException
-invalid input syntax for type numeric: 'NaN'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
+org.apache.spark.SparkNumberFormatException
+Invalid input syntax for type INT: 'NaN'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
== SQL(line 3, position 12) ==
window w as (order by f_numeric range between
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out
index c799d65985d..b5281d4c605 100644
---
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out
@@ -72,7 +72,7 @@ insert into datetimes values
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-failed to evaluate expression CAST('11:00 BST' AS TIMESTAMP): Cannot cast
11:00 BST to TimestampType. To return NULL instead, use 'try_cast'. If
necessary set spark.sql.ansi.enabled to false to bypass this error.
+failed to evaluate expression CAST('11:00 BST' AS TIMESTAMP): Invalid input
syntax for type TIMESTAMP: '11:00 BST'. To return NULL instead, use 'try_cast'.
If necessary set spark.sql.ansi.enabled to false to bypass this error.
== SQL(line 2, position 23) ==
(1, timestamp '11:00', cast ('11:00 BST' as timestamp), cast ('1 year' as
timestamp), ...
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out
index 87beeacc0bc..6beb6fd5958 100644
---
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out
@@ -501,7 +501,7 @@ FROM (VALUES(1,1),(2,2),(3,(cast('nan' as
int))),(4,3),(5,4)) t(a,b)
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-failed to evaluate expression CAST('nan' AS INT): invalid input syntax for
type numeric: 'nan'. To return NULL instead, use 'try_cast'. If necessary set
spark.sql.ansi.enabled to false to bypass this error.
+failed to evaluate expression CAST('nan' AS INT): Invalid input syntax for
type INT: 'nan'. To return NULL instead, use 'try_cast'. If necessary set
spark.sql.ansi.enabled to false to bypass this error.
== SQL(line 3, position 28) ==
FROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b)
^^^^^^^^^^^^^^^^^^
diff --git
a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
index af861e3913b..dc72dfe137d 100644
--- a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
--- Number of queries: 142
+-- Number of queries: 143
-- !query
diff --git
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index 3f275b2a2bd..c09a7a1811c 100644
---
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -332,7 +332,7 @@ select to_timestamp(1)
struct<>
-- !query output
java.time.DateTimeException
-Cannot cast 1 to TimestampNTZType. To return NULL instead, use 'try_cast'. If
necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type TIMESTAMP_NTZ: '1'. To return NULL instead, use
'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this
error.
-- !query
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
index 3ff526bd9db..259d00746ac 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
@@ -314,7 +314,7 @@ trait SQLInsertTestSuite extends QueryTest with
SQLTestUtils {
val errorMsg = intercept[NumberFormatException] {
sql("insert into t partition(a='ansi') values('ansi')")
}.getMessage
- assert(errorMsg.contains("invalid input syntax for type numeric:
'ansi'"))
+ assert(errorMsg.contains("Invalid input syntax for type INT:
'ansi'"))
} else {
sql("insert into t partition(a='ansi') values('ansi')")
checkAnswer(sql("select * from t"), Row("ansi", null) :: Nil)
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]