This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new e28c33bcf784 [SPARK-49578][SQL] Remove the ANSI config suggestion in
CAST_INVALID_INPUT and CAST_OVERFLOW
e28c33bcf784 is described below
commit e28c33bcf784b8a5b7cf29c2527ce78e4a3ada0f
Author: Mihailo Milosevic <[email protected]>
AuthorDate: Wed Sep 11 15:11:00 2024 +0200
[SPARK-49578][SQL] Remove the ANSI config suggestion in CAST_INVALID_INPUT
and CAST_OVERFLOW
### What changes were proposed in this pull request?
Removal of the suggestion to turn off ANSI mode when encountering CAST_INVALID_INPUT
or CAST_OVERFLOW.
### Why are the changes needed?
These errors already contain sufficient suggestions, among which one is
to use `try_cast` in place of `cast`. This removal of the ANSI turn-off suggestion
is part of a broader effort to keep suggestions to disable ANSI mode to a minimum.
### Does this PR introduce _any_ user-facing change?
Yes. This PR is changing user-facing error message.
Previous state:
```
SELECT CAST('a' AS INT)
org.apache.spark.SparkNumberFormatException: [CAST_INVALID_INPUT] The value
'a' of the type "STRING" cannot be cast to "INT" because it is malformed.
Correct the value as per the syntax, or change its target type. Use `try_cast`
to tolerate malformed input and return NULL instead. If necessary set
"spark.sql.ansi.enabled" to "false" to bypass this error.
SELECT CAST(2147483648L AS INT);
org.apache.spark.SparkArithmeticException: [CAST_OVERFLOW] The value
2147483648L of the type "BIGINT" cannot be cast to "INT" due to an overflow.
Use `try_cast` to tolerate overflow and return NULL instead. If necessary set
"spark.sql.ansi.enabled" to "false" to bypass this error.
```
Current state:
```
SELECT CAST('a' AS INT)
org.apache.spark.SparkNumberFormatException: [CAST_INVALID_INPUT] The value
'a' of the type "STRING" cannot be cast to "INT" because it is malformed.
Correct the value as per the syntax, or change its target type. Use `try_cast`
to tolerate malformed input and return NULL instead.
SELECT CAST(2147483648L AS INT);
org.apache.spark.SparkArithmeticException: [CAST_OVERFLOW] The value
2147483648L of the type "BIGINT" cannot be cast to "INT" due to an overflow.
Use `try_cast` to tolerate overflow and return NULL instead.
```
### How was this patch tested?
Existing tests, which check for the existence of the `ansiConfig` message parameter, were updated.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #48054 from mihailom-db/ANSI-errors.
Authored-by: Mihailo Milosevic <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
.../src/main/resources/error/error-conditions.json | 4 +-
docs/sql-ref-ansi-compliance.md | 4 +-
.../sql/tests/test_dataframe_query_context.py | 22 ----------
.../apache/spark/sql/errors/DataTypeErrors.scala | 10 ++---
.../apache/spark/sql/errors/ExecutionErrors.scala | 3 +-
.../spark/sql/errors/QueryExecutionErrors.scala | 9 ++---
.../org/apache/spark/sql/types/DecimalSuite.scala | 3 +-
.../analyzer-results/execute-immediate.sql.out | 2 -
.../postgreSQL/window_part2.sql.out | 1 -
.../analyzer-results/sql-session-variables.sql.out | 4 --
.../resources/sql-tests/results/ansi/cast.sql.out | 47 ----------------------
.../results/ansi/conditional-functions.sql.out | 2 -
.../resources/sql-tests/results/ansi/date.sql.out | 3 --
.../results/ansi/datetime-parsing-invalid.sql.out | 2 -
.../sql-tests/results/ansi/interval.sql.out | 10 -----
.../resources/sql-tests/results/ansi/math.sql.out | 1 -
.../results/ansi/string-functions.sql.out | 4 --
.../test/resources/sql-tests/results/cast.sql.out | 5 ---
.../sql-tests/results/execute-immediate.sql.out | 2 -
.../sql-tests/results/postgreSQL/boolean.sql.out | 15 -------
.../sql-tests/results/postgreSQL/float4.sql.out | 7 ----
.../sql-tests/results/postgreSQL/float8.sql.out | 5 ---
.../sql-tests/results/postgreSQL/int8.sql.out | 4 --
.../sql-tests/results/postgreSQL/text.sql.out | 2 -
.../sql-tests/results/postgreSQL/union.sql.out | 1 -
.../results/postgreSQL/window_part2.sql.out | 1 -
.../results/sql-session-variables.sql.out | 4 --
.../results/timestampNTZ/timestamp-ansi.sql.out | 1 -
.../results/view-schema-binding-config.sql.out | 1 -
.../results/view-schema-compensation.sql.out | 1 -
.../org/apache/spark/sql/SQLInsertTestSuite.scala | 3 +-
.../sql/errors/QueryExecutionAnsiErrorsSuite.scala | 15 +++----
.../sql/errors/QueryExecutionErrorsSuite.scala | 3 +-
.../command/AlterTableAddPartitionSuiteBase.scala | 1 -
.../command/v2/AlterTableAddPartitionSuite.scala | 1 -
35 files changed, 19 insertions(+), 184 deletions(-)
diff --git a/common/utils/src/main/resources/error/error-conditions.json
b/common/utils/src/main/resources/error/error-conditions.json
index 4bc48c042a0b..f254a11eea95 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -455,13 +455,13 @@
},
"CAST_INVALID_INPUT" : {
"message" : [
- "The value <expression> of the type <sourceType> cannot be cast to
<targetType> because it is malformed. Correct the value as per the syntax, or
change its target type. Use `try_cast` to tolerate malformed input and return
NULL instead. If necessary set <ansiConfig> to \"false\" to bypass this error."
+ "The value <expression> of the type <sourceType> cannot be cast to
<targetType> because it is malformed. Correct the value as per the syntax, or
change its target type. Use `try_cast` to tolerate malformed input and return
NULL instead."
],
"sqlState" : "22018"
},
"CAST_OVERFLOW" : {
"message" : [
- "The value <value> of the type <sourceType> cannot be cast to
<targetType> due to an overflow. Use `try_cast` to tolerate overflow and return
NULL instead. If necessary set <ansiConfig> to \"false\" to bypass this error."
+ "The value <value> of the type <sourceType> cannot be cast to
<targetType> due to an overflow. Use `try_cast` to tolerate overflow and return
NULL instead."
],
"sqlState" : "22003"
},
diff --git a/docs/sql-ref-ansi-compliance.md b/docs/sql-ref-ansi-compliance.md
index fd56a9d4117a..3fa67036fd04 100644
--- a/docs/sql-ref-ansi-compliance.md
+++ b/docs/sql-ref-ansi-compliance.md
@@ -141,13 +141,13 @@ In the table above, all the `CAST`s with new syntax are
marked as red <span styl
-- `spark.sql.ansi.enabled=true` (This is a default behaviour)
SELECT CAST('a' AS INT);
-org.apache.spark.SparkNumberFormatException: [CAST_INVALID_INPUT] The value
'a' of the type "STRING" cannot be cast to "INT" because it is malformed.
Correct the value as per the syntax, or change its target type. Use `try_cast`
to tolerate malformed input and return NULL instead. If necessary set
"spark.sql.ansi.enabled" to "false" to bypass this error.
+org.apache.spark.SparkNumberFormatException: [CAST_INVALID_INPUT] The value
'a' of the type "STRING" cannot be cast to "INT" because it is malformed.
Correct the value as per the syntax, or change its target type. Use `try_cast`
to tolerate malformed input and return NULL instead.
== SQL(line 1, position 8) ==
SELECT CAST('a' AS INT)
^^^^^^^^^^^^^^^^
SELECT CAST(2147483648L AS INT);
-org.apache.spark.SparkArithmeticException: [CAST_OVERFLOW] The value
2147483648L of the type "BIGINT" cannot be cast to "INT" due to an overflow.
Use `try_cast` to tolerate overflow and return NULL instead. If necessary set
"spark.sql.ansi.enabled" to "false" to bypass this error.
+org.apache.spark.SparkArithmeticException: [CAST_OVERFLOW] The value
2147483648L of the type "BIGINT" cannot be cast to "INT" due to an overflow.
Use `try_cast` to tolerate overflow and return NULL instead.
SELECT CAST(DATE'2020-01-01' AS INT);
org.apache.spark.sql.AnalysisException: cannot resolve 'CAST(DATE '2020-01-01'
AS INT)' due to data type mismatch: cannot cast date to int.
diff --git a/python/pyspark/sql/tests/test_dataframe_query_context.py
b/python/pyspark/sql/tests/test_dataframe_query_context.py
index 3f31f1d62d73..bf0cc021ca77 100644
--- a/python/pyspark/sql/tests/test_dataframe_query_context.py
+++ b/python/pyspark/sql/tests/test_dataframe_query_context.py
@@ -54,7 +54,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__add__",
@@ -70,7 +69,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__sub__",
@@ -86,7 +84,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__mul__",
@@ -102,7 +99,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__mod__",
@@ -118,7 +114,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__eq__",
@@ -134,7 +129,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__lt__",
@@ -150,7 +144,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__le__",
@@ -166,7 +159,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__ge__",
@@ -182,7 +174,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__gt__",
@@ -198,7 +189,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="eqNullSafe",
@@ -214,7 +204,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="bitwiseOR",
@@ -230,7 +219,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="bitwiseAND",
@@ -246,7 +234,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="bitwiseXOR",
@@ -279,7 +266,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__add__",
@@ -299,7 +285,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__sub__",
@@ -317,7 +302,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__mul__",
@@ -344,7 +328,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__add__",
@@ -360,7 +343,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__sub__",
@@ -376,7 +358,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__mul__",
@@ -407,7 +388,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__add__",
@@ -425,7 +405,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__sub__",
@@ -443,7 +422,6 @@ class DataFrameQueryContextTestsMixin:
"expression": "'string'",
"sourceType": '"STRING"',
"targetType": '"BIGINT"',
- "ansiConfig": '"spark.sql.ansi.enabled"',
},
query_context_type=QueryContextType.DataFrame,
fragment="__mul__",
diff --git
a/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
b/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
index fcd8440d46a6..388a98569258 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
@@ -208,8 +208,7 @@ private[sql] object DataTypeErrors extends
DataTypeErrorsBase {
messageParameters = Map(
"expression" -> convertedValueStr,
"sourceType" -> toSQLType(StringType),
- "targetType" -> toSQLType(to),
- "ansiConfig" -> toSQLConf("spark.sql.ansi.enabled")),
+ "targetType" -> toSQLType(to)),
context = getQueryContext(context),
summary = getSummary(context))
}
@@ -227,11 +226,8 @@ private[sql] object DataTypeErrors extends
DataTypeErrorsBase {
def castingCauseOverflowError(t: String, from: DataType, to: DataType):
ArithmeticException = {
new SparkArithmeticException(
errorClass = "CAST_OVERFLOW",
- messageParameters = Map(
- "value" -> t,
- "sourceType" -> toSQLType(from),
- "targetType" -> toSQLType(to),
- "ansiConfig" -> toSQLConf("spark.sql.ansi.enabled")),
+ messageParameters =
+ Map("value" -> t, "sourceType" -> toSQLType(from), "targetType" ->
toSQLType(to)),
context = Array.empty,
summary = "")
}
diff --git
a/sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala
b/sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala
index 99fffff678a7..5b761e917057 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala
@@ -109,8 +109,7 @@ private[sql] trait ExecutionErrors extends
DataTypeErrorsBase {
messageParameters = Map(
"expression" -> sqlValue,
"sourceType" -> toSQLType(from),
- "targetType" -> toSQLType(to),
- "ansiConfig" -> toSQLConf(SqlApiConf.ANSI_ENABLED_KEY)),
+ "targetType" -> toSQLType(to)),
context = getQueryContext(context),
summary = getSummary(context))
}
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index d6e23fcc65cd..ad8437ed7a50 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -79,8 +79,7 @@ private[sql] object QueryExecutionErrors extends
QueryErrorsBase with ExecutionE
messageParameters = Map(
"value" -> toSQLValue(t, from),
"sourceType" -> toSQLType(from),
- "targetType" -> toSQLType(to),
- "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)),
+ "targetType" -> toSQLType(to)),
context = Array.empty,
summary = "")
}
@@ -124,8 +123,7 @@ private[sql] object QueryExecutionErrors extends
QueryErrorsBase with ExecutionE
messageParameters = Map(
"expression" -> toSQLValue(s, StringType),
"sourceType" -> toSQLType(StringType),
- "targetType" -> toSQLType(BooleanType),
- "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)),
+ "targetType" -> toSQLType(BooleanType)),
context = getQueryContext(context),
summary = getSummary(context))
}
@@ -139,8 +137,7 @@ private[sql] object QueryExecutionErrors extends
QueryErrorsBase with ExecutionE
messageParameters = Map(
"expression" -> toSQLValue(s, StringType),
"sourceType" -> toSQLType(StringType),
- "targetType" -> toSQLType(to),
- "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)),
+ "targetType" -> toSQLType(to)),
context = getQueryContext(context),
summary = getSummary(context))
}
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
index 8d3af48ba689..794112db5502 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
@@ -374,8 +374,7 @@ class DecimalSuite extends SparkFunSuite with
PrivateMethodTester with SQLHelper
parameters = Map(
"expression" -> "'str'",
"sourceType" -> "\"STRING\"",
- "targetType" -> "\"DECIMAL(10,0)\"",
- "ansiConfig" -> "\"spark.sql.ansi.enabled\""))
+ "targetType" -> "\"DECIMAL(10,0)\""))
}
test("SPARK-35841: Casting string to decimal type doesn't work " +
diff --git
a/sql/core/src/test/resources/sql-tests/analyzer-results/execute-immediate.sql.out
b/sql/core/src/test/resources/sql-tests/analyzer-results/execute-immediate.sql.out
index 78bf1ccb1678..ce510527c878 100644
---
a/sql/core/src/test/resources/sql-tests/analyzer-results/execute-immediate.sql.out
+++
b/sql/core/src/test/resources/sql-tests/analyzer-results/execute-immediate.sql.out
@@ -471,7 +471,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'invalid_cast_error_expected'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -662,7 +661,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'name1'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
diff --git
a/sql/core/src/test/resources/sql-tests/analyzer-results/postgreSQL/window_part2.sql.out
b/sql/core/src/test/resources/sql-tests/analyzer-results/postgreSQL/window_part2.sql.out
index cdcd563de4f6..330e1c1cad7e 100644
---
a/sql/core/src/test/resources/sql-tests/analyzer-results/postgreSQL/window_part2.sql.out
+++
b/sql/core/src/test/resources/sql-tests/analyzer-results/postgreSQL/window_part2.sql.out
@@ -449,7 +449,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'NaN'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
diff --git
a/sql/core/src/test/resources/sql-tests/analyzer-results/sql-session-variables.sql.out
b/sql/core/src/test/resources/sql-tests/analyzer-results/sql-session-variables.sql.out
index eb48f0d9a28f..a4e40f08b446 100644
---
a/sql/core/src/test/resources/sql-tests/analyzer-results/sql-session-variables.sql.out
+++
b/sql/core/src/test/resources/sql-tests/analyzer-results/sql-session-variables.sql.out
@@ -842,7 +842,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'hello'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -885,7 +884,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"INT\"",
"targetType" : "\"SMALLINT\"",
"value" : "100000"
@@ -1002,7 +1000,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"DOUBLE\"",
"targetType" : "\"INT\"",
"value" : "1.0E10D"
@@ -1062,7 +1059,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'hello'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
index 7dd7180165f2..0dbdf1d9975c 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
@@ -9,7 +9,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'1.23'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -34,7 +33,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'1.23'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BIGINT\""
@@ -59,7 +57,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'-4.56'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -84,7 +81,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'-4.56'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BIGINT\""
@@ -109,7 +105,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'abc'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -134,7 +129,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'abc'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BIGINT\""
@@ -159,7 +153,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'abc'",
"sourceType" : "\"STRING\"",
"targetType" : "\"FLOAT\""
@@ -184,7 +177,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'abc'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DOUBLE\""
@@ -209,7 +201,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'1234567890123'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -234,7 +225,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'12345678901234567890123'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BIGINT\""
@@ -259,7 +249,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "''",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -284,7 +273,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "''",
"sourceType" : "\"STRING\"",
"targetType" : "\"BIGINT\""
@@ -309,7 +297,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "''",
"sourceType" : "\"STRING\"",
"targetType" : "\"FLOAT\""
@@ -334,7 +321,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "''",
"sourceType" : "\"STRING\"",
"targetType" : "\"DOUBLE\""
@@ -375,7 +361,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'123.a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -400,7 +385,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'123.a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BIGINT\""
@@ -425,7 +409,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'123.a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"FLOAT\""
@@ -450,7 +433,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'123.a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DOUBLE\""
@@ -483,7 +465,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'-2147483649'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -516,7 +497,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'2147483648'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -549,7 +529,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'-9223372036854775809'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BIGINT\""
@@ -582,7 +561,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'9223372036854775808'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BIGINT\""
@@ -973,7 +951,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'1中文'",
"sourceType" : "\"STRING\"",
"targetType" : "\"TINYINT\""
@@ -998,7 +975,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'1中文'",
"sourceType" : "\"STRING\"",
"targetType" : "\"SMALLINT\""
@@ -1023,7 +999,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'1中文'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -1048,7 +1023,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'中文1'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BIGINT\""
@@ -1073,7 +1047,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'1中文'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BIGINT\""
@@ -1116,7 +1089,6 @@ org.apache.spark.SparkRuntimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'\t\n xyz \t\r'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BOOLEAN\""
@@ -1174,7 +1146,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'xyz'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DECIMAL(4,2)\""
@@ -1207,7 +1178,6 @@ org.apache.spark.SparkDateTimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DATE\""
@@ -1240,7 +1210,6 @@ org.apache.spark.SparkDateTimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"TIMESTAMP\""
@@ -1273,7 +1242,6 @@ org.apache.spark.SparkDateTimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"TIMESTAMP_NTZ\""
@@ -1298,7 +1266,6 @@ org.apache.spark.SparkDateTimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "Infinity",
"sourceType" : "\"DOUBLE\"",
"targetType" : "\"TIMESTAMP\""
@@ -1323,7 +1290,6 @@ org.apache.spark.SparkDateTimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "Infinity",
"sourceType" : "\"DOUBLE\"",
"targetType" : "\"TIMESTAMP\""
@@ -1380,7 +1346,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"INTERVAL HOUR TO SECOND\"",
"targetType" : "\"SMALLINT\"",
"value" : "INTERVAL '23:59:59' HOUR TO SECOND"
@@ -1414,7 +1379,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"INTERVAL MONTH\"",
"targetType" : "\"TINYINT\"",
"value" : "INTERVAL '-1000' MONTH"
@@ -1432,7 +1396,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"INTERVAL SECOND\"",
"targetType" : "\"SMALLINT\"",
"value" : "INTERVAL '1000000' SECOND"
@@ -1522,7 +1485,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"INT\"",
"targetType" : "\"INTERVAL YEAR\"",
"value" : "2147483647"
@@ -1540,7 +1502,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"BIGINT\"",
"targetType" : "\"INTERVAL DAY\"",
"value" : "-9223372036854775808L"
@@ -1671,7 +1632,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'1.23'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -1696,7 +1656,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'abc'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -1721,7 +1680,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'12345678901234567890123'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BIGINT\""
@@ -1746,7 +1704,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "''",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -1779,7 +1736,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'123.a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -1927,7 +1883,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'1.23'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -1952,7 +1907,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"BIGINT\"",
"targetType" : "\"INT\"",
"value" : "2147483648L"
@@ -1970,7 +1924,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"BIGINT\"",
"targetType" : "\"INT\"",
"value" : "2147483648L"
diff --git
a/sql/core/src/test/resources/sql-tests/results/ansi/conditional-functions.sql.out
b/sql/core/src/test/resources/sql-tests/results/ansi/conditional-functions.sql.out
index 26293cad10ce..aa8a600f8756 100644
---
a/sql/core/src/test/resources/sql-tests/results/ansi/conditional-functions.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/ansi/conditional-functions.sql.out
@@ -145,7 +145,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'abc'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BIGINT\""
@@ -181,7 +180,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'abc'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BIGINT\""
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index b1b26b2f74ad..16cabe60419c 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -309,7 +309,6 @@ org.apache.spark.SparkDateTimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'xx'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DATE\""
@@ -468,7 +467,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'1.2'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -643,7 +641,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'1.2'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
diff --git
a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
index 514a0c6ae7d3..0708a523900f 100644
---
a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
@@ -427,7 +427,6 @@ org.apache.spark.SparkDateTimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'Unparseable'",
"sourceType" : "\"STRING\"",
"targetType" : "\"TIMESTAMP\""
@@ -452,7 +451,6 @@ org.apache.spark.SparkDateTimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'Unparseable'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DATE\""
diff --git
a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index 4e220ba9885c..0c68023b6eae 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -130,7 +130,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DOUBLE\""
@@ -155,7 +154,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DOUBLE\""
@@ -180,7 +178,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DOUBLE\""
@@ -205,7 +202,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DOUBLE\""
@@ -246,7 +242,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DOUBLE\""
@@ -271,7 +266,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DOUBLE\""
@@ -1945,7 +1939,6 @@ org.apache.spark.SparkDateTimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'4 11:11'",
"sourceType" : "\"STRING\"",
"targetType" : "\"TIMESTAMP\""
@@ -1970,7 +1963,6 @@ org.apache.spark.SparkDateTimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'4 12:12:12'",
"sourceType" : "\"STRING\"",
"targetType" : "\"TIMESTAMP\""
@@ -2051,7 +2043,6 @@ org.apache.spark.SparkDateTimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'1'",
"sourceType" : "\"STRING\"",
"targetType" : "\"TIMESTAMP\""
@@ -2076,7 +2067,6 @@ org.apache.spark.SparkDateTimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'1'",
"sourceType" : "\"STRING\"",
"targetType" : "\"TIMESTAMP\""
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/math.sql.out
b/sql/core/src/test/resources/sql-tests/results/ansi/math.sql.out
index e2abcb099130..fb60a920040e 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/math.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/math.sql.out
@@ -881,7 +881,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'invalid'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DOUBLE\""
diff --git
a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
index 93077221ee5a..cf1bce3c0e50 100644
---
a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
@@ -101,7 +101,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -142,7 +141,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -507,7 +505,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'invalid_length'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -532,7 +529,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'invalid_length'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
diff --git a/sql/core/src/test/resources/sql-tests/results/cast.sql.out
b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
index 6f74c63da354..738697c63883 100644
--- a/sql/core/src/test/resources/sql-tests/results/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
@@ -642,7 +642,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"INTERVAL HOUR TO SECOND\"",
"targetType" : "\"SMALLINT\"",
"value" : "INTERVAL '23:59:59' HOUR TO SECOND"
@@ -676,7 +675,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"INTERVAL MONTH\"",
"targetType" : "\"TINYINT\"",
"value" : "INTERVAL '-1000' MONTH"
@@ -694,7 +692,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"INTERVAL SECOND\"",
"targetType" : "\"SMALLINT\"",
"value" : "INTERVAL '1000000' SECOND"
@@ -784,7 +781,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"INT\"",
"targetType" : "\"INTERVAL YEAR\"",
"value" : "2147483647"
@@ -802,7 +798,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"BIGINT\"",
"targetType" : "\"INTERVAL DAY\"",
"value" : "-9223372036854775808L"
diff --git
a/sql/core/src/test/resources/sql-tests/results/execute-immediate.sql.out
b/sql/core/src/test/resources/sql-tests/results/execute-immediate.sql.out
index 9249d7eb3e51..21ea4436f4fa 100644
--- a/sql/core/src/test/resources/sql-tests/results/execute-immediate.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/execute-immediate.sql.out
@@ -392,7 +392,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'invalid_cast_error_expected'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -603,7 +602,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'name1'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
diff --git
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out
index 12660768b95c..052e7b4f2522 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out
@@ -57,7 +57,6 @@ org.apache.spark.SparkRuntimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'test'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BOOLEAN\""
@@ -90,7 +89,6 @@ org.apache.spark.SparkRuntimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'foo'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BOOLEAN\""
@@ -131,7 +129,6 @@ org.apache.spark.SparkRuntimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'yeah'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BOOLEAN\""
@@ -172,7 +169,6 @@ org.apache.spark.SparkRuntimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'nay'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BOOLEAN\""
@@ -197,7 +193,6 @@ org.apache.spark.SparkRuntimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'on'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BOOLEAN\""
@@ -222,7 +217,6 @@ org.apache.spark.SparkRuntimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'off'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BOOLEAN\""
@@ -247,7 +241,6 @@ org.apache.spark.SparkRuntimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'of'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BOOLEAN\""
@@ -272,7 +265,6 @@ org.apache.spark.SparkRuntimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'o'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BOOLEAN\""
@@ -297,7 +289,6 @@ org.apache.spark.SparkRuntimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'on_'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BOOLEAN\""
@@ -322,7 +313,6 @@ org.apache.spark.SparkRuntimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'off_'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BOOLEAN\""
@@ -355,7 +345,6 @@ org.apache.spark.SparkRuntimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'11'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BOOLEAN\""
@@ -388,7 +377,6 @@ org.apache.spark.SparkRuntimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'000'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BOOLEAN\""
@@ -413,7 +401,6 @@ org.apache.spark.SparkRuntimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "''",
"sourceType" : "\"STRING\"",
"targetType" : "\"BOOLEAN\""
@@ -535,7 +522,6 @@ org.apache.spark.SparkRuntimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "' tru e '",
"sourceType" : "\"STRING\"",
"targetType" : "\"BOOLEAN\""
@@ -560,7 +546,6 @@ org.apache.spark.SparkRuntimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "''",
"sourceType" : "\"STRING\"",
"targetType" : "\"BOOLEAN\""
diff --git
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
index 6b4b343d9cca..1a15610b4ded 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
@@ -97,7 +97,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'N A N'",
"sourceType" : "\"STRING\"",
"targetType" : "\"FLOAT\""
@@ -122,7 +121,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'NaN x'",
"sourceType" : "\"STRING\"",
"targetType" : "\"FLOAT\""
@@ -147,7 +145,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "' INFINITY x'",
"sourceType" : "\"STRING\"",
"targetType" : "\"FLOAT\""
@@ -196,7 +193,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'nan'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DECIMAL(10,0)\""
@@ -393,7 +389,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"FLOAT\"",
"targetType" : "\"INT\"",
"value" : "2.14748365E9"
@@ -419,7 +414,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"FLOAT\"",
"targetType" : "\"INT\"",
"value" : "-2.1474839E9"
@@ -461,7 +455,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"FLOAT\"",
"targetType" : "\"BIGINT\"",
"value" : "-9.22338E18"
diff --git
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
index e1b880f34370..b1a114bea30e 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
@@ -129,7 +129,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'N A N'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DOUBLE\""
@@ -154,7 +153,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'NaN x'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DOUBLE\""
@@ -179,7 +177,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "' INFINITY x'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DOUBLE\""
@@ -228,7 +225,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'nan'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DECIMAL(10,0)\""
@@ -898,7 +894,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"DOUBLE\"",
"targetType" : "\"BIGINT\"",
"value" : "-9.22337203685478E18D"
diff --git
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
index f6e4bd8bd7e0..5e8abc273b12 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
@@ -737,7 +737,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"BIGINT\"",
"targetType" : "\"INT\"",
"value" : "4567890123456789L"
@@ -763,7 +762,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"BIGINT\"",
"targetType" : "\"SMALLINT\"",
"value" : "4567890123456789L"
@@ -809,7 +807,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"DOUBLE\"",
"targetType" : "\"BIGINT\"",
"value" : "9.223372036854776E20D"
@@ -898,7 +895,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"BIGINT\"",
"targetType" : "\"INT\"",
"value" : "-9223372036854775808L"
diff --git
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
index 37b8a3e8fd19..0a940f5f3c74 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
@@ -66,7 +66,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'four: 2'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BIGINT\""
@@ -91,7 +90,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'four: 2'",
"sourceType" : "\"STRING\"",
"targetType" : "\"BIGINT\""
diff --git
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/union.sql.out
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/union.sql.out
index 7c920bbd32b3..94692a57300f 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/union.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/union.sql.out
@@ -700,7 +700,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'foo'",
"sourceType" : "\"STRING\"",
"targetType" : "\"DOUBLE\""
diff --git
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
index 6cf5e69758d2..352c5f05cb06 100644
---
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
@@ -489,7 +489,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'NaN'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
diff --git
a/sql/core/src/test/resources/sql-tests/results/sql-session-variables.sql.out
b/sql/core/src/test/resources/sql-tests/results/sql-session-variables.sql.out
index 73d3ec737085..249a03fdfbf8 100644
---
a/sql/core/src/test/resources/sql-tests/results/sql-session-variables.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/sql-session-variables.sql.out
@@ -943,7 +943,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'hello'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -990,7 +989,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"INT\"",
"targetType" : "\"SMALLINT\"",
"value" : "100000"
@@ -1104,7 +1102,6 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"DOUBLE\"",
"targetType" : "\"INT\"",
"value" : "1.0E10D"
@@ -1171,7 +1168,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'hello'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
diff --git
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index 25aaadfc8e78..cd94674d2bf2 100644
---
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -384,7 +384,6 @@ org.apache.spark.SparkDateTimeException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'1'",
"sourceType" : "\"STRING\"",
"targetType" : "\"TIMESTAMP_NTZ\""
diff --git
a/sql/core/src/test/resources/sql-tests/results/view-schema-binding-config.sql.out
b/sql/core/src/test/resources/sql-tests/results/view-schema-binding-config.sql.out
index b0d497e07047..4288457d56b4 100644
---
a/sql/core/src/test/resources/sql-tests/results/view-schema-binding-config.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/view-schema-binding-config.sql.out
@@ -701,7 +701,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
diff --git
a/sql/core/src/test/resources/sql-tests/results/view-schema-compensation.sql.out
b/sql/core/src/test/resources/sql-tests/results/view-schema-compensation.sql.out
index ffd1fbec47bb..641365309d51 100644
---
a/sql/core/src/test/resources/sql-tests/results/view-schema-compensation.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/view-schema-compensation.sql.out
@@ -187,7 +187,6 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'a'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
index fc344ca0be3f..170105200f1d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
@@ -445,8 +445,7 @@ trait SQLInsertTestSuite extends QueryTest with
SQLTestUtils with AdaptiveSparkP
parameters = Map(
"expression" -> "'ansi'",
"sourceType" -> "\"STRING\"",
- "targetType" -> "\"INT\"",
- "ansiConfig" -> "\"spark.sql.ansi.enabled\""
+ "targetType" -> "\"INT\""
),
context = ExpectedContext("insert into t partition(a='ansi')",
0, 32)
)
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
index c330e944cae8..ec92e0b700e3 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
@@ -48,8 +48,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest
condition = "CAST_OVERFLOW",
parameters = Map("value" -> "TIMESTAMP '9999-12-31 04:13:14.56789'",
"sourceType" -> "\"TIMESTAMP\"",
- "targetType" -> "\"INT\"",
- "ansiConfig" -> ansiConf),
+ "targetType" -> "\"INT\""),
sqlState = "22003")
}
@@ -212,8 +211,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest
parameters = Map(
"expression" -> "'111111111111xe23'",
"sourceType" -> "\"STRING\"",
- "targetType" -> "\"DOUBLE\"",
- "ansiConfig" -> ansiConf),
+ "targetType" -> "\"DOUBLE\""),
context = ExpectedContext(
fragment = "CAST('111111111111xe23' AS DOUBLE)",
start = 7,
@@ -227,8 +225,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest
parameters = Map(
"expression" -> "'111111111111xe23'",
"sourceType" -> "\"STRING\"",
- "targetType" -> "\"DOUBLE\"",
- "ansiConfig" -> ansiConf),
+ "targetType" -> "\"DOUBLE\""),
context = ExpectedContext(
fragment = "cast",
callSitePattern = getCurrentClassCallSitePattern))
@@ -275,8 +272,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest
condition = "CAST_OVERFLOW",
parameters = Map("value" -> "1.2345678901234567E19D",
"sourceType" -> "\"DOUBLE\"",
- "targetType" -> ("\"TINYINT\""),
- "ansiConfig" -> ansiConf)
+ "targetType" -> ("\"TINYINT\""))
)
}
@@ -294,8 +290,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest
condition = "CAST_OVERFLOW",
parameters = Map("value" -> "-1.2345678901234567E19D",
"sourceType" -> "\"DOUBLE\"",
- "targetType" -> "\"TINYINT\"",
- "ansiConfig" -> ansiConf),
+ "targetType" -> "\"TINYINT\""),
sqlState = "22003")
}
}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index 307e70d8c6b7..da1366350d03 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -734,8 +734,7 @@ class QueryExecutionErrorsSuite
parameters = Map(
"value" -> sourceValue,
"sourceType" -> s""""${sourceType.sql}"""",
- "targetType" -> s""""$it"""",
- "ansiConfig" -> s""""${SQLConf.ANSI_ENABLED.key}""""),
+ "targetType" -> s""""$it""""),
sqlState = "22003")
}
}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableAddPartitionSuiteBase.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableAddPartitionSuiteBase.scala
index 8c338f71e8a3..cb25942822f4 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableAddPartitionSuiteBase.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableAddPartitionSuiteBase.scala
@@ -236,7 +236,6 @@ trait AlterTableAddPartitionSuiteBase extends QueryTest
with DDLCommandTestUtils
},
condition = "CAST_INVALID_INPUT",
parameters = Map(
- "ansiConfig" -> "\"spark.sql.ansi.enabled\"",
"expression" -> "'aaa'",
"sourceType" -> "\"STRING\"",
"targetType" -> "\"INT\""),
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala
index ac142e029681..e3b6a9b5e610 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala
@@ -148,7 +148,6 @@ class AlterTableAddPartitionSuite
},
condition = "CAST_INVALID_INPUT",
parameters = Map(
- "ansiConfig" -> "\"spark.sql.ansi.enabled\"",
"expression" -> "'aaa'",
"sourceType" -> "\"STRING\"",
"targetType" -> "\"INT\""),
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]