This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 2cb66e6a86ee [SPARK-49632][SQL] Remove the ANSI config suggestion in 
CANNOT_PARSE_TIMESTAMP
2cb66e6a86ee is described below

commit 2cb66e6a86ee223dc97bf68579ae426af954af0d
Author: Mihailo Milosevic <[email protected]>
AuthorDate: Thu Dec 19 08:48:42 2024 +0900

    [SPARK-49632][SQL] Remove the ANSI config suggestion in 
CANNOT_PARSE_TIMESTAMP
    
    ### What changes were proposed in this pull request?
    This PR changes the message returned on a failure of `ToTimestamp` family 
of expressions.
    
    ### Why are the changes needed?
    CANNOT_PARSE_TIMESTAMP contains a suggested fix for turning off ANSI mode. 
Now that in Spark 4.0.0 we have moved to ANSI mode on by default, we want to 
keep suggestions of this kind to a minimum.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes, error message is changing.
    
    ### How was this patch tested?
    Existing tests cover the error message change.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #49227 from mihailom-db/cannot_parse_timestamp.
    
    Authored-by: Mihailo Milosevic <[email protected]>
    Signed-off-by: Hyukjin Kwon <[email protected]>
---
 .../src/main/resources/error/error-conditions.json |  2 +-
 .../spark/sql/errors/QueryExecutionErrors.scala    |  3 +--
 .../test/resources/sql-tests/results/date.sql.out  |  1 -
 .../sql-tests/results/datetime-legacy.sql.out      | 24 ----------------------
 .../results/datetime-parsing-invalid.sql.out       | 24 ----------------------
 .../resources/sql-tests/results/timestamp.sql.out  |  6 ------
 .../results/timestampNTZ/timestamp-ansi.sql.out    |  6 ------
 .../native/stringCastAndExpressions.sql.out        |  2 --
 .../sql/errors/QueryExecutionAnsiErrorsSuite.scala |  4 +---
 9 files changed, 3 insertions(+), 69 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json 
b/common/utils/src/main/resources/error/error-conditions.json
index 8c2fc8a038b0..695f89d741c1 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -377,7 +377,7 @@
   },
   "CANNOT_PARSE_TIMESTAMP" : {
     "message" : [
-      "<message>. If necessary set <ansiConfig> to \"false\" to bypass this 
error."
+      "<message>. Use `try_to_timestamp` to tolerate invalid input string and 
return NULL instead."
     ],
     "sqlState" : "22007"
   },
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 86137fc1c3c0..061eaf45cffb 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -269,8 +269,7 @@ private[sql] object QueryExecutionErrors extends 
QueryErrorsBase with ExecutionE
     new SparkDateTimeException(
       errorClass = "CANNOT_PARSE_TIMESTAMP",
       messageParameters = Map(
-        "message" -> e.getMessage,
-        "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)),
+        "message" -> e.getMessage),
       context = Array.empty,
       summary = "")
   }
diff --git a/sql/core/src/test/resources/sql-tests/results/date.sql.out 
b/sql/core/src/test/resources/sql-tests/results/date.sql.out
index aa283d324961..8b892b1795a1 100644
--- a/sql/core/src/test/resources/sql-tests/results/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/date.sql.out
@@ -207,7 +207,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid date 'February 29' as '1970' is not a leap year"
   }
 }
diff --git 
a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out 
b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
index 5635196efc2e..9a40d4fd1316 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
@@ -207,7 +207,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"02-29\""
   }
 }
@@ -1585,7 +1584,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.\""
   }
 }
@@ -1601,7 +1599,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.0\""
   }
 }
@@ -1617,7 +1614,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.1\""
   }
 }
@@ -1633,7 +1629,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.12\""
   }
 }
@@ -1649,7 +1644,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.123UTC\""
   }
 }
@@ -1665,7 +1659,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.1234\""
   }
 }
@@ -1681,7 +1674,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.12345CST\""
   }
 }
@@ -1697,7 +1689,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.123456PST\""
   }
 }
@@ -1713,7 +1704,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.1234567PST\""
   }
 }
@@ -1729,7 +1719,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"123456 2019-10-06 10:11:12.123456PST\""
   }
 }
@@ -1745,7 +1734,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"223456 2019-10-06 10:11:12.123456PST\""
   }
 }
@@ -1761,7 +1749,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.1234\""
   }
 }
@@ -1777,7 +1764,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.123\""
   }
 }
@@ -1793,7 +1779,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"2019-10-06 10:11:12\""
   }
 }
@@ -1809,7 +1794,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.12\""
   }
 }
@@ -1825,7 +1809,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"2019-10-06 10:11\""
   }
 }
@@ -1841,7 +1824,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"2019-10-06S10:11:12.12345\""
   }
 }
@@ -1857,7 +1839,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"12.12342019-10-06S10:11\""
   }
 }
@@ -1873,7 +1854,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"12.1232019-10-06S10:11\""
   }
 }
@@ -1889,7 +1869,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"12.1232019-10-06S10:11\""
   }
 }
@@ -1905,7 +1884,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"12.1234019-10-06S10:11\""
   }
 }
@@ -1977,7 +1955,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"02-29\""
   }
 }
@@ -2208,7 +2185,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Unparseable date: \"2019-10-06 A\""
   }
 }
diff --git 
a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
index 0708a523900f..9e3b0e2d1e0f 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
@@ -18,7 +18,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '1' could not be parsed at index 0"
   }
 }
@@ -34,7 +33,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '-12' could not be parsed at index 0"
   }
 }
@@ -50,7 +48,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '123' could not be parsed, unparsed text found at index 
2"
   }
 }
@@ -66,7 +63,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '1' could not be parsed at index 0"
   }
 }
@@ -99,7 +95,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid date 'DayOfYear 366' as '1970' is not a leap year"
   }
 }
@@ -115,7 +110,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '9' could not be parsed at index 0"
   }
 }
@@ -131,7 +125,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid date 'DayOfYear 366' as '1970' is not a leap year"
   }
 }
@@ -147,7 +140,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '9' could not be parsed at index 0"
   }
 }
@@ -163,7 +155,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '99' could not be parsed at index 0"
   }
 }
@@ -179,7 +170,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Conflict found: Field DayOfMonth 30 differs from DayOfMonth 
31 derived from 1970-12-31."
   }
 }
@@ -195,7 +185,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Conflict found: Field MonthOfYear 11 differs from MonthOfYear 
12 derived from 1970-12-31."
   }
 }
@@ -211,7 +200,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '2019-366' could not be parsed: Invalid date 'DayOfYear 
366' as '2019' is not a leap year"
   }
 }
@@ -227,7 +215,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Conflict found: Field DayOfMonth 30 differs from DayOfMonth 
31 derived from 1970-12-31."
   }
 }
@@ -243,7 +230,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '2020-01-365' could not be parsed: Conflict found: Field 
DayOfMonth 30 differs from DayOfMonth 1 derived from 2020-12-30"
   }
 }
@@ -259,7 +245,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '2020-10-350' could not be parsed: Conflict found: Field 
MonthOfYear 12 differs from MonthOfYear 10 derived from 2020-12-15"
   }
 }
@@ -275,7 +260,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '2020-11-31-366' could not be parsed: Invalid date 
'NOVEMBER 31'"
   }
 }
@@ -299,7 +283,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '2020-01-27T20:06:11.847' could not be parsed at index 
10"
   }
 }
@@ -315,7 +298,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text 'Unparseable' could not be parsed at index 0"
   }
 }
@@ -331,7 +313,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '2020-01-27T20:06:11.847' could not be parsed at index 
10"
   }
 }
@@ -347,7 +328,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text 'Unparseable' could not be parsed at index 0"
   }
 }
@@ -363,7 +343,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '2020-01-27T20:06:11.847' could not be parsed at index 
10"
   }
 }
@@ -379,7 +358,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text 'Unparseable' could not be parsed at index 0"
   }
 }
@@ -395,7 +373,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '2020-01-27T20:06:11.847' could not be parsed at index 
10"
   }
 }
@@ -411,7 +388,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text 'Unparseable' could not be parsed at index 0"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out 
b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
index e3cf1a154922..69025271601e 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
@@ -395,7 +395,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '2019-10-06 10:11:12.' could not be parsed at index 20"
   }
 }
@@ -467,7 +466,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '2019-10-06 10:11:12.1234567PST' could not be parsed, 
unparsed text found at index 26"
   }
 }
@@ -491,7 +489,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '223456 2019-10-06 10:11:12.123456PST' could not be 
parsed at index 27"
   }
 }
@@ -563,7 +560,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '12.1232019-10-06S10:11' could not be parsed at index 7"
   }
 }
@@ -579,7 +575,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '12.1232019-10-06S10:11' could not be parsed at index 9"
   }
 }
@@ -659,7 +654,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid date 'February 29' as '1970' is not a leap year"
   }
 }
diff --git 
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index 681306ba9f40..14835ae0c592 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -409,7 +409,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '2019-10-06 10:11:12.' could not be parsed at index 20"
   }
 }
@@ -481,7 +480,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '2019-10-06 10:11:12.1234567PST' could not be parsed, 
unparsed text found at index 26"
   }
 }
@@ -505,7 +503,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '223456 2019-10-06 10:11:12.123456PST' could not be 
parsed at index 27"
   }
 }
@@ -577,7 +574,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '12.1232019-10-06S10:11' could not be parsed at index 7"
   }
 }
@@ -593,7 +589,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text '12.1232019-10-06S10:11' could not be parsed at index 9"
   }
 }
@@ -673,7 +668,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid date 'February 29' as '1970' is not a leap year"
   }
 }
diff --git 
a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
index 781fff4835c5..0b043c70cc23 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
@@ -376,7 +376,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text 'aa' could not be parsed at index 0"
   }
 }
@@ -409,7 +408,6 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Text 'aa' could not be parsed at index 0"
   }
 }
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
index a9ff7c308c15..52ef662080fb 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
@@ -239,9 +239,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest
         sql("select to_timestamp('abc', 'yyyy-MM-dd HH:mm:ss')").collect()
       },
       condition = "CANNOT_PARSE_TIMESTAMP",
-      parameters = Map(
-        "message" -> "Text 'abc' could not be parsed at index 0",
-        "ansiConfig" -> ansiConf)
+      parameters = Map("message" -> "Text 'abc' could not be parsed at index 
0")
     )
   }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to