This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new be2da526b482 [SPARK-49632][SQL][FOLLOW-UP] Fix suggestion for 
`to_date` function
be2da526b482 is described below

commit be2da526b4823216edbd0192ba5a2994718f13a9
Author: Mihailo Milosevic <[email protected]>
AuthorDate: Thu Dec 26 11:39:58 2024 +0900

    [SPARK-49632][SQL][FOLLOW-UP] Fix suggestion for `to_date` function
    
    ### What changes were proposed in this pull request?
    Change of the suggested function on failure from `try_to_timestamp` to 
`try_to_date` for the `to_date` function.
    
    ### Why are the changes needed?
    In the original PR https://github.com/apache/spark/pull/49227 we removed the ANSI 
suggestion and left only a suggestion of the try function to use. In the case of 
`to_date`, the use of `try_to_date` is more appropriate.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes
    
    ### How was this patch tested?
    Existing tests cover the error message change.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #49290 from mihailom-db/cannot-parse-timestamp-follow-up.
    
    Authored-by: Mihailo Milosevic <[email protected]>
    Signed-off-by: Hyukjin Kwon <[email protected]>
---
 .../src/main/resources/error/error-conditions.json |  2 +-
 .../catalyst/expressions/datetimeExpressions.scala | 14 +++++++------
 .../spark/sql/errors/QueryExecutionErrors.scala    |  6 ++++--
 .../function_to_date_with_format.explain           |  2 +-
 .../function_to_timestamp_ltz_with_format.explain  |  2 +-
 .../function_to_timestamp_ntz_with_format.explain  |  2 +-
 .../function_to_timestamp_with_format.explain      |  2 +-
 .../function_try_to_timestamp.explain              |  2 +-
 .../explain-results/function_unix_date.explain     |  2 +-
 .../explain-results/function_unix_micros.explain   |  2 +-
 .../explain-results/function_unix_millis.explain   |  2 +-
 .../explain-results/function_unix_seconds.explain  |  2 +-
 .../test/resources/sql-tests/results/date.sql.out  |  1 +
 .../sql-tests/results/datetime-legacy.sql.out      | 24 ++++++++++++++++++++++
 .../results/datetime-parsing-invalid.sql.out       | 24 ++++++++++++++++++++++
 .../resources/sql-tests/results/timestamp.sql.out  |  6 ++++++
 .../results/timestampNTZ/timestamp-ansi.sql.out    |  6 ++++++
 .../native/stringCastAndExpressions.sql.out        |  2 ++
 .../sql/errors/QueryExecutionAnsiErrorsSuite.scala |  4 +++-
 19 files changed, 88 insertions(+), 19 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json 
b/common/utils/src/main/resources/error/error-conditions.json
index deb62866f072..26b3de7f5089 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -377,7 +377,7 @@
   },
   "CANNOT_PARSE_TIMESTAMP" : {
     "message" : [
-      "<message>. Use `try_to_timestamp` to tolerate invalid input string and 
return NULL instead."
+      "<message>. Use <func> to tolerate invalid input string and return NULL 
instead."
     ],
     "sqlState" : "22007"
   },
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 55e6c7f1503f..12fe456eace8 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -1129,6 +1129,7 @@ case class GetTimestamp(
     left: Expression,
     right: Expression,
     override val dataType: DataType,
+    override val suggestedFuncOnFail: String = "try_to_timestamp",
     timeZoneId: Option[String] = None,
     failOnError: Boolean = SQLConf.get.ansiEnabled) extends ToTimestamp {
 
@@ -1267,6 +1268,7 @@ object TryToTimestampExpressionBuilder extends 
ExpressionBuilder {
 abstract class ToTimestamp
   extends BinaryExpression with TimestampFormatterHelper with 
ExpectsInputTypes {
 
+  val suggestedFuncOnFail: String = "try_to_timestamp"
   def failOnError: Boolean
 
   // The result of the conversion to timestamp is microseconds divided by this 
factor.
@@ -1321,9 +1323,9 @@ abstract class ToTimestamp
               }
             } catch {
               case e: DateTimeException if failOnError =>
-                throw QueryExecutionErrors.ansiDateTimeParseError(e)
+                throw QueryExecutionErrors.ansiDateTimeParseError(e, 
suggestedFuncOnFail)
               case e: ParseException if failOnError =>
-                throw QueryExecutionErrors.ansiDateTimeParseError(e)
+                throw QueryExecutionErrors.ansiDateTimeParseError(e, 
suggestedFuncOnFail)
               case e if isParseError(e) => null
             }
           }
@@ -1334,7 +1336,7 @@ abstract class ToTimestamp
   override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
     val javaType = CodeGenerator.javaType(dataType)
     val parseErrorBranch: String = if (failOnError) {
-      "throw QueryExecutionErrors.ansiDateTimeParseError(e);"
+      s"throw QueryExecutionErrors.ansiDateTimeParseError(e, 
\"${suggestedFuncOnFail}\");"
     } else {
       s"${ev.isNull} = true;"
     }
@@ -2100,8 +2102,8 @@ case class ParseToDate(
   extends RuntimeReplaceable with ImplicitCastInputTypes with 
TimeZoneAwareExpression {
 
   override lazy val replacement: Expression = format.map { f =>
-    Cast(GetTimestamp(left, f, TimestampType, timeZoneId, ansiEnabled), 
DateType, timeZoneId,
-      EvalMode.fromBoolean(ansiEnabled))
+    Cast(GetTimestamp(left, f, TimestampType, "try_to_date", timeZoneId, 
ansiEnabled), DateType,
+      timeZoneId, EvalMode.fromBoolean(ansiEnabled))
   }.getOrElse(Cast(left, DateType, timeZoneId,
     EvalMode.fromBoolean(ansiEnabled))) // backwards compatibility
 
@@ -2179,7 +2181,7 @@ case class ParseToTimestamp(
   extends RuntimeReplaceable with ImplicitCastInputTypes with 
TimeZoneAwareExpression {
 
   override lazy val replacement: Expression = format.map { f =>
-    GetTimestamp(left, f, dataType, timeZoneId, failOnError = failOnError)
+    GetTimestamp(left, f, dataType, "try_to_timestamp", timeZoneId, 
failOnError = failOnError)
   }.getOrElse(Cast(left, dataType, timeZoneId, ansiEnabled = failOnError))
 
   def this(left: Expression, format: Expression) = {
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 061eaf45cffb..2ec85a38723c 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -265,11 +265,13 @@ private[sql] object QueryExecutionErrors extends 
QueryErrorsBase with ExecutionE
       summary = "")
   }
 
-  def ansiDateTimeParseError(e: Exception): SparkDateTimeException = {
+  def ansiDateTimeParseError(e: Exception, suggestedFunc: String): 
SparkDateTimeException = {
     new SparkDateTimeException(
       errorClass = "CANNOT_PARSE_TIMESTAMP",
       messageParameters = Map(
-        "message" -> e.getMessage),
+        "message" -> e.getMessage,
+        "func" -> toSQLId(suggestedFunc)
+      ),
       context = Array.empty,
       summary = "")
   }
diff --git 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_to_date_with_format.explain
 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_to_date_with_format.explain
index 3557274e9de8..51270c147549 100644
--- 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_to_date_with_format.explain
+++ 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_to_date_with_format.explain
@@ -1,2 +1,2 @@
-Project [cast(gettimestamp(s#0, yyyy-MM-dd, TimestampType, 
Some(America/Los_Angeles), false) as date) AS to_date(s, yyyy-MM-dd)#0]
+Project [cast(gettimestamp(s#0, yyyy-MM-dd, TimestampType, try_to_date, 
Some(America/Los_Angeles), false) as date) AS to_date(s, yyyy-MM-dd)#0]
 +- LocalRelation <empty>, [d#0, t#0, s#0, x#0L, wt#0]
diff --git 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_to_timestamp_ltz_with_format.explain
 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_to_timestamp_ltz_with_format.explain
index e212c8d51a62..e66fdba89e0f 100644
--- 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_to_timestamp_ltz_with_format.explain
+++ 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_to_timestamp_ltz_with_format.explain
@@ -1,2 +1,2 @@
-Project [gettimestamp(g#0, g#0, TimestampType, Some(America/Los_Angeles), 
false) AS to_timestamp_ltz(g, g)#0]
+Project [gettimestamp(g#0, g#0, TimestampType, try_to_timestamp, 
Some(America/Los_Angeles), false) AS to_timestamp_ltz(g, g)#0]
 +- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_to_timestamp_ntz_with_format.explain
 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_to_timestamp_ntz_with_format.explain
index 10ca240877fe..f133becf7823 100644
--- 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_to_timestamp_ntz_with_format.explain
+++ 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_to_timestamp_ntz_with_format.explain
@@ -1,2 +1,2 @@
-Project [gettimestamp(g#0, g#0, TimestampNTZType, Some(America/Los_Angeles), 
false) AS to_timestamp_ntz(g, g)#0]
+Project [gettimestamp(g#0, g#0, TimestampNTZType, try_to_timestamp, 
Some(America/Los_Angeles), false) AS to_timestamp_ntz(g, g)#0]
 +- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_to_timestamp_with_format.explain
 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_to_timestamp_with_format.explain
index 54e1c0348a3a..514b6705fa8e 100644
--- 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_to_timestamp_with_format.explain
+++ 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_to_timestamp_with_format.explain
@@ -1,2 +1,2 @@
-Project [gettimestamp(s#0, yyyy-MM-dd HH:mm:ss.SSSS, TimestampType, 
Some(America/Los_Angeles), false) AS to_timestamp(s, yyyy-MM-dd 
HH:mm:ss.SSSS)#0]
+Project [gettimestamp(s#0, yyyy-MM-dd HH:mm:ss.SSSS, TimestampType, 
try_to_timestamp, Some(America/Los_Angeles), false) AS to_timestamp(s, 
yyyy-MM-dd HH:mm:ss.SSSS)#0]
 +- LocalRelation <empty>, [d#0, t#0, s#0, x#0L, wt#0]
diff --git 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_try_to_timestamp.explain
 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_try_to_timestamp.explain
index 8074beab7db8..c4dd956e8342 100644
--- 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_try_to_timestamp.explain
+++ 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_try_to_timestamp.explain
@@ -1,2 +1,2 @@
-Project [gettimestamp(g#0, g#0, TimestampType, Some(America/Los_Angeles), 
false) AS try_to_timestamp(g, g)#0]
+Project [gettimestamp(g#0, g#0, TimestampType, try_to_timestamp, 
Some(America/Los_Angeles), false) AS try_to_timestamp(g, g)#0]
 +- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_unix_date.explain
 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_unix_date.explain
index a1934253d93b..7ac1d31802ba 100644
--- 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_unix_date.explain
+++ 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_unix_date.explain
@@ -1,2 +1,2 @@
-Project [unix_date(cast(gettimestamp(s#0, yyyy-MM-dd, TimestampType, 
Some(America/Los_Angeles), false) as date)) AS unix_date(to_date(s, 
yyyy-MM-dd))#0]
+Project [unix_date(cast(gettimestamp(s#0, yyyy-MM-dd, TimestampType, 
try_to_date, Some(America/Los_Angeles), false) as date)) AS 
unix_date(to_date(s, yyyy-MM-dd))#0]
 +- LocalRelation <empty>, [d#0, t#0, s#0, x#0L, wt#0]
diff --git 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_unix_micros.explain
 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_unix_micros.explain
index fb5cdd36f9b7..e5337b0f6c49 100644
--- 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_unix_micros.explain
+++ 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_unix_micros.explain
@@ -1,2 +1,2 @@
-Project [unix_micros(gettimestamp(s#0, yyyy-MM-dd HH:mm:ss.SSSS, 
TimestampType, Some(America/Los_Angeles), false)) AS 
unix_micros(to_timestamp(s, yyyy-MM-dd HH:mm:ss.SSSS))#0L]
+Project [unix_micros(gettimestamp(s#0, yyyy-MM-dd HH:mm:ss.SSSS, 
TimestampType, try_to_timestamp, Some(America/Los_Angeles), false)) AS 
unix_micros(to_timestamp(s, yyyy-MM-dd HH:mm:ss.SSSS))#0L]
 +- LocalRelation <empty>, [d#0, t#0, s#0, x#0L, wt#0]
diff --git 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_unix_millis.explain
 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_unix_millis.explain
index 3382c9ed679c..5c852467a350 100644
--- 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_unix_millis.explain
+++ 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_unix_millis.explain
@@ -1,2 +1,2 @@
-Project [unix_millis(gettimestamp(s#0, yyyy-MM-dd HH:mm:ss.SSSS, 
TimestampType, Some(America/Los_Angeles), false)) AS 
unix_millis(to_timestamp(s, yyyy-MM-dd HH:mm:ss.SSSS))#0L]
+Project [unix_millis(gettimestamp(s#0, yyyy-MM-dd HH:mm:ss.SSSS, 
TimestampType, try_to_timestamp, Some(America/Los_Angeles), false)) AS 
unix_millis(to_timestamp(s, yyyy-MM-dd HH:mm:ss.SSSS))#0L]
 +- LocalRelation <empty>, [d#0, t#0, s#0, x#0L, wt#0]
diff --git 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_unix_seconds.explain
 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_unix_seconds.explain
index d21c36886973..03d4386edda7 100644
--- 
a/sql/connect/common/src/test/resources/query-tests/explain-results/function_unix_seconds.explain
+++ 
b/sql/connect/common/src/test/resources/query-tests/explain-results/function_unix_seconds.explain
@@ -1,2 +1,2 @@
-Project [unix_seconds(gettimestamp(s#0, yyyy-MM-dd HH:mm:ss.SSSS, 
TimestampType, Some(America/Los_Angeles), false)) AS 
unix_seconds(to_timestamp(s, yyyy-MM-dd HH:mm:ss.SSSS))#0L]
+Project [unix_seconds(gettimestamp(s#0, yyyy-MM-dd HH:mm:ss.SSSS, 
TimestampType, try_to_timestamp, Some(America/Los_Angeles), false)) AS 
unix_seconds(to_timestamp(s, yyyy-MM-dd HH:mm:ss.SSSS))#0L]
 +- LocalRelation <empty>, [d#0, t#0, s#0, x#0L, wt#0]
diff --git a/sql/core/src/test/resources/sql-tests/results/date.sql.out 
b/sql/core/src/test/resources/sql-tests/results/date.sql.out
index 8b892b1795a1..37cf05c6386d 100644
--- a/sql/core/src/test/resources/sql-tests/results/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/date.sql.out
@@ -207,6 +207,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_date`",
     "message" : "Invalid date 'February 29' as '1970' is not a leap year"
   }
 }
diff --git 
a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out 
b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
index 9a40d4fd1316..feae373b52ca 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
@@ -207,6 +207,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_date`",
     "message" : "Unparseable date: \"02-29\""
   }
 }
@@ -1584,6 +1585,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.\""
   }
 }
@@ -1599,6 +1601,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.0\""
   }
 }
@@ -1614,6 +1617,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.1\""
   }
 }
@@ -1629,6 +1633,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.12\""
   }
 }
@@ -1644,6 +1649,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.123UTC\""
   }
 }
@@ -1659,6 +1665,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.1234\""
   }
 }
@@ -1674,6 +1681,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.12345CST\""
   }
 }
@@ -1689,6 +1697,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.123456PST\""
   }
 }
@@ -1704,6 +1713,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.1234567PST\""
   }
 }
@@ -1719,6 +1729,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"123456 2019-10-06 10:11:12.123456PST\""
   }
 }
@@ -1734,6 +1745,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"223456 2019-10-06 10:11:12.123456PST\""
   }
 }
@@ -1749,6 +1761,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.1234\""
   }
 }
@@ -1764,6 +1777,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.123\""
   }
 }
@@ -1779,6 +1793,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"2019-10-06 10:11:12\""
   }
 }
@@ -1794,6 +1809,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"2019-10-06 10:11:12.12\""
   }
 }
@@ -1809,6 +1825,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"2019-10-06 10:11\""
   }
 }
@@ -1824,6 +1841,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"2019-10-06S10:11:12.12345\""
   }
 }
@@ -1839,6 +1857,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"12.12342019-10-06S10:11\""
   }
 }
@@ -1854,6 +1873,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"12.1232019-10-06S10:11\""
   }
 }
@@ -1869,6 +1889,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"12.1232019-10-06S10:11\""
   }
 }
@@ -1884,6 +1905,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"12.1234019-10-06S10:11\""
   }
 }
@@ -1955,6 +1977,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"02-29\""
   }
 }
@@ -2185,6 +2208,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Unparseable date: \"2019-10-06 A\""
   }
 }
diff --git 
a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
index 9e3b0e2d1e0f..cdc027846c34 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
@@ -18,6 +18,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '1' could not be parsed at index 0"
   }
 }
@@ -33,6 +34,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '-12' could not be parsed at index 0"
   }
 }
@@ -48,6 +50,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '123' could not be parsed, unparsed text found at index 
2"
   }
 }
@@ -63,6 +66,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '1' could not be parsed at index 0"
   }
 }
@@ -95,6 +99,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Invalid date 'DayOfYear 366' as '1970' is not a leap year"
   }
 }
@@ -110,6 +115,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '9' could not be parsed at index 0"
   }
 }
@@ -125,6 +131,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Invalid date 'DayOfYear 366' as '1970' is not a leap year"
   }
 }
@@ -140,6 +147,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '9' could not be parsed at index 0"
   }
 }
@@ -155,6 +163,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '99' could not be parsed at index 0"
   }
 }
@@ -170,6 +179,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Conflict found: Field DayOfMonth 30 differs from DayOfMonth 
31 derived from 1970-12-31."
   }
 }
@@ -185,6 +195,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Conflict found: Field MonthOfYear 11 differs from MonthOfYear 
12 derived from 1970-12-31."
   }
 }
@@ -200,6 +211,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '2019-366' could not be parsed: Invalid date 'DayOfYear 
366' as '2019' is not a leap year"
   }
 }
@@ -215,6 +227,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Conflict found: Field DayOfMonth 30 differs from DayOfMonth 
31 derived from 1970-12-31."
   }
 }
@@ -230,6 +243,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '2020-01-365' could not be parsed: Conflict found: Field 
DayOfMonth 30 differs from DayOfMonth 1 derived from 2020-12-30"
   }
 }
@@ -245,6 +259,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '2020-10-350' could not be parsed: Conflict found: Field 
MonthOfYear 12 differs from MonthOfYear 10 derived from 2020-12-15"
   }
 }
@@ -260,6 +275,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '2020-11-31-366' could not be parsed: Invalid date 
'NOVEMBER 31'"
   }
 }
@@ -283,6 +299,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_date`",
     "message" : "Text '2020-01-27T20:06:11.847' could not be parsed at index 
10"
   }
 }
@@ -298,6 +315,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_date`",
     "message" : "Text 'Unparseable' could not be parsed at index 0"
   }
 }
@@ -313,6 +331,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '2020-01-27T20:06:11.847' could not be parsed at index 
10"
   }
 }
@@ -328,6 +347,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text 'Unparseable' could not be parsed at index 0"
   }
 }
@@ -343,6 +363,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '2020-01-27T20:06:11.847' could not be parsed at index 
10"
   }
 }
@@ -358,6 +379,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text 'Unparseable' could not be parsed at index 0"
   }
 }
@@ -373,6 +395,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '2020-01-27T20:06:11.847' could not be parsed at index 
10"
   }
 }
@@ -388,6 +411,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text 'Unparseable' could not be parsed at index 0"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out 
b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
index 69025271601e..06a8fbad8a29 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
@@ -395,6 +395,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '2019-10-06 10:11:12.' could not be parsed at index 20"
   }
 }
@@ -466,6 +467,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '2019-10-06 10:11:12.1234567PST' could not be parsed, 
unparsed text found at index 26"
   }
 }
@@ -489,6 +491,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '223456 2019-10-06 10:11:12.123456PST' could not be 
parsed at index 27"
   }
 }
@@ -560,6 +563,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '12.1232019-10-06S10:11' could not be parsed at index 7"
   }
 }
@@ -575,6 +579,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '12.1232019-10-06S10:11' could not be parsed at index 9"
   }
 }
@@ -654,6 +659,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Invalid date 'February 29' as '1970' is not a leap year"
   }
 }
diff --git 
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index 14835ae0c592..933982f5cff6 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -409,6 +409,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '2019-10-06 10:11:12.' could not be parsed at index 20"
   }
 }
@@ -480,6 +481,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '2019-10-06 10:11:12.1234567PST' could not be parsed, 
unparsed text found at index 26"
   }
 }
@@ -503,6 +505,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '223456 2019-10-06 10:11:12.123456PST' could not be 
parsed at index 27"
   }
 }
@@ -574,6 +577,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '12.1232019-10-06S10:11' could not be parsed at index 7"
   }
 }
@@ -589,6 +593,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text '12.1232019-10-06S10:11' could not be parsed at index 9"
   }
 }
@@ -668,6 +673,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Invalid date 'February 29' as '1970' is not a leap year"
   }
 }
diff --git 
a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
index 0b043c70cc23..ea17c7f5289f 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
@@ -376,6 +376,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text 'aa' could not be parsed at index 0"
   }
 }
@@ -408,6 +409,7 @@ org.apache.spark.SparkDateTimeException
   "errorClass" : "CANNOT_PARSE_TIMESTAMP",
   "sqlState" : "22007",
   "messageParameters" : {
+    "func" : "`try_to_timestamp`",
     "message" : "Text 'aa' could not be parsed at index 0"
   }
 }
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
index 52ef662080fb..fde5a32e722f 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
@@ -239,7 +239,9 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest
         sql("select to_timestamp('abc', 'yyyy-MM-dd HH:mm:ss')").collect()
       },
       condition = "CANNOT_PARSE_TIMESTAMP",
-      parameters = Map("message" -> "Text 'abc' could not be parsed at index 
0")
+      parameters = Map(
+        "func" -> "`try_to_timestamp`",
+        "message" -> "Text 'abc' could not be parsed at index 0")
     )
   }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]


Reply via email to