This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 8a0b101  [SPARK-38112][SQL] Use error classes in the execution errors of date/timestamp handling
8a0b101 is described below

commit 8a0b101aa46976ccc36ff457319f8e38964f7dfd
Author: Tengfei Huang <tengfe...@gmail.com>
AuthorDate: Tue Mar 8 18:15:03 2022 +0300

    [SPARK-38112][SQL] Use error classes in the execution errors of date/timestamp handling
    
    ### What changes were proposed in this pull request?
    Migrate the following errors in QueryExecutionErrors to use error classes (a short illustrative sketch follows the list):
    - sparkUpgradeInReadingDatesError => INCONSISTENT_BEHAVIOR_CROSS_VERSION
    - sparkUpgradeInWritingDatesError => INCONSISTENT_BEHAVIOR_CROSS_VERSION
    - timeZoneIdNotSpecifiedForTimestampTypeError => UNSUPPORTED_OPERATION
    - cannotConvertOrcTimestampToTimestampNTZError => UNSUPPORTED_OPERATION
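    
    A minimal sketch (not part of the patch) of what this gives callers, using the new
    SparkUpgradeException constructor and getErrorClass accessor introduced below; the
    message parameters here are placeholders, not the real error text:
    
        import org.apache.spark.SparkUpgradeException
    
        // Build the exception the same way sparkUpgradeInReadingDatesError now does,
        // with an error class plus positional message parameters.
        val e = new SparkUpgradeException(
          errorClass = "INCONSISTENT_BEHAVIOR_CROSS_VERSION",
          messageParameters = Array("3.0", "some rebase-sensitive datetime read"),
          cause = null)
        // Callers can branch on the stable error class instead of parsing message text.
        assert(e.getErrorClass == "INCONSISTENT_BEHAVIOR_CROSS_VERSION")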
    
    ### Why are the changes needed?
    Porting date/timestamp execution errors to the new error framework.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    UT added.
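    
    A rough sketch (not the literal suite code) of the kind of assertion such a UT makes,
    assuming a SparkSession named `spark` and the default
    spark.sql.legacy.timeParserPolicy=EXCEPTION:
    
        import org.apache.spark.SparkUpgradeException
    
        // The invalid 'yy' parse below fails at execution time with the migrated error.
        val e = intercept[SparkUpgradeException] {
          spark.sql("select to_timestamp('1', 'yy')").collect()
        }
        assert(e.getErrorClass == "INCONSISTENT_BEHAVIOR_CROSS_VERSION")
        assert(e.getMessage.startsWith(
          "You may get a different result due to the upgrading to Spark >= 3.0"))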
    
    Closes #35670 from ivoson/SPARK-38112-Rebase.
    
    Lead-authored-by: Tengfei Huang <tengfe...@gmail.com>
    Co-authored-by: Huang Tengfei <tengfe...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json   |   6 ++
 .../scala/org/apache/spark/SparkException.scala    |  19 +++-
 .../spark/sql/errors/QueryExecutionErrors.scala    |  67 ++++++++-----
 .../expressions/DateExpressionsSuite.scala         |   2 +-
 .../resources/sql-tests/results/ansi/date.sql.out  |   6 +-
 .../results/ansi/datetime-parsing-invalid.sql.out  |  16 +--
 .../sql-tests/results/ansi/timestamp.sql.out       |  12 +--
 .../test/resources/sql-tests/results/date.sql.out  |   6 +-
 .../results/datetime-formatting-invalid.sql.out    |  44 ++++-----
 .../results/datetime-parsing-invalid.sql.out       |  16 +--
 .../sql-tests/results/json-functions.sql.out       |   4 +-
 .../resources/sql-tests/results/timestamp.sql.out  |  12 +--
 .../results/timestampNTZ/timestamp-ansi.sql.out    |   2 +-
 .../results/timestampNTZ/timestamp.sql.out         |   2 +-
 .../native/stringCastAndExpressions.sql.out        |   6 +-
 .../org/apache/spark/sql/DateFunctionsSuite.scala  |   4 +-
 .../sql/errors/QueryExecutionErrorsSuite.scala     | 109 ++++++++++++++++++++-
 .../execution/datasources/orc/OrcQuerySuite.scala  |  26 -----
 .../sql/execution/datasources/orc/OrcTest.scala    |   2 +-
 19 files changed, 238 insertions(+), 123 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 663454f..55e9373 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -70,6 +70,9 @@
   "INCOMPATIBLE_DATASOURCE_REGISTER" : {
     "message" : [ "Detected an incompatible DataSourceRegister. Please remove the incompatible library from classpath or upgrade it. Error: %s" ]
   },
+  "INCONSISTENT_BEHAVIOR_CROSS_VERSION" : {
+    "message" : [ "You may get a different result due to the upgrading to Spark >= %s: %s" ]
+  },
   "INDEX_OUT_OF_BOUNDS" : {
     "message" : [ "Index %s must be between 0 and the length of the ArrayData." ],
     "sqlState" : "22023"
@@ -162,6 +165,9 @@
   "UNSUPPORTED_GROUPING_EXPRESSION" : {
     "message" : [ "grouping()/grouping_id() can only be used with GroupingSets/Cube/Rollup" ]
   },
+  "UNSUPPORTED_OPERATION" : {
+    "message" : [ "The operation is not supported: %s" ]
+  },
   "WRITING_JOB_ABORTED" : {
     "message" : [ "Writing job aborted" ],
     "sqlState" : "40000"
diff --git a/core/src/main/scala/org/apache/spark/SparkException.scala b/core/src/main/scala/org/apache/spark/SparkException.scala
index aea09e3..8442c8e 100644
--- a/core/src/main/scala/org/apache/spark/SparkException.scala
+++ b/core/src/main/scala/org/apache/spark/SparkException.scala
@@ -71,9 +71,22 @@ private[spark] case class ExecutorDeadException(message: String)
 /**
  * Exception thrown when Spark returns different result after upgrading to a new version.
  */
-private[spark] class SparkUpgradeException(version: String, message: String, cause: Throwable)
-  extends RuntimeException("You may get a different result due to the upgrading of Spark" +
-    s" $version: $message", cause)
+private[spark] class SparkUpgradeException(
+    errorClass: String,
+    messageParameters: Array[String],
+    cause: Throwable)
+  extends RuntimeException(SparkThrowableHelper.getMessage(errorClass, messageParameters), cause)
+    with SparkThrowable {
+
+  def this(version: String, message: String, cause: Throwable) =
+    this (
+      errorClass = "INCONSISTENT_BEHAVIOR_CROSS_VERSION",
+      messageParameters = Array(version, message),
+      cause = cause
+    )
+
+  override def getErrorClass: String = errorClass
+}
 
 /**
  * Arithmetic exception thrown from Spark with an error class.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 6424b32..62b9616 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -533,30 +533,43 @@ object QueryExecutionErrors {
 
   def sparkUpgradeInReadingDatesError(
       format: String, config: String, option: String): SparkUpgradeException = {
-    new SparkUpgradeException("3.0",
-      s"""
-         |reading dates before 1582-10-15 or timestamps before 1900-01-01T00:00:00Z from $format
-         |files can be ambiguous, as the files may be written by Spark 2.x or legacy versions of
-         |Hive, which uses a legacy hybrid calendar that is different from Spark 3.0+'s Proleptic
-         |Gregorian calendar. See more details in SPARK-31404. You can set the SQL config
-         |'$config' or the datasource option '$option' to 'LEGACY' to rebase the datetime values
-         |w.r.t. the calendar difference during reading. To read the datetime values as it is,
-         |set the SQL config '$config' or the datasource option '$option' to 'CORRECTED'.
-       """.stripMargin, null)
+    new SparkUpgradeException(
+      errorClass = "INCONSISTENT_BEHAVIOR_CROSS_VERSION",
+      messageParameters = Array(
+        "3.0",
+        s"""
+           |reading dates before 1582-10-15 or timestamps before 1900-01-01T00:00:00Z
+           |from $format files can be ambiguous, as the files may be written by
+           |Spark 2.x or legacy versions of Hive, which uses a legacy hybrid calendar
+           |that is different from Spark 3.0+'s Proleptic Gregorian calendar.
+           |See more details in SPARK-31404. You can set the SQL config '$config' or
+           |the datasource option '$option' to 'LEGACY' to rebase the datetime values
+           |w.r.t. the calendar difference during reading. To read the datetime values
+           |as it is, set the SQL config '$config' or the datasource option '$option'
+           |to 'CORRECTED'.
+           |""".stripMargin),
+      cause = null
+    )
   }
 
   def sparkUpgradeInWritingDatesError(format: String, config: String): SparkUpgradeException = {
-    new SparkUpgradeException("3.0",
-      s"""
-         |writing dates before 1582-10-15 or timestamps before 1900-01-01T00:00:00Z into $format
-         |files can be dangerous, as the files may be read by Spark 2.x or legacy versions of Hive
-         |later, which uses a legacy hybrid calendar that is different from Spark 3.0+'s Proleptic
-         |Gregorian calendar. See more details in SPARK-31404. You can set $config to 'LEGACY' to
-         |rebase the datetime values w.r.t. the calendar difference during writing, to get maximum
-         |interoperability. Or set $config to 'CORRECTED' to write the datetime values as it is,
-         |if you are 100% sure that the written files will only be read by Spark 3.0+ or other
-         |systems that use Proleptic Gregorian calendar.
-       """.stripMargin, null)
+    new SparkUpgradeException(
+      errorClass = "INCONSISTENT_BEHAVIOR_CROSS_VERSION",
+      messageParameters = Array(
+        "3.0",
+        s"""
+           |writing dates before 1582-10-15 or timestamps before 1900-01-01T00:00:00Z
+           |into $format files can be dangerous, as the files may be read by Spark 2.x
+           |or legacy versions of Hive later, which uses a legacy hybrid calendar that
+           |is different from Spark 3.0+'s Proleptic Gregorian calendar. See more
+           |details in SPARK-31404. You can set $config to 'LEGACY' to rebase the
+           |datetime values w.r.t. the calendar difference during writing, to get maximum
+           |interoperability. Or set $config to 'CORRECTED' to write the datetime values
+           |as it is, if you are 100% sure that the written files will only be read by
+           |Spark 3.0+ or other systems that use Proleptic Gregorian calendar.
+           |""".stripMargin),
+      cause = null
+    )
   }
 
   def buildReaderUnsupportedForFileFormatError(format: String): Throwable = {
@@ -1617,8 +1630,12 @@ object QueryExecutionErrors {
   }
 
   def timeZoneIdNotSpecifiedForTimestampTypeError(): Throwable = {
-    new UnsupportedOperationException(
-      s"${TimestampType.catalogString} must supply timeZoneId parameter")
+    new SparkUnsupportedOperationException(
+      errorClass = "UNSUPPORTED_OPERATION",
+      messageParameters = Array(
+        s"${TimestampType.catalogString} must supply timeZoneId parameter " +
+          s"while converting to ArrowType")
+    )
   }
 
   def notPublicClassError(name: String): Throwable = {
@@ -1932,7 +1949,9 @@ object QueryExecutionErrors {
   }
 
   def cannotConvertOrcTimestampToTimestampNTZError(): Throwable = {
-    new RuntimeException("Unable to convert timestamp of Orc to data type 'timestamp_ntz'")
+    new SparkUnsupportedOperationException(
+      errorClass = "UNSUPPORTED_OPERATION",
+      messageParameters = Array("Unable to convert timestamp of Orc to data type 'timestamp_ntz'"))
   }
 
   def writePartitionExceedConfigSizeWhenDynamicPartitionError(
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index deb7203..ed4e934 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -1737,7 +1737,7 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
             exprSeq2.foreach(pair =>
               checkExceptionInExpression[SparkUpgradeException](
                 pair._1,
-                  "You may get a different result due to the upgrading of Spark 3.0"))
+                  "You may get a different result due to the upgrading to Spark >= 3.0"))
           } else {
             if (ansiEnabled) {
               exprSeq2.foreach(pair =>
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index a21512f..36cf228 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -641,7 +641,7 @@ select to_date('26/October/2015', 'dd/MMMMM/yyyy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -650,7 +650,7 @@ select from_json('{"d":"26/October/2015"}', 'd Date', map('dateFormat', 'dd/MMMM
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -659,7 +659,7 @@ select from_csv('26/October/2015', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy'))
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
index e6dd07b..5dc3b85 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
@@ -17,7 +17,7 @@ select to_timestamp('1', 'yy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
parse '1' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
parse '1' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
 
 
 -- !query
@@ -35,7 +35,7 @@ select to_timestamp('123', 'yy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
parse '123' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
parse '123' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
 
 
 -- !query
@@ -44,7 +44,7 @@ select to_timestamp('1', 'yyy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
parse '1' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
parse '1' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
 
 
 -- !query
@@ -53,7 +53,7 @@ select to_timestamp('1234567', 'yyyyyyy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'yyyyyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'yyyyyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -71,7 +71,7 @@ select to_timestamp('9', 'DD')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
parse '9' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
parse '9' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
 
 
 -- !query
@@ -89,7 +89,7 @@ select to_timestamp('9', 'DDD')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
parse '9' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
parse '9' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
 
 
 -- !query
@@ -98,7 +98,7 @@ select to_timestamp('99', 'DDD')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
parse '99' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
parse '99' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
 
 
 -- !query
@@ -170,7 +170,7 @@ select from_csv('2018-366', 'date Date', map('dateFormat', 'yyyy-DDD'))
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
parse '2018-366' in the new parser. You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
parse '2018-366' in the new parser. You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
index 2c47ed3..dc25ed9 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
@@ -725,7 +725,7 @@ select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -734,7 +734,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -743,7 +743,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -752,7 +752,7 @@ select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -761,7 +761,7 @@ select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat'
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -770,7 +770,7 @@ select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 'dd/MMM
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/date.sql.out b/sql/core/src/test/resources/sql-tests/results/date.sql.out
index bd32361..ad6421a 100644
--- a/sql/core/src/test/resources/sql-tests/results/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/date.sql.out
@@ -640,7 +640,7 @@ select to_date('26/October/2015', 'dd/MMMMM/yyyy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -649,7 +649,7 @@ select from_json('{"d":"26/October/2015"}', 'd Date', map('dateFormat', 'dd/MMMM
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -658,7 +658,7 @@ select from_csv('26/October/2015', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy'))
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out
index 9c8553d..6649ae3 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out
@@ -8,7 +8,7 @@ select date_format('2018-11-17 13:33:33.333', 'GGGGG')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'GGGGG' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'GGGGG' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -17,7 +17,7 @@ select date_format('2018-11-17 13:33:33.333', 'yyyyyyy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'yyyyyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'yyyyyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -44,7 +44,7 @@ select date_format('2018-11-17 13:33:33.333', 'MMMMM')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'MMMMM' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'MMMMM' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -53,7 +53,7 @@ select date_format('2018-11-17 13:33:33.333', 'LLLLL')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'LLLLL' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'LLLLL' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -62,7 +62,7 @@ select date_format('2018-11-17 13:33:33.333', 'EEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'EEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'EEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -71,7 +71,7 @@ select date_format('2018-11-17 13:33:33.333', 'FF')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'FF' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'FF' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -80,7 +80,7 @@ select date_format('2018-11-17 13:33:33.333', 'ddd')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'ddd' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'ddd' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -89,7 +89,7 @@ select date_format('2018-11-17 13:33:33.333', 'DDDD')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'DDDD' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'DDDD' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -98,7 +98,7 @@ select date_format('2018-11-17 13:33:33.333', 'HHH')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'HHH' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'HHH' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -107,7 +107,7 @@ select date_format('2018-11-17 13:33:33.333', 'hhh')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'hhh' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'hhh' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -116,7 +116,7 @@ select date_format('2018-11-17 13:33:33.333', 'kkk')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'kkk' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'kkk' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -125,7 +125,7 @@ select date_format('2018-11-17 13:33:33.333', 'KKK')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'KKK' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'KKK' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -134,7 +134,7 @@ select date_format('2018-11-17 13:33:33.333', 'mmm')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'mmm' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'mmm' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -143,7 +143,7 @@ select date_format('2018-11-17 13:33:33.333', 'sss')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'sss' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'sss' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -152,7 +152,7 @@ select date_format('2018-11-17 13:33:33.333', 'SSSSSSSSSS')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'SSSSSSSSSS' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'SSSSSSSSSS' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -161,7 +161,7 @@ select date_format('2018-11-17 13:33:33.333', 'aa')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'aa' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'aa' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -179,7 +179,7 @@ select date_format('2018-11-17 13:33:33.333', 'zzzzz')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'zzzzz' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'zzzzz' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -197,7 +197,7 @@ select date_format('2018-11-17 13:33:33.333', 'ZZZZZZ')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'ZZZZZZ' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'ZZZZZZ' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -260,7 +260,7 @@ select date_format('2018-11-17 13:33:33.333', 'Y')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'Y' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'Y' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -269,7 +269,7 @@ select date_format('2018-11-17 13:33:33.333', 'w')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'w' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'w' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -278,7 +278,7 @@ select date_format('2018-11-17 13:33:33.333', 'W')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'W' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'W' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -287,7 +287,7 @@ select date_format('2018-11-17 13:33:33.333', 'u')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'u' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'u' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
index c1e1a2c..3350470 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
@@ -17,7 +17,7 @@ select to_timestamp('1', 'yy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
parse '1' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
parse '1' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
 
 
 -- !query
@@ -34,7 +34,7 @@ select to_timestamp('123', 'yy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
parse '123' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
parse '123' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
 
 
 -- !query
@@ -43,7 +43,7 @@ select to_timestamp('1', 'yyy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
parse '1' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
parse '1' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
 
 
 -- !query
@@ -52,7 +52,7 @@ select to_timestamp('1234567', 'yyyyyyy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'yyyyyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'yyyyyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -69,7 +69,7 @@ select to_timestamp('9', 'DD')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
parse '9' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
parse '9' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
 
 
 -- !query
@@ -86,7 +86,7 @@ select to_timestamp('9', 'DDD')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
parse '9' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
parse '9' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
 
 
 -- !query
@@ -95,7 +95,7 @@ select to_timestamp('99', 'DDD')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
parse '99' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
parse '99' in the new parser. You can set spark.sql.legacy.timeParserPolicy to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string.
 
 
 -- !query
@@ -160,7 +160,7 @@ select from_csv('2018-366', 'date Date', map('dateFormat', 'yyyy-DDD'))
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
parse '2018-366' in the new parser. You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
parse '2018-366' in the new parser. You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
index e509d4e..8461083 100644
--- a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
@@ -339,7 +339,7 @@ select from_json(
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
parse '02-29' in the new parser. You can set spark.sql.legacy.timeParserPolicy 
to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and 
treat it as an invalid datetime string.
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
parse '02-29' in the new parser. You can set spark.sql.legacy.timeParserPolicy 
to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and 
treat it as an invalid datetime string.
 
 
 -- !query
@@ -351,7 +351,7 @@ select from_json(
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
parse '02-29' in the new parser. You can set spark.sql.legacy.timeParserPolicy 
to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and 
treat it as an invalid datetime string.
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
parse '02-29' in the new parser. You can set spark.sql.legacy.timeParserPolicy 
to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and 
treat it as an invalid datetime string.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
index 6362a2a..282e763 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
@@ -719,7 +719,7 @@ select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -728,7 +728,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -737,7 +737,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -746,7 +746,7 @@ select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -755,7 +755,7 @@ select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat'
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -764,7 +764,7 @@ select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 'dd/MMM
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index 46b51fc..95120a8 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -752,7 +752,7 @@ select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
index adadae5..0364f55 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
@@ -746,7 +746,7 @@ select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
index 14e941c..fd4f8b2 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
@@ -139,7 +139,7 @@ select to_timestamp('2018-01-01', a) from t
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'aa' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'aa' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -156,7 +156,7 @@ select to_unix_timestamp('2018-01-01', a) from t
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'aa' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'aa' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -173,7 +173,7 @@ select unix_timestamp('2018-01-01', a) from t
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'aa' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+You may get a different result due to the upgrading to Spark >= 3.0: Fail to 
recognize 'aa' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
index 762bc15..fa246fa 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
@@ -516,7 +516,7 @@ class DateFunctionsSuite extends QueryTest with SharedSparkSession {
       Seq(Row(null), Row(null), Row(null)))
     val e = intercept[SparkUpgradeException](df.select(to_date(col("s"), 
"yyyy-dd-aa")).collect())
     assert(e.getCause.isInstanceOf[IllegalArgumentException])
-    assert(e.getMessage.contains("You may get a different result due to the 
upgrading of Spark"))
+    assert(e.getMessage.contains("You may get a different result due to the 
upgrading to Spark"))
 
     // February
     val x1 = "2016-02-29"
@@ -699,7 +699,7 @@ class DateFunctionsSuite extends QueryTest with SharedSparkSession {
           val e = intercept[SparkUpgradeException](invalid.collect())
           assert(e.getCause.isInstanceOf[IllegalArgumentException])
           assert(
-            e.getMessage.contains("You may get a different result due to the 
upgrading of Spark"))
+            e.getMessage.contains("You may get a different result due to the 
upgrading to Spark"))
         }
 
         // February
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index 429e41e..eb1b066 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -17,12 +17,22 @@
 
 package org.apache.spark.sql.errors
 
-import org.apache.spark.{SparkException, SparkIllegalArgumentException, 
SparkRuntimeException, SparkUnsupportedOperationException}
-import org.apache.spark.sql.{DataFrame, QueryTest}
+import java.sql.Timestamp
+
+import org.apache.spark.{SparkException, SparkIllegalArgumentException, 
SparkRuntimeException, SparkUnsupportedOperationException, 
SparkUpgradeException}
+import org.apache.spark.sql.{DataFrame, QueryTest, Row}
+import org.apache.spark.sql.execution.datasources.orc.OrcTest
+import org.apache.spark.sql.execution.datasources.parquet.ParquetTest
 import org.apache.spark.sql.functions.{lit, lower, struct, sum}
+import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy.EXCEPTION
 import org.apache.spark.sql.test.SharedSparkSession
+import org.apache.spark.sql.types.{StructField, StructType, TimestampNTZType, 
TimestampType}
+import org.apache.spark.sql.util.ArrowUtils
+
+class QueryExecutionErrorsSuite extends QueryTest
+  with ParquetTest with OrcTest with SharedSparkSession {
 
-class QueryExecutionErrorsSuite extends QueryTest with SharedSparkSession {
   import testImplicits._
 
   private def getAesInputs(): (DataFrame, DataFrame) = {
@@ -181,4 +191,97 @@ class QueryExecutionErrorsSuite extends QueryTest with SharedSparkSession {
     assert(e2.getSqlState === "0A000")
     assert(e2.getMessage === "The feature is not supported: Pivot not after a 
groupBy.")
   }
+
+  test("INCONSISTENT_BEHAVIOR_CROSS_VERSION: " +
+    "compatibility with Spark 2.4/3.2 in reading/writing dates") {
+
+    // Fail to read ancient datetime values.
+    withSQLConf(SQLConf.PARQUET_REBASE_MODE_IN_READ.key -> EXCEPTION.toString) 
{
+      val fileName = "before_1582_date_v2_4_5.snappy.parquet"
+      val filePath = getResourceParquetFilePath("test-data/" + fileName)
+      val e = intercept[SparkException] {
+        spark.read.parquet(filePath).collect()
+      }.getCause.asInstanceOf[SparkUpgradeException]
+
+      val format = "Parquet"
+      val config = SQLConf.PARQUET_REBASE_MODE_IN_READ.key
+      val option = "datetimeRebaseMode"
+      assert(e.getErrorClass === "INCONSISTENT_BEHAVIOR_CROSS_VERSION")
+      assert(e.getMessage ===
+        "You may get a different result due to the upgrading to Spark >= 3.0: 
" +
+          s"""
+             |reading dates before 1582-10-15 or timestamps before 
1900-01-01T00:00:00Z
+             |from $format files can be ambiguous, as the files may be written 
by
+             |Spark 2.x or legacy versions of Hive, which uses a legacy hybrid 
calendar
+             |that is different from Spark 3.0+'s Proleptic Gregorian calendar.
+             |See more details in SPARK-31404. You can set the SQL config 
'$config' or
+             |the datasource option '$option' to 'LEGACY' to rebase the 
datetime values
+             |w.r.t. the calendar difference during reading. To read the 
datetime values
+             |as it is, set the SQL config '$config' or the datasource option 
'$option'
+             |to 'CORRECTED'.
+             |""".stripMargin)
+    }
+
+    // Fail to write ancient datetime values.
+    withSQLConf(SQLConf.PARQUET_REBASE_MODE_IN_WRITE.key -> 
EXCEPTION.toString) {
+      withTempPath { dir =>
+        val df = Seq(java.sql.Date.valueOf("1001-01-01")).toDF("dt")
+        val e = intercept[SparkException] {
+          df.write.parquet(dir.getCanonicalPath)
+        }.getCause.getCause.getCause.asInstanceOf[SparkUpgradeException]
+
+        val format = "Parquet"
+        val config = SQLConf.PARQUET_REBASE_MODE_IN_WRITE.key
+        assert(e.getErrorClass === "INCONSISTENT_BEHAVIOR_CROSS_VERSION")
+        assert(e.getMessage ===
+          "You may get a different result due to the upgrading to Spark >= 
3.0: " +
+            s"""
+               |writing dates before 1582-10-15 or timestamps before 
1900-01-01T00:00:00Z
+               |into $format files can be dangerous, as the files may be read 
by Spark 2.x
+               |or legacy versions of Hive later, which uses a legacy hybrid 
calendar that
+               |is different from Spark 3.0+'s Proleptic Gregorian calendar. 
See more
+               |details in SPARK-31404. You can set $config to 'LEGACY' to 
rebase the
+               |datetime values w.r.t. the calendar difference during writing, 
to get maximum
+               |interoperability. Or set $config to 'CORRECTED' to write the 
datetime values
+               |as it is, if you are 100% sure that the written files will 
only be read by
+               |Spark 3.0+ or other systems that use Proleptic Gregorian 
calendar.
+               |""".stripMargin)
+      }
+    }
+  }
+
+  test("UNSUPPORTED_OPERATION: timeZoneId not specified while converting 
TimestampType to Arrow") {
+    val schema = new StructType().add("value", TimestampType)
+    val e = intercept[SparkUnsupportedOperationException] {
+      ArrowUtils.toArrowSchema(schema, null)
+    }
+
+    assert(e.getErrorClass === "UNSUPPORTED_OPERATION")
+    assert(e.getMessage === "The operation is not supported: " +
+      "timestamp must supply timeZoneId parameter while converting to 
ArrowType")
+  }
+
+  test("UNSUPPORTED_OPERATION - SPARK-36346: can't read Timestamp as 
TimestampNTZ") {
+    val data = (1 to 10).map { i =>
+      val ts = new Timestamp(i)
+      Row(ts)
+    }
+
+    val actualSchema = StructType(Seq(StructField("time", TimestampType, 
false)))
+    val providedSchema = StructType(Seq(StructField("time", TimestampNTZType, 
false)))
+
+    withTempPath { file =>
+      val df = spark.createDataFrame(sparkContext.parallelize(data), 
actualSchema)
+      df.write.orc(file.getCanonicalPath)
+      withAllNativeOrcReaders {
+        val e = intercept[SparkException] {
+          
spark.read.schema(providedSchema).orc(file.getCanonicalPath).collect()
+        }.getCause.asInstanceOf[SparkUnsupportedOperationException]
+
+        assert(e.getErrorClass === "UNSUPPORTED_OPERATION")
+        assert(e.getMessage === "The operation is not supported: " +
+          "Unable to convert timestamp of Orc to data type 'timestamp_ntz'")
+      }
+    }
+  }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcQuerySuite.scala
index 551a3f5..280a880 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcQuerySuite.scala
@@ -804,32 +804,6 @@ abstract class OrcQuerySuite extends OrcQueryTest with SharedSparkSession {
     }
   }
 
-  test("SPARK-36346: can't read TimestampLTZ as TimestampNTZ") {
-    val data = (1 to 10).map { i =>
-      val ts = new Timestamp(i)
-      Row(ts)
-    }
-    val answer = (1 to 10).map { i =>
-      // The second parameter is `nanoOfSecond`, while java.sql.Timestamp 
accepts milliseconds
-      // as input. So here we multiple the `nanoOfSecond` by NANOS_PER_MILLIS
-      val ts = LocalDateTime.ofEpochSecond(0, i * 1000000, ZoneOffset.UTC)
-      Row(ts)
-    }
-    val actualSchema = StructType(Seq(StructField("time", TimestampType, 
false)))
-    val providedSchema = StructType(Seq(StructField("time", TimestampNTZType, 
false)))
-
-    withTempPath { file =>
-      val df = spark.createDataFrame(sparkContext.parallelize(data), 
actualSchema)
-      df.write.orc(file.getCanonicalPath)
-      withAllNativeOrcReaders {
-        val msg = intercept[SparkException] {
-          
spark.read.schema(providedSchema).orc(file.getCanonicalPath).collect()
-        }.getMessage
-        assert(msg.contains("Unable to convert timestamp of Orc to data type 
'timestamp_ntz'"))
-      }
-    }
-  }
-
   test("SPARK-36346: read TimestampNTZ as TimestampLTZ") {
     val data = (1 to 10).map { i =>
       // The second parameter is `nanoOfSecond`, while java.sql.Timestamp 
accepts milliseconds
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcTest.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcTest.scala
index 96932de..c36bfd9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcTest.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcTest.scala
@@ -47,7 +47,7 @@ import org.apache.spark.sql.internal.SQLConf.ORC_IMPLEMENTATION
  *       -> HiveOrcPartitionDiscoverySuite
  *   -> OrcFilterSuite
  */
-abstract class OrcTest extends QueryTest with FileBasedDataSourceTest with 
BeforeAndAfterAll {
+trait OrcTest extends QueryTest with FileBasedDataSourceTest with 
BeforeAndAfterAll {
 
   val orcImp: String = "native"
 
