This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
     new db7f346729d  [SPARK-39085][SQL] Move the error message of `INCONSISTENT_BEHAVIOR_CROSS_VERSION` to error-classes.json
db7f346729d is described below

commit db7f346729d481f6ea6fcc88e381fda33de9b3f1
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Tue May 3 08:28:27 2022 +0300

    [SPARK-39085][SQL] Move the error message of `INCONSISTENT_BEHAVIOR_CROSS_VERSION` to error-classes.json

    ### What changes were proposed in this pull request?
    In the PR, I propose to create two new sub-classes of the error class `INCONSISTENT_BEHAVIOR_CROSS_VERSION`:
    - READ_ANCIENT_DATETIME
    - WRITE_ANCIENT_DATETIME

    and move their error messages from source code to the json file `error-classes.json`.

    ### Why are the changes needed?
    1. To improve maintainability of error messages by keeping them in one place.
    2. To follow the general rule that the bodies of error messages should be placed in the json file, and only parameters are passed from source code.

    ### Does this PR introduce _any_ user-facing change?
    No.

    ### How was this patch tested?
    By running the modified test suites:
    ```
    $ build/sbt "sql/testOnly *QueryExecutionErrorsSuite*"
    $ build/sbt "test:testOnly *SparkThrowableSuite"
    $ build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite"
    $ build/sbt "test:testOnly *DateFormatterSuite"
    $ build/sbt "test:testOnly *DateExpressionsSuite"
    $ build/sbt "test:testOnly *TimestampFormatterSuite"
    ```

    Closes #36426 from MaxGekk/error-subclass-INCONSISTENT_BEHAVIOR_CROSS_VERSION.

    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json   | 19 +++++-
 .../scala/org/apache/spark/SparkException.scala    |  7 ---
 .../spark/sql/errors/QueryExecutionErrors.scala    | 67 ++++++++++------------
 .../resources/sql-tests/results/ansi/date.sql.out  |  9 ++-
 .../results/ansi/datetime-parsing-invalid.sql.out  | 24 +++++---
 .../sql-tests/results/ansi/timestamp.sql.out       | 18 ++++--
 .../test/resources/sql-tests/results/date.sql.out  |  9 ++-
 .../results/datetime-formatting-invalid.sql.out    | 66 ++++++++++++++-------
 .../results/datetime-parsing-invalid.sql.out       | 24 +++++---
 .../sql-tests/results/json-functions.sql.out       |  6 +-
 .../resources/sql-tests/results/timestamp.sql.out  | 18 ++++--
 .../results/timestampNTZ/timestamp-ansi.sql.out    |  3 +-
 .../results/timestampNTZ/timestamp.sql.out         |  3 +-
 .../native/stringCastAndExpressions.sql.out        |  9 ++-
 .../sql/errors/QueryExecutionErrorsSuite.scala     |  4 +-
 15 files changed, 177 insertions(+), 109 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index eacbeec570f..24b50c4209a 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -79,7 +79,24 @@
     "message" : [ "Detected an incompatible DataSourceRegister. Please remove the incompatible library from classpath or upgrade it. Error: <message>" ]
   },
   "INCONSISTENT_BEHAVIOR_CROSS_VERSION" : {
-    "message" : [ "You may get a different result due to the upgrading to Spark >= <sparkVersion>: <details>" ]
+    "message" : [ "You may get a different result due to the upgrading to" ],
+    "subClass" : {
+      "DATETIME_PATTERN_RECOGNITION" : {
+        "message" : [ " Spark >= 3.0: \nFail to recognize <pattern> pattern in the DateTimeFormatter. 1) You can set <config> to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html" ]
+      },
+      "FORMAT_DATETIME_BY_NEW_PARSER" : {
+        "message" : [ " Spark >= 3.0: \nFail to format it to <resultCandidate> in the new formatter. You can set\n<config> to 'LEGACY' to restore the behavior before\nSpark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string.\n" ]
+      },
+      "PARSE_DATETIME_BY_NEW_PARSER" : {
+        "message" : [ " Spark >= 3.0: \nFail to parse <datetime> in the new parser. You can set <config> to 'LEGACY' to restore the behavior before Spark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string." ]
+      },
+      "READ_ANCIENT_DATETIME" : {
+        "message" : [ " Spark >= 3.0: \nreading dates before 1582-10-15 or timestamps before 1900-01-01T00:00:00Z\nfrom <format> files can be ambiguous, as the files may be written by\nSpark 2.x or legacy versions of Hive, which uses a legacy hybrid calendar\nthat is different from Spark 3.0+'s Proleptic Gregorian calendar.\nSee more details in SPARK-31404. You can set the SQL config <config> or\nthe datasource option '<option>' to 'LEGACY' to rebase the datetime values\nw.r.t. the calendar difference during reading. To read the datetime values\nas it is, set the SQL config <config> or the datasource option '<option>'\nto 'CORRECTED'.\n" ]
+      },
+      "WRITE_ANCIENT_DATETIME" : {
+        "message" : [ " Spark >= 3.0: \nwriting dates before 1582-10-15 or timestamps before 1900-01-01T00:00:00Z\ninto <format> files can be dangerous, as the files may be read by Spark 2.x\nor legacy versions of Hive later, which uses a legacy hybrid calendar that\nis different from Spark 3.0+'s Proleptic Gregorian calendar. See more\ndetails in SPARK-31404. You can set <config> to 'LEGACY' to rebase the\ndatetime values w.r.t. the calendar difference during writing, to get maximum\ninteroperability. Or set <config> to 'CORRECTED' to write the datetime\nvalues as it is, if you are 100% sure that the written files will only be read by\nSpark 3.0+ or other systems that use Proleptic Gregorian calendar.\n" ]
+      }
+    }
   },
   "INDEX_OUT_OF_BOUNDS" : {
     "message" : [ "Index <indexValue> must be between 0 and the length of the ArrayData." ],
diff --git a/core/src/main/scala/org/apache/spark/SparkException.scala b/core/src/main/scala/org/apache/spark/SparkException.scala
index 8442c8eb8d3..a846e6c46a2 100644
--- a/core/src/main/scala/org/apache/spark/SparkException.scala
+++ b/core/src/main/scala/org/apache/spark/SparkException.scala
@@ -78,13 +78,6 @@ private[spark] class SparkUpgradeException(
   extends RuntimeException(SparkThrowableHelper.getMessage(errorClass, messageParameters), cause)
   with SparkThrowable {
 
-  def this(version: String, message: String, cause: Throwable) =
-    this (
-      errorClass = "INCONSISTENT_BEHAVIOR_CROSS_VERSION",
-      messageParameters = Array(version, message),
-      cause = cause
-    )
-
   override def getErrorClass: String = errorClass
 }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 4b8d76e8e6f..53d32927cee 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -534,18 +534,12 @@ object QueryExecutionErrors extends QueryErrorsBase {
     new SparkUpgradeException(
       errorClass = "INCONSISTENT_BEHAVIOR_CROSS_VERSION",
       messageParameters = Array(
-        "3.0",
-        s"""
-           |reading dates before 1582-10-15 or timestamps before 1900-01-01T00:00:00Z
-           |from $format files can be ambiguous, as the files may be written by
-           |Spark 2.x or legacy versions of Hive, which uses a legacy hybrid calendar
-           |that is different from Spark 3.0+'s Proleptic Gregorian calendar.
-           |See more details in SPARK-31404. You can set the SQL config ${toSQLConf(config)} or
-           |the datasource option '$option' to 'LEGACY' to rebase the datetime values
-           |w.r.t. the calendar difference during reading. To read the datetime values
-           |as it is, set the SQL config ${toSQLConf(config)} or the datasource option '$option'
-           |to 'CORRECTED'.
-           |""".stripMargin),
+        "READ_ANCIENT_DATETIME",
+        format,
+        toSQLConf(config),
+        option,
+        toSQLConf(config),
+        option),
       cause = null
     )
   }
@@ -554,18 +548,10 @@
     new SparkUpgradeException(
       errorClass = "INCONSISTENT_BEHAVIOR_CROSS_VERSION",
       messageParameters = Array(
-        "3.0",
-        s"""
-           |writing dates before 1582-10-15 or timestamps before 1900-01-01T00:00:00Z
-           |into $format files can be dangerous, as the files may be read by Spark 2.x
-           |or legacy versions of Hive later, which uses a legacy hybrid calendar that
-           |is different from Spark 3.0+'s Proleptic Gregorian calendar. See more
-           |details in SPARK-31404. You can set ${toSQLConf(config)} to 'LEGACY' to rebase the
-           |datetime values w.r.t. the calendar difference during writing, to get maximum
-           |interoperability. Or set ${toSQLConf(config)} to 'CORRECTED' to write the datetime
-           |values as it is, if you are 100% sure that the written files will only be read by
-           |Spark 3.0+ or other systems that use Proleptic Gregorian calendar.
-           |""".stripMargin),
+        "WRITE_ANCIENT_DATETIME",
+        format,
+        toSQLConf(config),
+        toSQLConf(config)),
       cause = null
     )
   }
@@ -997,26 +983,33 @@
   }
 
   def failToParseDateTimeInNewParserError(s: String, e: Throwable): Throwable = {
-    new SparkUpgradeException("3.0", s"Fail to parse '$s' in the new parser. You can " +
-      s"set ${SQLConf.LEGACY_TIME_PARSER_POLICY.key} to LEGACY to restore the behavior " +
-      s"before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.", e)
+    new SparkUpgradeException(
+      errorClass = "INCONSISTENT_BEHAVIOR_CROSS_VERSION",
+      messageParameters = Array(
+        "PARSE_DATETIME_BY_NEW_PARSER",
+        toSQLValue(s, StringType),
+        toSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key)),
+      e)
   }
 
   def failToFormatDateTimeInNewFormatterError(
       resultCandidate: String, e: Throwable): Throwable = {
-    new SparkUpgradeException("3.0",
-      s"""
-         |Fail to format it to '$resultCandidate' in the new formatter. You can set
-         |${SQLConf.LEGACY_TIME_PARSER_POLICY.key} to LEGACY to restore the behavior before
-         |Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
-       """.stripMargin.replaceAll("\n", " "), e)
+    new SparkUpgradeException(
+      errorClass = "INCONSISTENT_BEHAVIOR_CROSS_VERSION",
+      messageParameters = Array(
+        "FORMAT_DATETIME_BY_NEW_PARSER",
+        toSQLValue(resultCandidate, StringType),
+        toSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key)),
+      e)
   }
 
   def failToRecognizePatternAfterUpgradeError(pattern: String, e: Throwable): Throwable = {
-    new SparkUpgradeException("3.0", s"Fail to recognize '$pattern' pattern in the" +
-      s" DateTimeFormatter. 1) You can set ${SQLConf.LEGACY_TIME_PARSER_POLICY.key} to LEGACY" +
-      s" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern" +
-      s" with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html",
+    new SparkUpgradeException(
+      errorClass = "INCONSISTENT_BEHAVIOR_CROSS_VERSION",
+      messageParameters = Array(
+        "DATETIME_PATTERN_RECOGNITION",
+        toSQLValue(pattern, StringType),
+        toSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key)),
       e)
   }
 
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index 41515cdee2a..d27f0299505 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -650,7 +650,8 @@ select to_date('26/October/2015', 'dd/MMMMM/yyyy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -659,7 +660,8 @@ select from_json('{"d":"26/October/2015"}', 'd Date', map('dateFormat', 'dd/MMMM
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -668,7 +670,8 @@ select from_csv('26/October/2015', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy'))
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
index 22694bf84e2..d16e8340f97 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
@@ -17,7 +17,8 @@ select to_timestamp('1', 'yy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to parse '1' in the new parser. You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to parse '1' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string.
 
 
 -- !query
@@ -35,7 +36,8 @@ select to_timestamp('123', 'yy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to parse '123' in the new parser. You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to parse '123' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string.
 
 
 -- !query
@@ -44,7 +46,8 @@ select to_timestamp('1', 'yyy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to parse '1' in the new parser. You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to parse '1' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string.
 
 
 -- !query
@@ -53,7 +56,8 @@ select to_timestamp('1234567', 'yyyyyyy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'yyyyyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'yyyyyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -71,7 +75,8 @@ select to_timestamp('9', 'DD')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to parse '9' in the new parser. You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to parse '9' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string.
 
 
 -- !query
@@ -80,7 +85,8 @@ select to_timestamp('9', 'DDD')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to parse '9' in the new parser. You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to parse '9' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string.
 
 
 -- !query
@@ -89,7 +95,8 @@ select to_timestamp('99', 'DDD')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to parse '99' in the new parser. You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to parse '99' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string.
 
 
 -- !query
@@ -161,7 +168,8 @@ select from_csv('2018-366', 'date Date', map('dateFormat', 'yyyy-DDD'))
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to parse '2018-366' in the new parser. You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to parse '2018-366' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
index 5183a4d9a7c..00f423de5f7 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
@@ -733,7 +733,8 @@ select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -742,7 +743,8 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -751,7 +753,8 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -760,7 +763,8 @@ select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -769,7 +773,8 @@ select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat'
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -778,7 +783,8 @@ select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 'dd/MMM
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/date.sql.out b/sql/core/src/test/resources/sql-tests/results/date.sql.out
index 49ca0250279..a5ff08ba74c 100644
--- a/sql/core/src/test/resources/sql-tests/results/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/date.sql.out
@@ -640,7 +640,8 @@ select to_date('26/October/2015', 'dd/MMMMM/yyyy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -649,7 +650,8 @@ select from_json('{"d":"26/October/2015"}', 'd Date', map('dateFormat', 'dd/MMMM
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -658,7 +660,8 @@ select from_csv('26/October/2015', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy'))
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out
index d04afc7e9c4..b6624e0fa5b 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out
@@ -8,7 +8,8 @@ select date_format('2018-11-17 13:33:33.333', 'GGGGG')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'GGGGG' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'GGGGG' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -17,7 +18,8 @@ select date_format('2018-11-17 13:33:33.333', 'yyyyyyy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'yyyyyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'yyyyyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -44,7 +46,8 @@ select date_format('2018-11-17 13:33:33.333', 'MMMMM')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'MMMMM' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'MMMMM' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -53,7 +56,8 @@ select date_format('2018-11-17 13:33:33.333', 'LLLLL')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'LLLLL' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'LLLLL' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -62,7 +66,8 @@ select date_format('2018-11-17 13:33:33.333', 'EEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'EEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'EEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -71,7 +76,8 @@ select date_format('2018-11-17 13:33:33.333', 'FF')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'FF' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'FF' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -80,7 +86,8 @@ select date_format('2018-11-17 13:33:33.333', 'ddd')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'ddd' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'ddd' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -89,7 +96,8 @@ select date_format('2018-11-17 13:33:33.333', 'DDDD')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'DDDD' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'DDDD' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -98,7 +106,8 @@ select date_format('2018-11-17 13:33:33.333', 'HHH')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'HHH' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'HHH' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -107,7 +116,8 @@ select date_format('2018-11-17 13:33:33.333', 'hhh')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'hhh' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'hhh' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -116,7 +126,8 @@ select date_format('2018-11-17 13:33:33.333', 'kkk')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'kkk' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'kkk' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -125,7 +136,8 @@ select date_format('2018-11-17 13:33:33.333', 'KKK')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'KKK' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'KKK' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -134,7 +146,8 @@ select date_format('2018-11-17 13:33:33.333', 'mmm')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'mmm' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'mmm' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -143,7 +156,8 @@ select date_format('2018-11-17 13:33:33.333', 'sss')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'sss' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'sss' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -152,7 +166,8 @@ select date_format('2018-11-17 13:33:33.333', 'SSSSSSSSSS')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'SSSSSSSSSS' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'SSSSSSSSSS' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -161,7 +176,8 @@ select date_format('2018-11-17 13:33:33.333', 'aa')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'aa' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'aa' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -179,7 +195,8 @@ select date_format('2018-11-17 13:33:33.333', 'zzzzz')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'zzzzz' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'zzzzz' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -197,7 +214,8 @@ select date_format('2018-11-17 13:33:33.333', 'ZZZZZZ')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'ZZZZZZ' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'ZZZZZZ' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -260,7 +278,8 @@ select date_format('2018-11-17 13:33:33.333', 'Y')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'Y' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'Y' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -269,7 +288,8 @@ select date_format('2018-11-17 13:33:33.333', 'w')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'w' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'w' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -278,7 +298,8 @@ select date_format('2018-11-17 13:33:33.333', 'W')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'W' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'W' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -287,7 +308,8 @@ select date_format('2018-11-17 13:33:33.333', 'u')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'u' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'u' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
index e453105b10e..a17840e7712 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
@@ -17,7 +17,8 @@ select to_timestamp('1', 'yy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to parse '1' in the new parser. You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to parse '1' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string.
 
 
 -- !query
@@ -34,7 +35,8 @@ select to_timestamp('123', 'yy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to parse '123' in the new parser. You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to parse '123' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string.
 
 
 -- !query
@@ -43,7 +45,8 @@ select to_timestamp('1', 'yyy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to parse '1' in the new parser. You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to parse '1' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string.
 
 
 -- !query
@@ -52,7 +55,8 @@ select to_timestamp('1234567', 'yyyyyyy')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'yyyyyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'yyyyyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -69,7 +73,8 @@ select to_timestamp('9', 'DD')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to parse '9' in the new parser. You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to parse '9' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string.
 
 
 -- !query
@@ -78,7 +83,8 @@ select to_timestamp('9', 'DDD')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to parse '9' in the new parser. You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to parse '9' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string.
@@ -87,7 +93,8 @@ select to_timestamp('99', 'DDD')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to parse '99' in the new parser. You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to parse '99' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string.
 
 
 -- !query
@@ -152,7 +159,8 @@ select from_csv('2018-366', 'date Date', map('dateFormat', 'yyyy-DDD'))
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to parse '2018-366' in the new parser. You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to parse '2018-366' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
index 6f1a5c3bd5b..830e22522b9 100644
--- a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
@@ -339,7 +339,8 @@ select from_json(
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to parse '02-29' in the new parser. You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to parse '02-29' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string.
 
 
 -- !query
@@ -351,7 +352,8 @@ select from_json(
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to parse '02-29' in the new parser. You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to parse '02-29' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0, or set to 'CORRECTED' and treat it as an invalid datetime string.
 
 
 -- !query
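The 'CORRECTED' alternative mentioned in the same message behaves differently: the new parser is kept, and input it cannot parse becomes NULL rather than an exception. A minimal sketch, assuming an existing SparkSession `spark` in non-ANSI mode (illustrative, not part of the patch):

```scala
// Sketch: the message's "or set to 'CORRECTED'" option. '2018-366' is day 366
// of a non-leap year, so the new parser rejects it and the result is NULL.
spark.conf.set("spark.sql.legacy.timeParserPolicy", "CORRECTED")
spark.sql("select to_timestamp('2018-366', 'yyyy-DDD')").show()  // prints NULL
```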
diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
index 24c78a245ca..be105aaeaf9 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
@@ -727,7 +727,8 @@ select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -736,7 +737,8 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -745,7 +747,8 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -754,7 +757,8 @@ select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -763,7 +767,8 @@ select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat'
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -772,7 +777,8 @@ select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 'dd/MMM
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index 1dee8d0ad98..1363ecaf4d2 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -761,7 +761,8 @@ select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
index 4831440fb2d..8d47ef96128 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
@@ -754,7 +754,8 @@ select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
index bf8966feea2..1f5175c51a5 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
@@ -139,7 +139,8 @@ select to_timestamp('2018-01-01', a) from t
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'aa' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'aa' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -156,7 +157,8 @@ select to_unix_timestamp('2018-01-01', a) from t
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'aa' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'aa' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
@@ -173,7 +175,8 @@ select unix_timestamp('2018-01-01', a) from t
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION] You may get a different result due to the upgrading to Spark >= 3.0: Fail to recognize 'aa' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
+Fail to recognize 'aa' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to 'LEGACY' to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
 
 
 -- !query
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index ac865d01e3b..baa731571f7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -220,6 +220,7 @@ class QueryExecutionErrorsSuite
     checkErrorClass(
       exception = e,
       errorClass = "INCONSISTENT_BEHAVIOR_CROSS_VERSION",
+      errorSubClass = Some("READ_ANCIENT_DATETIME"),
       msg = "You may get a different result due to the upgrading to Spark >= 3.0: " +
         s"""
@@ -248,6 +249,7 @@ class QueryExecutionErrorsSuite
     checkErrorClass(
       exception = e,
       errorClass = "INCONSISTENT_BEHAVIOR_CROSS_VERSION",
+      errorSubClass = Some("WRITE_ANCIENT_DATETIME"),
       msg = "You may get a different result due to the upgrading to Spark >= 3.0: " +
         s"""
@@ -258,7 +260,7 @@ class QueryExecutionErrorsSuite
         |details in SPARK-31404. You can set $config to 'LEGACY' to rebase the
         |datetime values w.r.t. the calendar difference during writing, to get maximum
         |interoperability. Or set $config to 'CORRECTED' to write the datetime
-        |values as it is, if you are 100% sure that the written files will only be read by
+        |values as it is, if you are sure that the written files will only be read by
         |Spark 3.0+ or other systems that use Proleptic Gregorian calendar.
         |""".stripMargin)
   }
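For the two new sub-classes exercised in this suite, here is a sketch of how the write-side case can surface outside the tests. It is illustrative only: the Parquet rebase config is named as one concrete example (the suite itself interpolates the config name via `$config`), and `spark` is an assumed SparkSession:

```scala
// Sketch: surfacing WRITE_ANCIENT_DATETIME. Dates before the Gregorian
// cutover (1582-10-15) hit the rebase check; with the default EXCEPTION
// rebase mode the write below raises SparkUpgradeException carrying
// [INCONSISTENT_BEHAVIOR_CROSS_VERSION.WRITE_ANCIENT_DATETIME].
spark.sql("select date'1001-01-01' as d")
  .write.mode("overwrite").parquet("/tmp/ancient-dates")

// Either rebase mode named in the message avoids the error, e.g.:
spark.conf.set("spark.sql.parquet.datetimeRebaseModeInWrite", "CORRECTED")
```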