This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
     new 480d5a73bbd3 [SPARK-47259][SQL] Assign names to error conditions for interval errors
480d5a73bbd3 is described below
commit 480d5a73bbd3d687822dc4bb5e84bdb4b5e06dd4
Author: Vladan Vasić <[email protected]>
AuthorDate: Wed Sep 11 19:45:34 2024 +0200
[SPARK-47259][SQL] Assign names to error conditions for interval errors
### What changes were proposed in this pull request?
In the PR, I propose to rename the legacy error classes `_LEGACY_ERROR_TEMP_32[09-14]` to `ILLEGAL_DAY_OF_WEEK` and `INVALID_INTERVAL_FORMAT.[SECOND_NANO_FORMAT, DAY_TIME_PARSING, UNSUPPORTED_FROM_TO_EXPRESSION, INTERVAL_PARSING, UNMATCHED_FORMAT_STRING]`, and to modify their error messages accordingly. I also propose modifying the test that checks `INVALID_INTERVAL_FORMAT.DAY_TIME_PARSING` so that it also checks the error class and SQL state. Furthermore, I propose to add tests which check [...]
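
For illustration, after the rename a call site raises the named condition directly; this minimal sketch mirrors the `DateTimeUtils.scala` hunk in the diff below:

```scala
// After the rename: the legacy identifier _LEGACY_ERROR_TEMP_3209 is replaced
// by the named condition ILLEGAL_DAY_OF_WEEK (sqlState 22009).
throw new SparkIllegalArgumentException(
  errorClass = "ILLEGAL_DAY_OF_WEEK",
  messageParameters = Map("string" -> string.toString))
```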
### Why are the changes needed?
Proper error condition names improve the user experience with Spark SQL.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
It was tested by modifying existing tests in the `IntervalUtilsSuite`, `DateTimeUtilsSuite`, `DDLParserSuite` and `CastSuiteBase` classes. The tests were updated to check the new names and SQL states. Additionally, tests checking the `INVALID_INTERVAL_FORMAT.SECOND_NANO_FORMAT` and `INVALID_INTERVAL_FORMAT.UNSUPPORTED_FROM_TO_EXPRESSION` exceptions were added in `IntervalUtilsSuite`.
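
A representative check from the updated `IntervalUtilsSuite` (mirrored from the diff below) asserts the error condition, message parameters, and SQL state together:

```scala
checkError(
  exception = intercept[SparkIllegalArgumentException] {
    fromDayTimeString("5 30:12:20")
  },
  condition = "INVALID_INTERVAL_FORMAT.DAY_TIME_PARSING",
  parameters = Map(
    "msg" -> "requirement failed: hour 30 outside range [0, 23]",
    "input" -> "5 30:12:20"),
  sqlState = Some("22006"))
```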
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #47846 from vladanvasi-db/vladanvasi-db/interval-error-classes-naming-fix.
Authored-by: Vladan Vasić <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
.../src/main/resources/error/error-conditions.json | 66 ++++++++++--------
.../spark/sql/catalyst/parser/AstBuilder.scala | 8 +++
.../spark/sql/catalyst/util/DateTimeUtils.scala | 2 +-
.../spark/sql/catalyst/util/IntervalUtils.scala | 58 +++++++++-------
.../sql/catalyst/expressions/CastSuiteBase.scala | 24 +++----
.../sql/catalyst/util/DateTimeUtilsSuite.scala | 4 +-
.../sql/catalyst/util/IntervalUtilsSuite.scala | 40 ++++++++++-
.../analyzer-results/ansi/interval.sql.out | 70 ++++++++++++++-----
.../sql-tests/analyzer-results/interval.sql.out | 70 ++++++++++++++-----
.../analyzer-results/postgreSQL/interval.sql.out | 80 ++++++++++++++++------
.../resources/sql-tests/results/ansi/date.sql.out | 3 +-
.../sql-tests/results/ansi/interval.sql.out | 70 ++++++++++++++-----
.../resources/sql-tests/results/interval.sql.out | 70 ++++++++++++++-----
.../sql-tests/results/postgreSQL/interval.sql.out | 80 ++++++++++++++++------
.../sql/execution/command/DDLParserSuite.scala | 8 +--
15 files changed, 462 insertions(+), 191 deletions(-)
diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index f254a11eea95..29eda228c2da 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -1583,6 +1583,12 @@
],
"sqlState" : "42601"
},
+ "ILLEGAL_DAY_OF_WEEK" : {
+ "message" : [
+ "Illegal input for day of week: <string>."
+ ],
+ "sqlState" : "22009"
+ },
"ILLEGAL_STATE_STORE_VALUE" : {
"message" : [
"Illegal value provided to the State Store"
@@ -2395,6 +2401,11 @@
"Uncaught arithmetic exception while parsing '<input>'."
]
},
+ "DAY_TIME_PARSING" : {
+ "message" : [
+ "Error parsing interval day-time string: <msg>."
+ ]
+ },
"INPUT_IS_EMPTY" : {
"message" : [
"Interval string cannot be empty."
@@ -2405,6 +2416,11 @@
"Interval string cannot be null."
]
},
+ "INTERVAL_PARSING" : {
+ "message" : [
+ "Error parsing interval <interval> string."
+ ]
+ },
"INVALID_FRACTION" : {
"message" : [
"<unit> cannot have fractional part."
@@ -2440,15 +2456,35 @@
"Expect a unit name after <word> but hit EOL."
]
},
+ "SECOND_NANO_FORMAT" : {
+ "message" : [
+ "Interval string does not match second-nano format of ss.nnnnnnnnn."
+ ]
+ },
"UNKNOWN_PARSING_ERROR" : {
"message" : [
"Unknown error when parsing <word>."
]
},
+ "UNMATCHED_FORMAT_STRING" : {
+ "message" : [
+ "Interval string does not match <intervalStr> format of
<supportedFormat> when cast to <typeName>: <input>."
+ ]
+ },
+ "UNMATCHED_FORMAT_STRING_WITH_NOTICE" : {
+ "message" : [
+ "Interval string does not match <intervalStr> format of
<supportedFormat> when cast to <typeName>: <input>. Set
\"spark.sql.legacy.fromDayTimeString.enabled\" to \"true\" to restore the
behavior before Spark 3.0."
+ ]
+ },
"UNRECOGNIZED_NUMBER" : {
"message" : [
"Unrecognized number <number>."
]
+ },
+ "UNSUPPORTED_FROM_TO_EXPRESSION" : {
+ "message" : [
+ "Cannot support (interval '<input>' <from> to <to>) expression."
+ ]
}
},
"sqlState" : "22006"
@@ -8438,36 +8474,6 @@
"The number of fields (<numFields>) in the partition identifier is not
equal to the partition schema length (<schemaLen>). The identifier might not
refer to one partition."
]
},
- "_LEGACY_ERROR_TEMP_3209" : {
- "message" : [
- "Illegal input for day of week: <string>"
- ]
- },
- "_LEGACY_ERROR_TEMP_3210" : {
- "message" : [
- "Interval string does not match second-nano format of ss.nnnnnnnnn"
- ]
- },
- "_LEGACY_ERROR_TEMP_3211" : {
- "message" : [
- "Error parsing interval day-time string: <msg>"
- ]
- },
- "_LEGACY_ERROR_TEMP_3212" : {
- "message" : [
- "Cannot support (interval '<input>' <from> to <to>) expression"
- ]
- },
- "_LEGACY_ERROR_TEMP_3213" : {
- "message" : [
- "Error parsing interval <interval> string: <msg>"
- ]
- },
- "_LEGACY_ERROR_TEMP_3214" : {
- "message" : [
- "Interval string does not match <intervalStr> format of
<supportedFormat> when cast to <typeName>: <input><fallBackNotice>"
- ]
- },
"_LEGACY_ERROR_TEMP_3215" : {
"message" : [
"Expected a Boolean type expression in replaceNullWithFalse, but got the
type <dataType> in <expr>."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 205af9e33c17..924b5c2cfeb1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -3466,6 +3466,14 @@ class AstBuilder extends DataTypeAstBuilder
throw QueryParsingErrors.fromToIntervalUnsupportedError(from, to, ctx)
}
} catch {
+ // Keep error class of SparkIllegalArgumentExceptions and enrich it with query context
+ case se: SparkIllegalArgumentException =>
+ val pe = new ParseException(
+ errorClass = se.getErrorClass,
+ messageParameters = se.getMessageParameters.asScala.toMap,
+ ctx)
+ pe.setStackTrace(se.getStackTrace)
+ throw pe
// Handle Exceptions thrown by CalendarInterval
case e: IllegalArgumentException =>
val pe = new ParseException(
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index f1c36f2f5c28..e27ce29fc231 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -389,7 +389,7 @@ object DateTimeUtils extends SparkDateTimeUtils {
case "SA" | "SAT" | "SATURDAY" => SATURDAY
case _ =>
throw new SparkIllegalArgumentException(
- errorClass = "_LEGACY_ERROR_TEMP_3209",
+ errorClass = "ILLEGAL_DAY_OF_WEEK",
messageParameters = Map("string" -> string.toString))
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
index 0067114e36fd..90c802b7e28d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
@@ -105,16 +105,18 @@ object IntervalUtils extends SparkIntervalUtils {
endField: Byte,
intervalStr: String,
typeName: String,
- fallBackNotice: Option[String] = None) = {
+ fallBackNotice: Boolean = false) = {
throw new SparkIllegalArgumentException(
- errorClass = "_LEGACY_ERROR_TEMP_3214",
+ errorClass = {
+ if (fallBackNotice) "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE"
+ else "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING"
+ },
messageParameters = Map(
"intervalStr" -> intervalStr,
"supportedFormat" -> supportedFormat((intervalStr, startFiled,
endField))
.map(format => s"`$format`").mkString(", "),
"typeName" -> typeName,
- "input" -> input.toString,
- "fallBackNotice" -> fallBackNotice.map(s => s", $s").getOrElse("")))
+ "input" -> input.toString))
}
val supportedFormat = Map(
@@ -145,14 +147,15 @@ object IntervalUtils extends SparkIntervalUtils {
def checkTargetType(targetStartField: Byte, targetEndField: Byte): Boolean =
startField == targetStartField && endField == targetEndField
- input.trimAll().toString match {
+ val trimmedInput = input.trimAll().toString
+ trimmedInput match {
case yearMonthRegex(sign, year, month) if checkTargetType(YM.YEAR, YM.MONTH) =>
- toYMInterval(year, month, finalSign(sign))
+ toYMInterval(year, month, trimmedInput, finalSign(sign))
case yearMonthLiteralRegex(firstSign, secondSign, year, month)
if checkTargetType(YM.YEAR, YM.MONTH) =>
- toYMInterval(year, month, finalSign(firstSign, secondSign))
+ toYMInterval(year, month, trimmedInput, finalSign(firstSign, secondSign))
case yearMonthIndividualRegex(firstSign, value) =>
- safeToInterval("year-month") {
+ safeToInterval("year-month", trimmedInput) {
val sign = finalSign(firstSign)
if (endField == YM.YEAR) {
sign * Math.toIntExact(value.toLong * MONTHS_PER_YEAR)
@@ -164,7 +167,7 @@ object IntervalUtils extends SparkIntervalUtils {
}
}
case yearMonthIndividualLiteralRegex(firstSign, secondSign, value, unit) =>
- safeToInterval("year-month") {
+ safeToInterval("year-month", trimmedInput) {
val sign = finalSign(firstSign, secondSign)
unit.toUpperCase(Locale.ROOT) match {
case "YEAR" if checkTargetType(YM.YEAR, YM.YEAR) =>
@@ -202,21 +205,21 @@ object IntervalUtils extends SparkIntervalUtils {
new CalendarInterval(months, 0, 0)
}
- private def safeToInterval[T](interval: String)(f: => T): T = {
+ private def safeToInterval[T](interval: String, input: String)(f: => T): T = {
try {
f
} catch {
case e: SparkThrowable => throw e
case NonFatal(e) =>
throw new SparkIllegalArgumentException(
- errorClass = "_LEGACY_ERROR_TEMP_3213",
- messageParameters = Map("interval" -> interval, "msg" ->
e.getMessage),
+ errorClass = "INVALID_INTERVAL_FORMAT.INTERVAL_PARSING",
+ messageParameters = Map("input" -> input, "interval" -> interval),
cause = e)
}
}
- private def toYMInterval(year: String, month: String, sign: Int): Int = {
- safeToInterval("year-month") {
+ private def toYMInterval(year: String, month: String, input: String, sign: Int): Int = {
+ safeToInterval("year-month", input) {
val years = toLongWithRange(yearStr, year, 0, Integer.MAX_VALUE / MONTHS_PER_YEAR)
val totalMonths =
sign * (years * MONTHS_PER_YEAR + toLongWithRange(monthStr, month, 0, 11))
@@ -285,7 +288,8 @@ object IntervalUtils extends SparkIntervalUtils {
def checkTargetType(targetStartField: Byte, targetEndField: Byte): Boolean =
startField == targetStartField && endField == targetEndField
- input.trimAll().toString match {
+ val trimmedInput = input.trimAll().toString
+ trimmedInput match {
case dayHourRegex(sign, day, hour) if checkTargetType(DT.DAY, DT.HOUR) =>
toDTInterval(day, hour, "0", "0", finalSign(sign))
case dayHourLiteralRegex(firstSign, secondSign, day, hour)
@@ -324,7 +328,7 @@ object IntervalUtils extends SparkIntervalUtils {
toDTInterval(minute, secondAndMicro(second, micro),
finalSign(firstSign, secondSign))
case dayTimeIndividualRegex(firstSign, value, suffix) =>
- safeToInterval("day-time") {
+ safeToInterval("day-time", trimmedInput) {
val sign = finalSign(firstSign)
(startField, endField) match {
case (DT.DAY, DT.DAY) if suffix == null && value.length <= 9 =>
@@ -339,11 +343,11 @@ object IntervalUtils extends SparkIntervalUtils {
case -1 => parseSecondNano(s"-${secondAndMicro(value, suffix)}")
}
case (_, _) => throwIllegalIntervalFormatException(input, startField, endField,
- "day-time", DT(startField, endField).typeName, Some(fallbackNotice))
+ "day-time", DT(startField, endField).typeName, true)
}
}
case dayTimeIndividualLiteralRegex(firstSign, secondSign, value, suffix, unit) =>
- safeToInterval("day-time") {
+ safeToInterval("day-time", trimmedInput) {
val sign = finalSign(firstSign, secondSign)
unit.toUpperCase(Locale.ROOT) match {
case "DAY" if suffix == null && value.length <= 9 &&
checkTargetType(DT.DAY, DT.DAY) =>
@@ -360,11 +364,11 @@ object IntervalUtils extends SparkIntervalUtils {
case -1 => parseSecondNano(s"-${secondAndMicro(value, suffix)}")
}
case _ => throwIllegalIntervalFormatException(input, startField, endField,
- "day-time", DT(startField, endField).typeName, Some(fallbackNotice))
+ "day-time", DT(startField, endField).typeName, true)
}
}
case _ => throwIllegalIntervalFormatException(input, startField, endField,
- "day-time", DT(startField, endField).typeName, Some(fallbackNotice))
+ "day-time", DT(startField, endField).typeName, true)
}
}
@@ -512,7 +516,7 @@ object IntervalUtils extends SparkIntervalUtils {
case DT.SECOND =>
// No-op
case _ => throw new SparkIllegalArgumentException(
- errorClass = "_LEGACY_ERROR_TEMP_3212",
+ errorClass = "INVALID_INTERVAL_FORMAT.UNSUPPORTED_FROM_TO_EXPRESSION",
messageParameters = Map(
"input" -> input,
"from" -> DT.fieldToString(from),
@@ -524,10 +528,14 @@ object IntervalUtils extends SparkIntervalUtils {
micros = Math.addExact(micros, Math.multiplyExact(seconds, MICROS_PER_SECOND))
new CalendarInterval(0, sign * days, sign * micros)
} catch {
+ // Bypass SparkIllegalArgumentExceptions
+ case se: SparkIllegalArgumentException => throw se
case e: Exception =>
throw new SparkIllegalArgumentException(
- errorClass = "_LEGACY_ERROR_TEMP_3211",
- messageParameters = Map("msg" -> e.getMessage),
+ errorClass = "INVALID_INTERVAL_FORMAT.DAY_TIME_PARSING",
+ messageParameters = Map(
+ "msg" -> e.getMessage,
+ "input" -> input),
cause = e)
}
}
@@ -564,7 +572,9 @@ object IntervalUtils extends SparkIntervalUtils {
case Array(secondsStr, nanosStr) =>
val seconds = parseSeconds(secondsStr)
Math.addExact(seconds, parseNanos(nanosStr, seconds < 0))
- case _ => throw new SparkIllegalArgumentException("_LEGACY_ERROR_TEMP_3210")
+ case _ => throw new SparkIllegalArgumentException(
+ errorClass = "INVALID_INTERVAL_FORMAT.SECOND_NANO_FORMAT",
+ messageParameters = Map("input" -> secondNano))
}
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuiteBase.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuiteBase.scala
index 67a68fc92a30..e87b54339821 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuiteBase.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuiteBase.scala
@@ -1107,8 +1107,10 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
.foreach { interval =>
checkErrorInExpression[SparkIllegalArgumentException](
cast(Literal.create(interval), YearMonthIntervalType()),
- "_LEGACY_ERROR_TEMP_3213",
- Map("interval" -> "year-month", "msg" -> "integer overflow"))
+ "INVALID_INTERVAL_FORMAT.INTERVAL_PARSING",
+ Map(
+ "interval" -> "year-month",
+ "input" -> interval))
}
Seq(Byte.MaxValue, Short.MaxValue, Int.MaxValue, Int.MinValue + 1, Int.MinValue)
@@ -1176,9 +1178,8 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
val dataType = YearMonthIntervalType()
checkErrorInExpression[SparkIllegalArgumentException](
cast(Literal.create(interval), dataType),
- "_LEGACY_ERROR_TEMP_3214",
+ "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING",
Map(
- "fallBackNotice" -> "",
"typeName" -> "interval year to month",
"intervalStr" -> "year-month",
"supportedFormat" -> "`[+|-]y-m`, `INTERVAL [+|-]'[+|-]y-m' YEAR TO
MONTH`",
@@ -1198,9 +1199,8 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
.foreach { case (interval, dataType) =>
checkErrorInExpression[SparkIllegalArgumentException](
cast(Literal.create(interval), dataType),
- "_LEGACY_ERROR_TEMP_3214",
+ "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING",
Map(
- "fallBackNotice" -> "",
"typeName" -> dataType.typeName,
"intervalStr" -> "year-month",
"supportedFormat" ->
@@ -1322,10 +1322,8 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
.foreach { case (interval, dataType) =>
checkErrorInExpression[SparkIllegalArgumentException](
cast(Literal.create(interval), dataType),
- "_LEGACY_ERROR_TEMP_3214",
- Map("fallBackNotice" -> (", set
spark.sql.legacy.fromDayTimeString.enabled" +
- " to true to restore the behavior before Spark 3.0."),
- "intervalStr" -> "day-time",
+ "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ Map("intervalStr" -> "day-time",
"typeName" -> dataType.typeName,
"input" -> interval,
"supportedFormat" ->
@@ -1348,10 +1346,8 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
.foreach { case (interval, dataType) =>
checkErrorInExpression[SparkIllegalArgumentException](
cast(Literal.create(interval), dataType),
- "_LEGACY_ERROR_TEMP_3214",
- Map("fallBackNotice" -> (", set
spark.sql.legacy.fromDayTimeString.enabled" +
- " to true to restore the behavior before Spark 3.0."),
- "intervalStr" -> "day-time",
+ "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ Map("intervalStr" -> "day-time",
"typeName" -> dataType.typeName,
"input" -> interval,
"supportedFormat" ->
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index 8f15e70fa5a4..96aaf13052b0 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -896,13 +896,13 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
exception = intercept[SparkIllegalArgumentException] {
getDayOfWeekFromString(UTF8String.fromString("xx"))
},
- condition = "_LEGACY_ERROR_TEMP_3209",
+ condition = "ILLEGAL_DAY_OF_WEEK",
parameters = Map("string" -> "xx"))
checkError(
exception = intercept[SparkIllegalArgumentException] {
getDayOfWeekFromString(UTF8String.fromString("\"quote"))
},
- condition = "_LEGACY_ERROR_TEMP_3209",
+ condition = "ILLEGAL_DAY_OF_WEEK",
parameters = Map("string" -> "\"quote"))
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
index 3b89ae9be2d8..700dfe30a238 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
@@ -295,7 +295,7 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
assert(fromYearMonthString("99-10") === new CalendarInterval(99 * 12 + 10,
0, 0L))
assert(fromYearMonthString("+99-10") === new CalendarInterval(99 * 12 +
10, 0, 0L))
assert(fromYearMonthString("-8-10") === new CalendarInterval(-8 * 12 - 10,
0, 0L))
- failFuncWithInvalidInput("99-15", "month 15 outside range",
fromYearMonthString)
+ failFuncWithInvalidInput("99-15", "year-month", fromYearMonthString)
failFuncWithInvalidInput("9a9-15", "Interval string does not match
year-month format",
fromYearMonthString)
@@ -314,12 +314,12 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
val e1 = intercept[IllegalArgumentException]{
assert(fromYearMonthString("178956970-8") == new
CalendarInterval(Int.MinValue, 0, 0))
}.getMessage
- assert(e1.contains("integer overflow"))
+ assert(e1.contains("year-month"))
assert(fromYearMonthString("-178956970-8") == new
CalendarInterval(Int.MinValue, 0, 0))
val e2 = intercept[IllegalArgumentException]{
assert(fromYearMonthString("-178956970-9") == new
CalendarInterval(Int.MinValue, 0, 0))
}.getMessage
- assert(e2.contains("integer overflow"))
+ assert(e2.contains("year-month"))
}
test("from day-time string - legacy") {
@@ -338,6 +338,29 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
12 * MICROS_PER_MINUTE + millisToMicros(888)))
assert(fromDayTimeString("-3 0:0:0") === new CalendarInterval(0, -3, 0L))
+ checkError(
+ exception = intercept[SparkIllegalArgumentException] {
+ fromDayTimeString("5 30:12:20")
+ },
+ parameters = Map(
+ "msg" -> "requirement failed: hour 30 outside range [0, 23]",
+ "input" -> "5 30:12:20"),
+ condition = "INVALID_INTERVAL_FORMAT.DAY_TIME_PARSING",
+ sqlState = Some("22006")
+ )
+
+ checkError(
+ exception = intercept[SparkIllegalArgumentException] {
+ fromDayTimeString("5 12:40:30.999999999", 0, 0)
+ },
+ parameters = Map(
+ "from" -> "day",
+ "to" -> "day",
+ "input" -> "5 12:40:30.999999999"),
+ condition = "INVALID_INTERVAL_FORMAT.UNSUPPORTED_FROM_TO_EXPRESSION",
+ sqlState = Some("22006")
+ )
+
failFuncWithInvalidInput("5 30:12:20", "hour 30 outside range",
fromDayTimeString)
failFuncWithInvalidInput("5 30-12", "must match day-time format",
fromDayTimeString)
}
@@ -379,6 +402,17 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
assert(negate(new CalendarInterval(1, 2, 3)) === new CalendarInterval(-1, -2, -3))
}
+ test("parsing second_nano string") {
+ checkError(
+ exception = intercept[SparkIllegalArgumentException] {
+ toDTInterval("12", "33.33.33", 1)
+ },
+ condition = "INVALID_INTERVAL_FORMAT.SECOND_NANO_FORMAT",
+ parameters = Map("input" -> "33.33.33"),
+ sqlState = Some("22006")
+ )
+ }
+
test("subtract one interval by another") {
val input1 = new CalendarInterval(3, 1, 1 * MICROS_PER_HOUR)
val input2 = new CalendarInterval(2, 4, 100 * MICROS_PER_HOUR)
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/interval.sql.out
index 12756576ded9..472c9b1df064 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/interval.sql.out
@@ -981,9 +981,13 @@ select interval '20 15:40:32.99899999' day to hour
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d h`,
`INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20
15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "20 15:40:32.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR`",
+ "typeName" : "interval day to hour"
},
"queryContext" : [ {
"objectType" : "",
@@ -1000,9 +1004,13 @@ select interval '20 15:40:32.99899999' day to minute
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d h:m`,
`INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute:
20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "20 15:40:32.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO
MINUTE`",
+ "typeName" : "interval day to minute"
},
"queryContext" : [ {
"objectType" : "",
@@ -1019,9 +1027,13 @@ select interval '15:40:32.99899999' hour to minute
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m`,
`INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute:
15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "15:40:32.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO
MINUTE`",
+ "typeName" : "interval hour to minute"
},
"queryContext" : [ {
"objectType" : "",
@@ -1038,9 +1050,13 @@ select interval '15:40.99899999' hour to second
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m:s.n`,
`INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to
second: 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true
to restore the behavior before Spark 3.0."
+ "input" : "15:40.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO
SECOND`",
+ "typeName" : "interval hour to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -1057,9 +1073,13 @@ select interval '15:40' hour to second
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m:s.n`,
`INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to
second: 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "15:40",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO
SECOND`",
+ "typeName" : "interval hour to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -1076,9 +1096,13 @@ select interval '20 40:32.99899999' minute to second
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]m:s.n`,
`INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to
second: 20 40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to
true to restore the behavior before Spark 3.0."
+ "input" : "20 40:32.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO
SECOND`",
+ "typeName" : "interval minute to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -1460,9 +1484,11 @@ SELECT INTERVAL '178956970-8' YEAR TO MONTH
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.INTERVAL_PARSING",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Error parsing interval year-month string: integer overflow"
+ "input" : "178956970-8",
+ "interval" : "year-month"
},
"queryContext" : [ {
"objectType" : "",
@@ -1909,9 +1935,13 @@ select interval '-\t2-2\t' year to month
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match year-month format of `[+|-]y-m`,
`INTERVAL [+|-]'[+|-]y-m' YEAR TO MONTH` when cast to interval year to month:
-\t2-2\t"
+ "input" : "-\t2-2\t",
+ "intervalStr" : "year-month",
+ "supportedFormat" : "`[+|-]y-m`, `INTERVAL [+|-]'[+|-]y-m' YEAR TO MONTH`",
+ "typeName" : "interval year to month"
},
"queryContext" : [ {
"objectType" : "",
@@ -1935,9 +1965,13 @@ select interval '\n-\t10\t 12:34:46.789\t' day to second
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d
h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval
day to second: \n-\t10\t 12:34:46.789\t, set
spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior
before Spark 3.0."
+ "input" : "\n-\t10\t 12:34:46.789\t",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY
TO SECOND`",
+ "typeName" : "interval day to second"
},
"queryContext" : [ {
"objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/interval.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/interval.sql.out
index 290e55052931..3db38d482b26 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/interval.sql.out
@@ -981,9 +981,13 @@ select interval '20 15:40:32.99899999' day to hour
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d h`,
`INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20
15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "20 15:40:32.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR`",
+ "typeName" : "interval day to hour"
},
"queryContext" : [ {
"objectType" : "",
@@ -1000,9 +1004,13 @@ select interval '20 15:40:32.99899999' day to minute
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d h:m`,
`INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute:
20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "20 15:40:32.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO
MINUTE`",
+ "typeName" : "interval day to minute"
},
"queryContext" : [ {
"objectType" : "",
@@ -1019,9 +1027,13 @@ select interval '15:40:32.99899999' hour to minute
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m`,
`INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute:
15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "15:40:32.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO
MINUTE`",
+ "typeName" : "interval hour to minute"
},
"queryContext" : [ {
"objectType" : "",
@@ -1038,9 +1050,13 @@ select interval '15:40.99899999' hour to second
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m:s.n`,
`INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to
second: 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true
to restore the behavior before Spark 3.0."
+ "input" : "15:40.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO
SECOND`",
+ "typeName" : "interval hour to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -1057,9 +1073,13 @@ select interval '15:40' hour to second
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m:s.n`,
`INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to
second: 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "15:40",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO
SECOND`",
+ "typeName" : "interval hour to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -1076,9 +1096,13 @@ select interval '20 40:32.99899999' minute to second
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]m:s.n`,
`INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to
second: 20 40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to
true to restore the behavior before Spark 3.0."
+ "input" : "20 40:32.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO
SECOND`",
+ "typeName" : "interval minute to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -1460,9 +1484,11 @@ SELECT INTERVAL '178956970-8' YEAR TO MONTH
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.INTERVAL_PARSING",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Error parsing interval year-month string: integer overflow"
+ "input" : "178956970-8",
+ "interval" : "year-month"
},
"queryContext" : [ {
"objectType" : "",
@@ -1909,9 +1935,13 @@ select interval '-\t2-2\t' year to month
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match year-month format of `[+|-]y-m`,
`INTERVAL [+|-]'[+|-]y-m' YEAR TO MONTH` when cast to interval year to month:
-\t2-2\t"
+ "input" : "-\t2-2\t",
+ "intervalStr" : "year-month",
+ "supportedFormat" : "`[+|-]y-m`, `INTERVAL [+|-]'[+|-]y-m' YEAR TO MONTH`",
+ "typeName" : "interval year to month"
},
"queryContext" : [ {
"objectType" : "",
@@ -1935,9 +1965,13 @@ select interval '\n-\t10\t 12:34:46.789\t' day to second
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d
h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval
day to second: \n-\t10\t 12:34:46.789\t, set
spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior
before Spark 3.0."
+ "input" : "\n-\t10\t 12:34:46.789\t",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY
TO SECOND`",
+ "typeName" : "interval day to second"
},
"queryContext" : [ {
"objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/postgreSQL/interval.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/postgreSQL/interval.sql.out
index 8d41651cb743..1add0830d9b7 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/postgreSQL/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/postgreSQL/interval.sql.out
@@ -88,9 +88,13 @@ SELECT interval '1 2:03' day to hour
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d h`,
`INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1
2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the
behavior before Spark 3.0."
+ "input" : "1 2:03",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR`",
+ "typeName" : "interval day to hour"
},
"queryContext" : [ {
"objectType" : "",
@@ -107,9 +111,13 @@ SELECT interval '1 2:03:04' day to hour
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d h`,
`INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1
2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the
behavior before Spark 3.0."
+ "input" : "1 2:03:04",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR`",
+ "typeName" : "interval day to hour"
},
"queryContext" : [ {
"objectType" : "",
@@ -133,9 +141,13 @@ SELECT interval '1 2:03:04' day to minute
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d h:m`,
`INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute:
1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore
the behavior before Spark 3.0."
+ "input" : "1 2:03:04",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO
MINUTE`",
+ "typeName" : "interval day to minute"
},
"queryContext" : [ {
"objectType" : "",
@@ -152,9 +164,13 @@ SELECT interval '1 2:03' day to second
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d
h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval
day to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true
to restore the behavior before Spark 3.0."
+ "input" : "1 2:03",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY
TO SECOND`",
+ "typeName" : "interval day to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -178,9 +194,13 @@ SELECT interval '1 2:03' hour to minute
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m`,
`INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute:
1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the
behavior before Spark 3.0."
+ "input" : "1 2:03",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO
MINUTE`",
+ "typeName" : "interval hour to minute"
},
"queryContext" : [ {
"objectType" : "",
@@ -197,9 +217,13 @@ SELECT interval '1 2:03:04' hour to minute
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m`,
`INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute:
1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore
the behavior before Spark 3.0."
+ "input" : "1 2:03:04",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO
MINUTE`",
+ "typeName" : "interval hour to minute"
},
"queryContext" : [ {
"objectType" : "",
@@ -216,9 +240,13 @@ SELECT interval '1 2:03' hour to second
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m:s.n`,
`INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to
second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "1 2:03",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO
SECOND`",
+ "typeName" : "interval hour to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -235,9 +263,13 @@ SELECT interval '1 2:03:04' hour to second
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m:s.n`,
`INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to
second: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "1 2:03:04",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO
SECOND`",
+ "typeName" : "interval hour to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -254,9 +286,13 @@ SELECT interval '1 2:03' minute to second
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]m:s.n`,
`INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to
second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "1 2:03",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO
SECOND`",
+ "typeName" : "interval minute to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -273,9 +309,13 @@ SELECT interval '1 2:03:04' minute to second
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]m:s.n`,
`INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to
second: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "1 2:03:04",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO
SECOND`",
+ "typeName" : "interval minute to second"
},
"queryContext" : [ {
"objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index 16cabe60419c..67cd23faf255 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -268,7 +268,8 @@ struct<>
-- !query output
org.apache.spark.SparkIllegalArgumentException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_3209",
+ "errorClass" : "ILLEGAL_DAY_OF_WEEK",
+ "sqlState" : "22009",
"messageParameters" : {
"string" : "xx"
}
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index 0c68023b6eae..9e5c89045e51 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -1255,9 +1255,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d h`,
`INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20
15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "20 15:40:32.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR`",
+ "typeName" : "interval day to hour"
},
"queryContext" : [ {
"objectType" : "",
@@ -1276,9 +1280,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d h:m`,
`INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute:
20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "20 15:40:32.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO
MINUTE`",
+ "typeName" : "interval day to minute"
},
"queryContext" : [ {
"objectType" : "",
@@ -1297,9 +1305,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m`,
`INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute:
15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "15:40:32.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO
MINUTE`",
+ "typeName" : "interval hour to minute"
},
"queryContext" : [ {
"objectType" : "",
@@ -1318,9 +1330,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m:s.n`,
`INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to
second: 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true
to restore the behavior before Spark 3.0."
+ "input" : "15:40.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO
SECOND`",
+ "typeName" : "interval hour to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -1339,9 +1355,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m:s.n`,
`INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to
second: 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "15:40",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO
SECOND`",
+ "typeName" : "interval hour to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -1360,9 +1380,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]m:s.n`,
`INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to
second: 20 40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to
true to restore the behavior before Spark 3.0."
+ "input" : "20 40:32.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO
SECOND`",
+ "typeName" : "interval minute to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -1791,9 +1815,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.INTERVAL_PARSING",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Error parsing interval year-month string: integer overflow"
+ "input" : "178956970-8",
+ "interval" : "year-month"
},
"queryContext" : [ {
"objectType" : "",
@@ -2338,9 +2364,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match year-month format of `[+|-]y-m`,
`INTERVAL [+|-]'[+|-]y-m' YEAR TO MONTH` when cast to interval year to month:
-\t2-2\t"
+ "input" : "-\t2-2\t",
+ "intervalStr" : "year-month",
+ "supportedFormat" : "`[+|-]y-m`, `INTERVAL [+|-]'[+|-]y-m' YEAR TO MONTH`",
+ "typeName" : "interval year to month"
},
"queryContext" : [ {
"objectType" : "",
@@ -2367,9 +2397,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d
h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval
day to second: \n-\t10\t 12:34:46.789\t, set
spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior
before Spark 3.0."
+ "input" : "\n-\t10\t 12:34:46.789\t",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY
TO SECOND`",
+ "typeName" : "interval day to second"
},
"queryContext" : [ {
"objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
index e2b9e11eb633..a4e167051767 100644
--- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
@@ -1142,9 +1142,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d h`,
`INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20
15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "20 15:40:32.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR`",
+ "typeName" : "interval day to hour"
},
"queryContext" : [ {
"objectType" : "",
@@ -1163,9 +1167,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d h:m`,
`INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute:
20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "20 15:40:32.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO
MINUTE`",
+ "typeName" : "interval day to minute"
},
"queryContext" : [ {
"objectType" : "",
@@ -1184,9 +1192,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m`,
`INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute:
15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "15:40:32.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO
MINUTE`",
+ "typeName" : "interval hour to minute"
},
"queryContext" : [ {
"objectType" : "",
@@ -1205,9 +1217,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m:s.n`,
`INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to
second: 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true
to restore the behavior before Spark 3.0."
+ "input" : "15:40.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO
SECOND`",
+ "typeName" : "interval hour to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -1226,9 +1242,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m:s.n`,
`INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to
second: 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "15:40",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO
SECOND`",
+ "typeName" : "interval hour to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -1247,9 +1267,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]m:s.n`,
`INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to
second: 20 40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to
true to restore the behavior before Spark 3.0."
+ "input" : "20 40:32.99899999",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO
SECOND`",
+ "typeName" : "interval minute to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -1678,9 +1702,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.INTERVAL_PARSING",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Error parsing interval year-month string: integer overflow"
+ "input" : "178956970-8",
+ "interval" : "year-month"
},
"queryContext" : [ {
"objectType" : "",
@@ -2161,9 +2187,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match year-month format of `[+|-]y-m`,
`INTERVAL [+|-]'[+|-]y-m' YEAR TO MONTH` when cast to interval year to month:
-\t2-2\t"
+ "input" : "-\t2-2\t",
+ "intervalStr" : "year-month",
+ "supportedFormat" : "`[+|-]y-m`, `INTERVAL [+|-]'[+|-]y-m' YEAR TO MONTH`",
+ "typeName" : "interval year to month"
},
"queryContext" : [ {
"objectType" : "",
@@ -2190,9 +2220,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d
h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval
day to second: \n-\t10\t 12:34:46.789\t, set
spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior
before Spark 3.0."
+ "input" : "\n-\t10\t 12:34:46.789\t",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY
TO SECOND`",
+ "typeName" : "interval day to second"
},
"queryContext" : [ {
"objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out
index bff615e22af0..3855d922361b 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out
@@ -102,9 +102,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d h`,
`INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1
2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the
behavior before Spark 3.0."
+ "input" : "1 2:03",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR`",
+ "typeName" : "interval day to hour"
},
"queryContext" : [ {
"objectType" : "",
@@ -123,9 +127,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d h`,
`INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1
2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the
behavior before Spark 3.0."
+ "input" : "1 2:03:04",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR`",
+ "typeName" : "interval day to hour"
},
"queryContext" : [ {
"objectType" : "",
@@ -152,9 +160,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d h:m`,
`INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute:
1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore
the behavior before Spark 3.0."
+ "input" : "1 2:03:04",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO
MINUTE`",
+ "typeName" : "interval day to minute"
},
"queryContext" : [ {
"objectType" : "",
@@ -173,9 +185,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]d
h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval
day to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true
to restore the behavior before Spark 3.0."
+ "input" : "1 2:03",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY
TO SECOND`",
+ "typeName" : "interval day to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -202,9 +218,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m`,
`INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute:
1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the
behavior before Spark 3.0."
+ "input" : "1 2:03",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO
MINUTE`",
+ "typeName" : "interval hour to minute"
},
"queryContext" : [ {
"objectType" : "",
@@ -223,9 +243,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m`,
`INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute:
1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore
the behavior before Spark 3.0."
+ "input" : "1 2:03:04",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO
MINUTE`",
+ "typeName" : "interval hour to minute"
},
"queryContext" : [ {
"objectType" : "",
@@ -244,9 +268,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m:s.n`,
`INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to
second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "1 2:03",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO
SECOND`",
+ "typeName" : "interval hour to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -265,9 +293,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]h:m:s.n`,
`INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to
second: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "1 2:03:04",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO
SECOND`",
+ "typeName" : "interval hour to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -286,9 +318,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]m:s.n`,
`INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to
second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "1 2:03",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO
SECOND`",
+ "typeName" : "interval minute to second"
},
"queryContext" : [ {
"objectType" : "",
@@ -307,9 +343,13 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_0063",
+ "errorClass" : "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
+ "sqlState" : "22006",
"messageParameters" : {
- "msg" : "Interval string does not match day-time format of `[+|-]m:s.n`,
`INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to
second: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to
restore the behavior before Spark 3.0."
+ "input" : "1 2:03:04",
+ "intervalStr" : "day-time",
+ "supportedFormat" : "`[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO
SECOND`",
+ "typeName" : "interval minute to second"
},
"queryContext" : [ {
"objectType" : "",
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
index d1a6021d829f..176eb7c29076 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
@@ -402,13 +402,13 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession {
test("Invalid interval term should throw AnalysisException") {
val sql1 = "select interval '42-32' year to month"
- val value1 = "Error parsing interval year-month string: " +
- "requirement failed: month 32 outside range [0, 11]"
val fragment1 = "'42-32' year to month"
checkError(
exception = parseException(sql1),
- condition = "_LEGACY_ERROR_TEMP_0063",
- parameters = Map("msg" -> value1),
+ condition = "INVALID_INTERVAL_FORMAT.INTERVAL_PARSING",
+ parameters = Map(
+ "input" -> "42-32",
+ "interval" -> "year-month"),
context = ExpectedContext(
fragment = fragment1,
start = 16,
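A hypothetical companion assertion for the day-time variant, modeled on the
checkError call above: the condition and parameters mirror the postgreSQL
golden entry for interval '1 2:03' day to hour, while the context offsets
below are illustrative rather than taken from the suite.

    val sql2 = "select interval '1 2:03' day to hour"
    checkError(
      exception = parseException(sql2),
      condition = "INVALID_INTERVAL_FORMAT.UNMATCHED_FORMAT_STRING_WITH_NOTICE",
      parameters = Map(
        "input" -> "1 2:03",
        "intervalStr" -> "day-time",
        "supportedFormat" -> "`[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR`",
        "typeName" -> "interval day to hour"),
      context = ExpectedContext(
        fragment = "'1 2:03' day to hour",
        start = 16,
        stop = 35))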
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]