MaxGekk commented on code in PR #47846:
URL: https://github.com/apache/spark/pull/47846#discussion_r1729072094
##########
common/utils/src/main/resources/error/error-conditions.json:
##########
@@ -1914,6 +1914,44 @@
    ],
    "sqlState" : "22012"
  },
+  "INTERVAL_ERROR" : {
+    "message" : [
+      "Interval error."
+    ],
+    "subClass" : {
+      "DAY_TIME_PARSING" : {
Review Comment:
We already have this error condition `INVALID_INTERVAL_FORMAT`. Can't you reuse it?
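For illustration, reusing the existing condition could look roughly like this (the `DAY_TIME_PARSING` sub-class and the `input` message parameter are assumptions for the sketch, not the actual definitions):

```scala
import org.apache.spark.SparkIllegalArgumentException

// Hypothetical sketch: raise the existing INVALID_INTERVAL_FORMAT condition
// instead of introducing a new INTERVAL_ERROR one. The sub-class name and
// the "input" parameter are illustrative assumptions.
def dayTimeParsingError(input: String): SparkIllegalArgumentException =
  new SparkIllegalArgumentException(
    errorClass = "INVALID_INTERVAL_FORMAT.DAY_TIME_PARSING",
    messageParameters = Map("input" -> input))
```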
##########
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala:
##########
@@ -337,7 +337,12 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
      10,
      12 * MICROS_PER_MINUTE + millisToMicros(888)))
    assert(fromDayTimeString("-3 0:0:0") === new CalendarInterval(0, -3, 0L))
+    val dayTimeParsingException = intercept[SparkIllegalArgumentException] {
+      fromDayTimeString("5 30:12:20")
+    }
+    assert(dayTimeParsingException.getErrorClass === "INTERVAL_ERROR.DAY_TIME_PARSING")
+    assert(dayTimeParsingException.getSqlState === "22009")
Review Comment:
Could you use `checkError()`, please?
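For example, the interception and the two asserts could collapse into a single `checkError()` call, roughly like this (the `parameters` map is an assumption; fill in whatever the final error condition actually defines):

```scala
// Sketch only: assumes the condition exposes an "input" message parameter.
checkError(
  exception = intercept[SparkIllegalArgumentException] {
    fromDayTimeString("5 30:12:20")
  },
  errorClass = "INTERVAL_ERROR.DAY_TIME_PARSING",
  sqlState = Some("22009"),
  parameters = Map("input" -> "5 30:12:20"))
```

As a bonus, `checkError()` also verifies the message parameters, which the plain asserts do not.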
##########
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala:
##########
@@ -379,6 +384,15 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
    assert(negate(new CalendarInterval(1, 2, 3)) === new CalendarInterval(-1, -2, -3))
  }
+  test("parsing second_nano string") {
+    val parsingException = intercept[SparkIllegalArgumentException] {
+      toDTInterval("12", "33.33.33", 1)
+    }
+
+    assert(parsingException.getErrorClass === "INTERVAL_ERROR.SECOND_NANO_FORMAT")
+    assert(parsingException.getSqlState === "22009")
+  }
+
Review Comment:
Please use `checkError()`.
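The same pattern as above applies here, e.g. (again, the `parameters` map is a guess at the condition's actual parameters):

```scala
// Sketch only: the "input" parameter name is an assumption.
checkError(
  exception = intercept[SparkIllegalArgumentException] {
    toDTInterval("12", "33.33.33", 1)
  },
  errorClass = "INTERVAL_ERROR.SECOND_NANO_FORMAT",
  sqlState = Some("22009"),
  parameters = Map("input" -> "33.33.33"))
```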
##########
sql/core/src/test/resources/sql-tests/analyzer-results/ansi/interval.sql.out:
##########
@@ -983,7 +983,7 @@ org.apache.spark.sql.catalyst.parser.ParseException
{
  "errorClass" : "_LEGACY_ERROR_TEMP_0063",
  "messageParameters" : {
-    "msg" : "Interval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0."
+    "msg" : "[INTERVAL_ERROR.UNMATCHED_FORMAT_STRING] Interval error. Interval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.. SQLSTATE: 22009"
Review Comment:
Could you avoid/fix the situation where an exception with a properly assigned error condition name ends up embedded in the message of a legacy one?
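One way to sketch it (paraphrasing the parser code, not the actual implementation; `value`, `from`, `to`, and `ctx` stand in for whatever the surrounding rule actually provides):

```scala
// Sketch under assumptions: rethrow exceptions that already carry a named
// error condition instead of flattening their rendered message into the
// legacy template.
try {
  IntervalUtils.fromDayTimeString(value, from, to)
} catch {
  case e: SparkIllegalArgumentException =>
    throw e // already has a proper error condition; do not wrap it
  case e: IllegalArgumentException =>
    throw new ParseException(
      errorClass = "_LEGACY_ERROR_TEMP_0063",
      messageParameters = Map("msg" -> e.getMessage),
      ctx)
}
```

This keeps `_LEGACY_ERROR_TEMP_0063` only for messages that have no condition yet, so the golden file would show the new condition directly instead of nesting it.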
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]