sarutak commented on a change in pull request #32949:
URL: https://github.com/apache/spark/pull/32949#discussion_r666941084
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
##########
@@ -2385,20 +2385,47 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
       // `CalendarInterval` doesn't have enough info. For instance, new CalendarInterval(0, 0, 0)
       // can be derived from INTERVAL '0-0' YEAR TO MONTH as well as from
       // INTERVAL '0 00:00:00' DAY TO SECOND.
+      val fromUnit =
+        ctx.errorCapturingUnitToUnitInterval.body.from.getText.toLowerCase(Locale.ROOT)
       val toUnit =
         ctx.errorCapturingUnitToUnitInterval.body.to.getText.toLowerCase(Locale.ROOT)
       if (toUnit == "month") {
         assert(calendarInterval.days == 0 && calendarInterval.microseconds == 0)
-        // TODO(SPARK-35773): Parse year-month interval literals to tightest types
-        Literal(calendarInterval.months, YearMonthIntervalType())
+        val start = YearMonthIntervalType.stringToField(fromUnit)
+        Literal(calendarInterval.months,
+          YearMonthIntervalType(start, YearMonthIntervalType.MONTH))
       } else {
         assert(calendarInterval.months == 0)
-        val fromUnit =
-          ctx.errorCapturingUnitToUnitInterval.body.from.getText.toLowerCase(Locale.ROOT)
         val micros = IntervalUtils.getDuration(calendarInterval, TimeUnit.MICROSECONDS)
         val start = DayTimeIntervalType.stringToField(fromUnit)
         val end = DayTimeIntervalType.stringToField(toUnit)
         Literal(micros, DayTimeIntervalType(start, end))
       }
+    } else if (ctx.errorCapturingMultiUnitsInterval != null && !conf.legacyIntervalEnabled) {
+      val units =
+        ctx.errorCapturingMultiUnitsInterval.body.unit.asScala.map(
+          _.getText.toLowerCase(Locale.ROOT).stripSuffix("s"))
+      var yearMonthFields = Set.empty[Byte]
+      var dayTimeFields = Set.empty[Byte]
+      for (unit <- units) {
+        if (YearMonthIntervalType.stringToField.contains(unit)) {
+          yearMonthFields += YearMonthIntervalType.stringToField(unit)
+        } else if (DayTimeIntervalType.stringToField.contains(unit)) {
+          dayTimeFields += DayTimeIntervalType.stringToField(unit)
+        } else if (unit == "week") {
+          dayTimeFields += DayTimeIntervalType.DAY
+        } else {
+          assert(unit == "millisecond" || unit == "microsecond")
+          dayTimeFields += DayTimeIntervalType.SECOND
+        }
+      }
+      if (yearMonthFields.nonEmpty) {
+        assert(dayTimeFields.isEmpty)
Review comment:
Actually, I have already tried this, but I found that we can't get the expected error message.
The text returned by `ctx.errorCapturingMultiUnitsInterval.body.getText` does not preserve the white spaces from the original query.
Here is an example:
```
spark-sql> select interval 3 year 2 minute;
Error in query:
Cannot mix year-month and day-time fields: 3year2minute(line 1, pos 7)
```
We expect `3 year 2 minute` but actually get `3year2minute`.
Alternatively, we could split the numbers and units in the returned text and then join them back together with white spaces.
What do you think?
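As a rough illustration of that idea (only a sketch; `readableIntervalText` is a hypothetical helper, not something in this PR), we could re-insert a space at every boundary between a digit and a letter in the text returned by `getText`:
```scala
// Hypothetical helper (not part of this PR): rebuild a readable interval string
// from the whitespace-less text returned by getText, e.g. "3year2minute".
def readableIntervalText(raw: String): String = {
  raw
    .replaceAll("(?<=\\d)(?=[a-zA-Z])", " ") // "3year2minute"   -> "3 year2 minute"
    .replaceAll("(?<=[a-zA-Z])(?=\\d)", " ") // "3 year2 minute" -> "3 year 2 minute"
}

// readableIntervalText("3year2minute") == "3 year 2 minute"
```
This only looks right when the values are bare numbers, so rebuilding the message from the parsed number/unit pairs may end up being simpler.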