MaxGekk commented on a change in pull request #33551:
URL: https://github.com/apache/spark/pull/33551#discussion_r678303725
##########
File path: sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TimeWindowSuite.scala
##########
@@ -21,10 +21,10 @@ import org.scalatest.PrivateMethodTester

 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.catalyst.util.DateTimeConstants._
 import org.apache.spark.sql.types.{LongType, StructField, StructType,
   TimestampNTZType, TimestampType}

 class TimeWindowSuite extends SparkFunSuite with ExpressionEvalHelper with PrivateMethodTester {
-
Review comment:
Let's avoid unnecessary changes. This can cause conflicts downstream.
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/TimeWindow.scala
##########
@@ -110,10 +115,26 @@ object TimeWindow {
    * precision.
    */
   def getIntervalInMicroSeconds(interval: String): Long = {
-    val cal = IntervalUtils.stringToInterval(UTF8String.fromString(interval))
-    if (cal.months != 0) {
-      throw new IllegalArgumentException(
-        s"Intervals greater than a month is not supported ($interval).")
+    val ymIntervalErrMsg = s"Intervals greater than a month is not supported ($interval)."
+    val cal = try {
Review comment:
This is similar to https://github.com/apache/spark/blob/07fa38e2c1082c2b69b3bf9489cee4dfe4db2c26/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala#L744-L758. Could you put the code into a common helper function and reuse it here and in `Dataset`?
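A rough sketch of what such a shared helper could look like (the name `fromIntervalString` and its placement are only a suggestion, not code from the PR):

```scala
import java.util.Locale

import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.util.IntervalUtils
import org.apache.spark.sql.types.{DayTimeIntervalType, YearMonthIntervalType}
import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}

// Hypothetical shared helper: parses either an ANSI interval literal
// (e.g. "INTERVAL '10' SECOND") or a legacy multi-unit string
// (e.g. "10 seconds") into a CalendarInterval.
def fromIntervalString(input: String): CalendarInterval = {
  if (input.toLowerCase(Locale.ROOT).trim.startsWith("interval")) {
    CatalystSqlParser.parseExpression(input) match {
      case Literal(months: Int, _: YearMonthIntervalType) =>
        new CalendarInterval(months, 0, 0)
      case Literal(micros: Long, _: DayTimeIntervalType) =>
        new CalendarInterval(0, 0, micros)
      case other =>
        throw new IllegalArgumentException(s"Cannot parse '$input' as an interval: $other")
    }
  } else {
    IntervalUtils.stringToInterval(UTF8String.fromString(input))
  }
}
```

Then `TimeWindow` and `Dataset` could each wrap the call in their own `try`/`catch` to raise their context-specific errors (e.g. `cannotParseTimeDelayError` here).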
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/TimeWindow.scala
##########
@@ -110,10 +115,26 @@ object TimeWindow {
    * precision.
    */
   def getIntervalInMicroSeconds(interval: String): Long = {
-    val cal = IntervalUtils.stringToInterval(UTF8String.fromString(interval))
-    if (cal.months != 0) {
-      throw new IllegalArgumentException(
-        s"Intervals greater than a month is not supported ($interval).")
+    val ymIntervalErrMsg = s"Intervals greater than a month is not supported ($interval)."
+    val cal = try {
+      if (interval.toLowerCase(Locale.ROOT).trim.startsWith("interval")) {
+        CatalystSqlParser.parseExpression(interval) match {
+          case Literal(_: Int, _: YearMonthIntervalType) =>
+            throw new IllegalArgumentException(ymIntervalErrMsg)
+          case Literal(micros: Long, _: DayTimeIntervalType) =>
+            new CalendarInterval(0, 0, micros)
+        }
+      } else {
+        val parsed = IntervalUtils.stringToInterval(UTF8String.fromString(interval))
+        if (parsed.months != 0) {
+          throw new IllegalArgumentException(ymIntervalErrMsg)
+        } else {
+          parsed
+        }
+      }
+    } catch {
+      case NonFatal(e) =>
+        throw QueryCompilationErrors.cannotParseTimeDelayError(interval, e)
     }
     cal.days * MICROS_PER_DAY + cal.microseconds
Review comment:
Not related to the PR, but:
1. `*` and `+` can overflow. I think we should use exact arithmetic ops here, as in the sketch below.
2. One day in `CalendarInterval` can be 23, 24, or 25 hours, see https://github.com/apache/spark/blob/0494dc90af48ce7da0625485a4dc6917a244d580/common/unsafe/src/main/java/org/apache/spark/unsafe/types/CalendarInterval.java#L42
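A minimal sketch of the exact-arithmetic variant (point 1 only; it does not address the variable-length day issue from point 2):

```scala
// Overflow-safe equivalent of `cal.days * MICROS_PER_DAY + cal.microseconds`:
// Math.multiplyExact and Math.addExact throw ArithmeticException on Long
// overflow instead of silently wrapping around.
Math.addExact(Math.multiplyExact(cal.days.toLong, MICROS_PER_DAY), cal.microseconds)
```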
##########
File path: sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TimeWindowSuite.scala
##########
@@ -147,4 +147,39 @@ class TimeWindowSuite extends SparkFunSuite with ExpressionEvalHelper with Priva
     assert(timestampNTZWindow.dataType == StructType(
       Seq(StructField("start", TimestampNTZType), StructField("end", TimestampNTZType))))
   }
+
+  test("SPARK-36323: Support ANSI interval literals for TimeWindow") {
Review comment:
Could you test when `spark.sql.legacy.interval.enabled` is `true`?
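Something along these lines, assuming `withSQLConf` is available through the suite's helper traits (the test name and expected value are illustrative, not taken from the PR):

```scala
import org.apache.spark.sql.internal.SQLConf

test("SPARK-36323: interval strings with legacy interval mode enabled") {
  withSQLConf(SQLConf.LEGACY_INTERVAL_ENABLED.key -> "true") {
    // In legacy mode the parser produces CalendarInterval literals, so this
    // pins down whether the new code path still handles "interval ..." strings.
    assert(TimeWindow.getIntervalInMicroSeconds("interval 10 seconds") ==
      10 * MICROS_PER_SECOND)
  }
}
```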
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]