This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 9f7582c8cbe [SPARK-38324][SQL] The second range is not [0, 59] in the 
day time ANSI interval
9f7582c8cbe is described below

commit 9f7582c8cbeae70c31c183567bf5320d1c3210fe
Author: haoyan.zhang <haoyan.zh...@shopee.com>
AuthorDate: Fri Feb 17 09:29:42 2023 +0300

    [SPARK-38324][SQL] The second range is not [0, 59] in the day time ANSI 
interval
    
    ### What changes were proposed in this pull request?
    Throw an error when the second value in a day (or hour, or minute) to second 
interval is out of the range [0, 59].
    
    ### Why are the changes needed?
    Currently, an invalid second value does not produce an error:
    ```
    spark.sql("select INTERVAL '10 01:01:99' DAY TO SECOND")
    DataFrame[INTERVAL '10 01:02:39' DAY TO SECOND: interval day to second]{}
    ```
    But the minute range check already works correctly:
    ```
    spark.sql("select INTERVAL '10 01:60:01' DAY TO SECOND")
    requirement failed: minute 60 outside range [0, 59](line 1, pos 16)
    ```
    
    We need to check the second value too.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    New unit tests.
    
    Closes #40033 from haoyanzhang/master.
    
    Authored-by: haoyan.zhang <haoyan.zh...@shopee.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../apache/spark/sql/catalyst/util/IntervalUtils.scala | 13 ++++++++-----
 .../spark/sql/catalyst/util/IntervalUtilsSuite.scala   | 18 ++++++++++++++++++
 2 files changed, 26 insertions(+), 5 deletions(-)

diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
index 7b574e987d9..455a74e06c4 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
@@ -381,7 +381,7 @@ object IntervalUtils {
     micros = Math.addExact(micros, sign * hours * MICROS_PER_HOUR)
     val minutes = toLongWithRange(minuteStr, minute, 0, 59)
     micros = Math.addExact(micros, sign * minutes * MICROS_PER_MINUTE)
-    micros = Math.addExact(micros, sign * parseSecondNano(second))
+    micros = Math.addExact(micros, sign * parseSecondNano(second, 0, 59))
     micros
   }
 
@@ -391,7 +391,7 @@ object IntervalUtils {
     micros = Math.addExact(micros, sign * hours * MICROS_PER_HOUR)
     val minutes = toLongWithRange(minuteStr, minute, 0, 59)
     micros = Math.addExact(micros, sign * minutes * MICROS_PER_MINUTE)
-    micros = Math.addExact(micros, sign * parseSecondNano(second))
+    micros = Math.addExact(micros, sign * parseSecondNano(second, 0, 59))
     micros
   }
 
@@ -399,7 +399,7 @@ object IntervalUtils {
     var micros = 0L
     val minutes = toLongWithRange(minuteStr, minute, 0, MAX_MINUTE)
     micros = Math.addExact(micros, sign * minutes * MICROS_PER_MINUTE)
-    micros = Math.addExact(micros, sign * parseSecondNano(second))
+    micros = Math.addExact(micros, sign * parseSecondNano(second, 0, 59))
     micros
   }
 
@@ -549,9 +549,12 @@ object IntervalUtils {
   /**
    * Parse second_nano string in ss.nnnnnnnnn format to microseconds
    */
-  private def parseSecondNano(secondNano: String): Long = {
+  private def parseSecondNano(
+      secondNano: String,
+      minSecond: Long = MIN_SECOND,
+      maxSecond: Long = MAX_SECOND): Long = {
     def parseSeconds(secondsStr: String): Long = {
-      toLongWithRange(secondStr, secondsStr, MIN_SECOND, MAX_SECOND) * 
MICROS_PER_SECOND
+      toLongWithRange(secondStr, secondsStr, minSecond, maxSecond) * 
MICROS_PER_SECOND
     }
 
     secondNano.split("\\.") match {
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
index 0e65886a2eb..3ba6787045e 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
@@ -665,4 +665,22 @@ class IntervalUtilsSuite extends SparkFunSuite with 
SQLHelper {
       assert(toYearMonthIntervalString(months, ANSI_STYLE, MONTH, MONTH) === 
month)
     }
   }
+
+  test("SPARK-38324: The second range is not [0, 59] in the day time ANSI 
interval") {
+    import org.apache.spark.sql.types.DayTimeIntervalType._
+    Seq(
+      ("10 12:40:60", 60, DAY, SECOND),
+      ("10 12:40:60.999999999", 60, DAY, SECOND),
+      ("10 12:40:99.999999999", 99, DAY, SECOND),
+      ("12:40:60", 60, HOUR, SECOND),
+      ("12:40:60.999999999", 60, HOUR, SECOND),
+      ("12:40:99.999999999", 99, HOUR, SECOND),
+      ("40:60", 60, MINUTE, SECOND),
+      ("40:60.999999999", 60, MINUTE, SECOND),
+      ("40:99.999999999", 99, MINUTE, SECOND)
+    ).foreach { case(input, second, from, to) =>
+      failFuncWithInvalidInput(
+        input, s"second $second outside range [0, 59]", s => 
fromDayTimeString(s, from, to))
+    }
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to