AngersZhuuuu commented on a change in pull request #32950:
URL: https://github.com/apache/spark/pull/32950#discussion_r654447861



##########
File path: 
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala
##########
@@ -273,6 +274,24 @@ class CatalystTypeConvertersSuite extends SparkFunSuite 
with SQLHelper {
     assert(errMsg.contains("long overflow"))
   }
 
+  test("SPARK-35726: Truncate java.time.Duration by fields of day-time 
interval type") {
+    val duration = Duration.ofSeconds(90061)
+    Seq(DayTimeIntervalType(DAY, DAY) -> 86400000000L,
+      DayTimeIntervalType(DAY, HOUR) -> 90000000000L,
+      DayTimeIntervalType(DAY, MINUTE) -> 90060000000L,
+      DayTimeIntervalType(DAY, SECOND) -> 90061000000L,
+      DayTimeIntervalType(HOUR, HOUR) -> 90000000000L,
+      DayTimeIntervalType(HOUR, MINUTE) -> 90060000000L,
+      DayTimeIntervalType(HOUR, SECOND) -> 90061000000L,
+      DayTimeIntervalType(MINUTE, MINUTE) -> 90060000000L,
+      DayTimeIntervalType(MINUTE, SECOND) -> 90061000000L,
+      DayTimeIntervalType(SECOND, SECOND) -> 90061000000L)
+      .foreach { case (dt, value) =>
+        assert(CatalystTypeConverters.createToCatalystConverter(dt)(duration) 
== value)

Review comment:
       updated

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
##########
@@ -892,17 +892,35 @@ object IntervalUtils {
    * @throws ArithmeticException If numeric overflow occurs
    */
   def durationToMicros(duration: Duration): Long = {
+    durationToMicros(duration, DayTimeIntervalType.SECOND)
+  }
+
+  def durationToMicros(duration: Duration, endField: Byte): Long = {
+
+    def secondsToMicros(seconds: Long): Long = {
+      if (seconds == minDurationSeconds) {
+        val microsInSeconds = (minDurationSeconds + 1) * MICROS_PER_SECOND
+        val nanoAdjustment = duration.getNano
+        assert(0 <= nanoAdjustment && nanoAdjustment < NANOS_PER_SECOND,
+          "Duration.getNano() must return the adjustment to the seconds field 
" +
+            "in the range from 0 to 999999999 nanoseconds, inclusive.")
+        Math.addExact(microsInSeconds, (nanoAdjustment - NANOS_PER_SECOND) / 
NANOS_PER_MICROS)
+      } else {
+        val microsInSeconds = Math.multiplyExact(seconds, MICROS_PER_SECOND)
+        Math.addExact(microsInSeconds, duration.getNano / NANOS_PER_MICROS)
+      }
+    }
+
     val seconds = duration.getSeconds
-    if (seconds == minDurationSeconds) {
-      val microsInSeconds = (minDurationSeconds + 1) * MICROS_PER_SECOND
-      val nanoAdjustment = duration.getNano
-      assert(0 <= nanoAdjustment && nanoAdjustment < NANOS_PER_SECOND,
-        "Duration.getNano() must return the adjustment to the seconds field " +
-        "in the range from 0 to 999999999 nanoseconds, inclusive.")
-      Math.addExact(microsInSeconds, (nanoAdjustment - NANOS_PER_SECOND) / 
NANOS_PER_MICROS)
-    } else {
-      val microsInSeconds = Math.multiplyExact(seconds, MICROS_PER_SECOND)
-      Math.addExact(microsInSeconds, duration.getNano / NANOS_PER_MICROS)
+    endField match {
+      case DayTimeIntervalType.DAY =>
+        secondsToMicros(seconds - seconds % SECONDS_PER_DAY)
+      case DayTimeIntervalType.HOUR =>
+        secondsToMicros(seconds - seconds % SECONDS_PER_HOUR)
+      case DayTimeIntervalType.MINUTE =>
+        secondsToMicros(seconds - seconds % SECONDS_PER_MINUTE)

Review comment:
       updated

##########
File path: 
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala
##########
@@ -273,6 +274,24 @@ class CatalystTypeConvertersSuite extends SparkFunSuite 
with SQLHelper {
     assert(errMsg.contains("long overflow"))
   }
 
+  test("SPARK-35726: Truncate java.time.Duration by fields of day-time 
interval type") {
+    val duration = Duration.ofSeconds(90061)

Review comment:
       done

##########
File path: 
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala
##########
@@ -273,6 +274,24 @@ class CatalystTypeConvertersSuite extends SparkFunSuite 
with SQLHelper {
     assert(errMsg.contains("long overflow"))
   }
 
+  test("SPARK-35726: Truncate java.time.Duration by fields of day-time 
interval type") {
+    val duration = Duration.ofSeconds(90061)
+    Seq(DayTimeIntervalType(DAY, DAY) -> 86400000000L,
+      DayTimeIntervalType(DAY, HOUR) -> 90000000000L,
+      DayTimeIntervalType(DAY, MINUTE) -> 90060000000L,
+      DayTimeIntervalType(DAY, SECOND) -> 90061000000L,
+      DayTimeIntervalType(HOUR, HOUR) -> 90000000000L,
+      DayTimeIntervalType(HOUR, MINUTE) -> 90060000000L,
+      DayTimeIntervalType(HOUR, SECOND) -> 90061000000L,
+      DayTimeIntervalType(MINUTE, MINUTE) -> 90060000000L,
+      DayTimeIntervalType(MINUTE, SECOND) -> 90061000000L,
+      DayTimeIntervalType(SECOND, SECOND) -> 90061000000L)
+      .foreach { case (dt, value) =>
+        assert(CatalystTypeConverters.createToCatalystConverter(dt)(duration) 
== value)

Review comment:
       Is this difference correct according to the original logic?
   ```
    (DayTimeIntervalType(SECOND, SECOND), 90062000000L, -90062000001L))
   ```




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to