This is an automated email from the ASF dual-hosted git repository. maxgekk pushed a commit to branch branch-3.1 in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.1 by this push: new ff83105 [SPARK-35679][SQL] instantToMicros overflow ff83105 is described below commit ff831054d9a3b0fbd58b532bf6c527276d7994c6 Author: dgd-contributor <dgd_contribu...@viettel.com.vn> AuthorDate: Thu Jun 10 08:08:51 2021 +0300 [SPARK-35679][SQL] instantToMicros overflow ### Why are the changes needed? With Long.MinValue cast to an instant, secs will be floored in function microsToInstant and causes an overflow when multiplied with MICROS_PER_SECOND ``` def microsToInstant(micros: Long): Instant = { val secs = Math.floorDiv(micros, MICROS_PER_SECOND) // Unfolded Math.floorMod(us, MICROS_PER_SECOND) to reuse the result of // the above calculation of `secs` via `floorDiv`. val mos = micros - secs * MICROS_PER_SECOND <- it will overflow here Instant.ofEpochSecond(secs, mos * NANOS_PER_MICROS) } ``` But the overflow is acceptable because it won't produce any change to the result However, when converting the instant back to a micros value, it will raise an Overflow Error ``` def instantToMicros(instant: Instant): Long = { val us = Math.multiplyExact(instant.getEpochSecond, MICROS_PER_SECOND) <- It overflows here val result = Math.addExact(us, NANOSECONDS.toMicros(instant.getNano)) result } ``` Code to reproduce this error ``` instantToMicros(microsToInstant(Long.MinValue)) ``` ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? Test added Closes #32839 from dgd-contributor/SPARK-35679_instantToMicro. 
Authored-by: dgd-contributor <dgd_contribu...@viettel.com.vn> Signed-off-by: Max Gekk <max.g...@gmail.com> (cherry picked from commit aa3de4077302fe7e0b23b01a338c7feab0e5974e) Signed-off-by: Max Gekk <max.g...@gmail.com> --- .../org/apache/spark/sql/catalyst/util/DateTimeUtils.scala | 14 +++++++++++--- .../spark/sql/catalyst/util/DateTimeUtilsSuite.scala | 5 +++++ 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala index 89cb67c..a4c34e1 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala @@ -375,6 +375,9 @@ object DateTimeUtils { timestamp.get } } + // See issue SPARK-35679 + // min second cause overflow in instant to micro + private val MIN_SECONDS = Math.floorDiv(Long.MinValue, MICROS_PER_SECOND) /** * Gets the number of microseconds since the epoch of 1970-01-01 00:00:00Z from the given @@ -382,9 +385,14 @@ object DateTimeUtils { * microseconds where microsecond 0 is 1970-01-01 00:00:00Z. 
*/ def instantToMicros(instant: Instant): Long = { - val us = Math.multiplyExact(instant.getEpochSecond, MICROS_PER_SECOND) - val result = Math.addExact(us, NANOSECONDS.toMicros(instant.getNano)) - result + val secs = instant.getEpochSecond + if (secs == MIN_SECONDS) { + val us = Math.multiplyExact(secs + 1, MICROS_PER_SECOND) + Math.addExact(us, NANOSECONDS.toMicros(instant.getNano) - MICROS_PER_SECOND) + } else { + val us = Math.multiplyExact(secs, MICROS_PER_SECOND) + Math.addExact(us, NANOSECONDS.toMicros(instant.getNano)) + } } /** diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala index fb2d511..8cc6bf2 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala @@ -688,4 +688,9 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper { assert(toDate("tomorrow CET ", zoneId).get === today + 1) } } + + test("SPARK-35679: instantToMicros should be able to return microseconds of Long.MinValue") { + assert(instantToMicros(microsToInstant(Long.MaxValue)) === Long.MaxValue) + assert(instantToMicros(microsToInstant(Long.MinValue)) === Long.MinValue) + } } --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org