This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new e10a789273c9 [SPARK-49311][SQL] Make it possible for large 'interval
second' values to be cast to decimal
e10a789273c9 is described below
commit e10a789273c940413466fcd0d2bc925f5313b82e
Author: Harsh Motwani <[email protected]>
AuthorDate: Mon Aug 26 14:51:57 2024 +0200
[SPARK-49311][SQL] Make it possible for large 'interval second' values to
be cast to decimal
### What changes were proposed in this pull request?
Prior to this PR, `interval second` values whose microsecond count required 19 digits could not be cast to decimal. This PR
closes that gap.
```
scala> sql("select 1000000000000.000000::interval second").show(false)
+---------------------------------------------+
|CAST(1000000000000.000000 AS INTERVAL SECOND)|
+---------------------------------------------+
|INTERVAL '1000000000000' SECOND |
+---------------------------------------------+
scala> sql("select 1000000000000.000000::interval second::decimal(38,
10)").show(false)
org.apache.spark.SparkArithmeticException:
[NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION] 0 cannot be represented as
Decimal(18, 6). If necessary set "spark.sql.ansi.enabled" to "false" to bypass
this error, and return NULL instead. SQLSTATE: 22003
```
### Why are the changes needed?
This change extends cast coverage so that large `interval second` values, previously failing with a numeric-out-of-range error, can be cast to decimal.
### Does this PR introduce _any_ user-facing change?
Yes, users couldn't cast large second intervals to decimals earlier. Now,
they can.
### How was this patch tested?
Unit test in `IntervalExpressionsSuite`.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #47808 from harshmotw-db/harshmotw-db/interval_decimal_fix.
Authored-by: Harsh Motwani <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
.../org/apache/spark/sql/catalyst/util/IntervalUtils.scala | 2 +-
.../sql/catalyst/expressions/IntervalExpressionsSuite.scala | 10 ++++++++++
2 files changed, 11 insertions(+), 1 deletion(-)
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
index 3a7c7b0904dd..0067114e36fd 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
@@ -902,7 +902,7 @@ object IntervalUtils extends SparkIntervalUtils {
case DAY => Decimal(v / MICROS_PER_DAY)
case HOUR => Decimal(v / MICROS_PER_HOUR)
case MINUTE => Decimal(v / MICROS_PER_MINUTE)
- case SECOND => Decimal(v, Decimal.MAX_LONG_DIGITS, 6)
+ case SECOND => Decimal(v, Decimal.MAX_LONG_DIGITS + 1, 6)
}
}
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/IntervalExpressionsSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/IntervalExpressionsSuite.scala
index a60ab778623e..ff5ffe4e869a 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/IntervalExpressionsSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/IntervalExpressionsSuite.scala
@@ -117,6 +117,16 @@ class IntervalExpressionsSuite extends SparkFunSuite with
ExpressionEvalHelper {
checkEvaluation(ExtractIntervalSeconds("61 seconds 1 microseconds"),
Decimal(1000001, 8, 6))
}
+ test("cast large seconds to decimal") {
+ checkEvaluation(
+ Cast(
+ Cast(Literal(Decimal("9223372036854.775807")), DayTimeIntervalType(3,
3)),
+ DecimalType(19, 6)
+ ),
+ Decimal("9223372036854.775807")
+ )
+ }
+
test("multiply") {
def check(
interval: String,
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]