MaxGekk commented on a change in pull request #31951:
URL: https://github.com/apache/spark/pull/31951#discussion_r600844914
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala
##########
@@ -295,3 +295,43 @@ case class MultiplyYMInterval(
 override def toString: String = s"($left * $right)"
 }
+
+// Multiply a day-time interval by a numeric
+case class MultiplyDTInterval(
+    interval: Expression,
+    num: Expression)
+  extends BinaryExpression with ImplicitCastInputTypes with NullIntolerant with Serializable {
+  override def left: Expression = interval
+  override def right: Expression = num
+
+  override def inputTypes: Seq[AbstractDataType] = Seq(DayTimeIntervalType, NumericType)
+  override def dataType: DataType = DayTimeIntervalType
+
+  @transient
+  private lazy val evalFunc: (Long, Any) => Any = right.dataType match {
+    case _: IntegralType => (micros: Long, num) =>
+      Math.multiplyExact(micros, num.asInstanceOf[Number].longValue())
+    case _: DecimalType => (micros: Long, num) =>
+      val decimalRes = ((new Decimal).set(micros) * num.asInstanceOf[Decimal]).toJavaBigDecimal
+      decimalRes.setScale(0, java.math.RoundingMode.HALF_UP).longValueExact()
+    case _: FractionalType => (micros: Long, num) =>
+      Math.round(micros * num.asInstanceOf[Number].doubleValue())
+  }
+
+  override def nullSafeEval(interval: Any, num: Any): Any = {
+    evalFunc(interval.asInstanceOf[Long], num)
+  }
+
+  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = right.dataType match {
+    case _: IntegralType =>
+      defineCodeGen(ctx, ev, (m, n) => s"java.lang.Math.multiplyExact($m, $n)")
+    case _: DecimalType =>
+      defineCodeGen(ctx, ev, (m, n) =>
+        s"((new Decimal()).set($m).$$times($n)).toJavaBigDecimal()" +
+        ".setScale(0, java.math.RoundingMode.HALF_UP).longValueExact()")
+    case _: FractionalType =>
+      defineCodeGen(ctx, ev, (m, n) => s"java.lang.Math.round($m * (double)$n)")
Review comment:
> Shall we turn float/double to Decimal and do the calculation?

Decimal (and Java BigDecimal) has no representation for `NaN`,
`PositiveInfinity`, and `NegativeInfinity`, see:
```scala
scala> import org.apache.spark.sql.types.Decimal
import org.apache.spark.sql.types.Decimal

scala> Decimal(Double.NaN)
java.lang.NumberFormatException
  at java.math.BigDecimal.<init>(BigDecimal.java:497)

scala> Decimal(Float.PositiveInfinity)
java.lang.NumberFormatException
  at java.math.BigDecimal.<init>(BigDecimal.java:497)

scala> Decimal(Float.MinValue)
res2: org.apache.spark.sql.types.Decimal = -340282346638528860000000000000000000000
```
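
For comparison, here is a minimal standalone sketch (plain Scala with made-up numbers, not the expression code itself) of what the `FractionalType` branch above computes for a finite factor, and where a Decimal-based path would already fail:

```scala
import scala.util.Try
import org.apache.spark.sql.types.Decimal

// 26 hours expressed as microseconds (the internal long representation
// of a day-time interval value).
val micros = 26L * 60 * 60 * 1000 * 1000

// The FractionalType branch multiplies in double arithmetic and rounds:
val scaled = Math.round(micros * 1.5)   // 140400000000L, i.e. 39 hours

// A Decimal-based path would fail before the multiplication for
// non-finite factors, as the REPL session above shows:
Try(Decimal(Double.NaN)).isFailure      // true (NumberFormatException)
```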