cloud-fan commented on code in PR #36698:
URL: https://github.com/apache/spark/pull/36698#discussion_r913192462
##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala:
##########
@@ -208,12 +210,46 @@ case class Abs(child: Expression, failOnError: Boolean =
SQLConf.get.ansiEnabled
override protected def withNewChildInternal(newChild: Expression): Abs =
copy(child = newChild)
}
-abstract class BinaryArithmetic extends BinaryOperator with NullIntolerant
- with SupportQueryContext {
+abstract class BinaryArithmetic extends BinaryOperator
+ with NullIntolerant with SupportQueryContext {
protected val failOnError: Boolean
- override def dataType: DataType = left.dataType
+ override def checkInputDataTypes(): TypeCheckResult = (left.dataType,
right.dataType) match {
+ case (l: DecimalType, r: DecimalType) if inputType.acceptsType(l) &&
inputType.acceptsType(r) =>
+ // We allow decimal type inputs with different precision and scale, and
use special formulas
+ // to calculate the result precision and scale.
+ TypeCheckResult.TypeCheckSuccess
+ case _ => super.checkInputDataTypes()
+ }
+
+ override def dataType: DataType = (left.dataType, right.dataType) match {
+ case (DecimalType.Fixed(p1, s1), DecimalType.Fixed(p2, s2)) =>
+ resultDecimalType(p1, s1, p2, s2)
+ case _ => left.dataType
Review Comment:
nit: use `case (leftType, _) => leftType`. `Expression.dataType` is not a lazy
val, so let's avoid repeated invocations.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]