Github user marmbrus commented on a diff in the pull request:
https://github.com/apache/spark/pull/2983#discussion_r19699984
--- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala ---
@@ -250,27 +252,46 @@ case class Cast(child: Expression, dataType: DataType) extends UnaryExpression w
       buildCast[Date](_, d => dateToLong(d))
     case TimestampType =>
       buildCast[Timestamp](_, t => timestampToLong(t).toByte)
-    case DecimalType =>
-      buildCast[BigDecimal](_, _.toByte)
+    case DecimalType() =>
+      buildCast[Decimal](_, _.toByte)
     case x: NumericType =>
      b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b).toByte
   }

-  // DecimalConverter
-  private[this] def castToDecimal: Any => Any = child.dataType match {
+  /**
+   * Change the precision / scale in a given decimal to those set in `decimalType` (if any),
+   * returning null if it overflows or modifying `value` in-place and returning it if successful.
+   *
+   * NOTE: this modifies `value` in-place, so don't call it on external data.
+   */
+  private[this] def changePrecision(value: Decimal, decimalType: DecimalType): Decimal = {
+    decimalType match {
+      case DecimalType.Unlimited =>
+        value
+      case DecimalType.Fixed(precision, scale) =>
+        if (value.changePrecision(precision, scale)) value else null
+    }
+  }
+
+  private[this] def castToDecimal(target: DecimalType): Any => Any = child.dataType match {
+    // TODO: handle casts of null to decimal?
--- End diff ---
I believe null is generally handled elsewhere.
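For readers following the thread: in this style of expression tree, a single null check in the expression's evaluation path usually stops null from ever reaching the per-type converters (castToByte, castToDecimal, and so on), which is why the TODO in the diff may be unnecessary. The sketch below is a hypothetical illustration of that short-circuiting pattern, not the actual Cast implementation; NullShortCircuitSketch, evalCast, and toByteLike are invented names.

// A hypothetical, simplified sketch of null short-circuiting ahead of a
// type-specific converter; not the actual Spark Cast code.
object NullShortCircuitSketch {

  // `cast` stands in for a converter of the same shape as those in the diff
  // (Any => Any). It is only ever applied to non-null input.
  def evalCast(childValue: Any, cast: Any => Any): Any = {
    if (childValue == null) null // null propagates; the converter never runs
    else cast(childValue)
  }

  def main(args: Array[String]): Unit = {
    // A toy converter in the same shape as the ones in the diff.
    val toByteLike: Any => Any = v => v.asInstanceOf[Int].toByte

    println(evalCast(null, toByteLike)) // prints "null"; toByteLike never runs
    println(evalCast(300, toByteLike))  // prints "44" (300.toByte overflows)
  }
}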