andygrove commented on code in PR #910:
URL: https://github.com/apache/datafusion-comet/pull/910#discussion_r1747185395
##########
native/spark-expr/src/scalar_funcs.rs:
##########
@@ -547,3 +551,32 @@ pub fn spark_isnan(args: &[ColumnarValue]) -> Result<ColumnarValue, DataFusionEr
         },
     }
 }
+
+/// Spark-compatible `date_add` expression, which assumes days for the second argument, but we
+/// cannot directly add that to a Date32. We generate an IntervalDayTime from the second argument,
+/// and use DataFusion's interface to apply Arrow's add operator.
+pub fn spark_date_add(args: &[ColumnarValue]) -> Result<ColumnarValue, DataFusionError> {
+    let start = &args[0];
+    if let ColumnarValue::Scalar(ScalarValue::Int32(Some(days))) = &args[1] {
+        let interval = IntervalDayTime::new(*days, 0);
+        let interval_cv = ColumnarValue::Scalar(ScalarValue::IntervalDayTime(Some(interval)));
+        let result = datum::apply(start, &interval_cv, add)?;
+        return Ok(result);
+    } else if let ColumnarValue::Array(days) = &args[1] {
+        let mut interval_builder = IntervalDayTimeBuilder::new();
+        for day in days.as_primitive::<Int32Type>().into_iter() {
+            if let Some(non_null_day) = day {
+                interval_builder.append_value(IntervalDayTime::new(non_null_day, 0));
+            } else {
+                interval_builder.append_null();
+            }
+        }
+        let interval_cv = ColumnarValue::Array(Arc::new(interval_builder.finish()));
+        let result = datum::apply(start, &interval_cv, add)?;
+        return Ok(result);
Review Comment:
Same comment re unwrapping result:
```suggestion
return datum::apply(start, &interval_cv, add);
```
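For context, applying this suggestion (and the same change in the scalar branch above) simply means returning the `Result` from `datum::apply` directly instead of unwrapping it with `?` and rewrapping it in `Ok`. Below is a minimal sketch of how the function body might read with that change, assuming the same `use` statements already in scope in `scalar_funcs.rs` (`ColumnarValue`, `ScalarValue`, `IntervalDayTime`, `IntervalDayTimeBuilder`, `Int32Type`, the `datum` module, and Arrow's `add` kernel); the final error arm is hypothetical and not part of the diff above:

```rust
// Sketch only: same logic as the diff, restructured as a match and returning
// the Result from datum::apply directly.
pub fn spark_date_add(args: &[ColumnarValue]) -> Result<ColumnarValue, DataFusionError> {
    let start = &args[0];
    match &args[1] {
        // Scalar day count: wrap it in a single IntervalDayTime scalar and add it.
        ColumnarValue::Scalar(ScalarValue::Int32(Some(days))) => {
            let interval = IntervalDayTime::new(*days, 0);
            let interval_cv = ColumnarValue::Scalar(ScalarValue::IntervalDayTime(Some(interval)));
            // No intermediate `let result = ...?; return Ok(result);` step.
            datum::apply(start, &interval_cv, add)
        }
        // Array of day counts: build an IntervalDayTime array element by element.
        ColumnarValue::Array(days) => {
            let mut interval_builder = IntervalDayTimeBuilder::new();
            for day in days.as_primitive::<Int32Type>() {
                match day {
                    Some(d) => interval_builder.append_value(IntervalDayTime::new(d, 0)),
                    None => interval_builder.append_null(),
                }
            }
            let interval_cv = ColumnarValue::Array(Arc::new(interval_builder.finish()));
            datum::apply(start, &interval_cv, add)
        }
        // Hypothetical fallback for argument types not covered by the diff; the
        // actual PR may handle this case differently.
        other => Err(DataFusionError::Internal(format!(
            "Unsupported second argument for date_add: {other:?}"
        ))),
    }
}
```

Returning the `Result` as the tail expression of each arm avoids the redundant rebinding, which is the idiomatic pattern the suggestion points at.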