Blizzara commented on code in PR #716:
URL: https://github.com/apache/datafusion-comet/pull/716#discussion_r1690532403
##########
native/spark-expr/src/cast.rs:
##########
@@ -502,158 +502,163 @@ impl Cast {
             eval_mode,
         }
     }
+}

-    fn cast_array(&self, array: ArrayRef) -> DataFusionResult<ArrayRef> {
-        let to_type = &self.data_type;
-        let array = array_with_timezone(array, self.timezone.clone(), Some(to_type))?;
-        let from_type = array.data_type().clone();
-        let array = match &from_type {
-            DataType::Dictionary(key_type, value_type)
-                if key_type.as_ref() == &DataType::Int32
-                    && (value_type.as_ref() == &DataType::Utf8
-                        || value_type.as_ref() == &DataType::LargeUtf8) =>
-            {
-                let dict_array = array
-                    .as_any()
-                    .downcast_ref::<DictionaryArray<Int32Type>>()
-                    .expect("Expected a dictionary array");
-
-                let casted_dictionary = DictionaryArray::<Int32Type>::new(
-                    dict_array.keys().clone(),
-                    self.cast_array(dict_array.values().clone())?,
-                );
-
-                let casted_result = match to_type {
-                    DataType::Dictionary(_, _) => Arc::new(casted_dictionary.clone()),
-                    _ => take(casted_dictionary.values().as_ref(), dict_array.keys(), None)?,
-                };
-                return Ok(spark_cast(casted_result, &from_type, to_type));
-            }
-            _ => array,
-        };
-        let from_type = array.data_type();
-
-        let cast_result = match (from_type, to_type) {
-            (DataType::Utf8, DataType::Boolean) => {
-                Self::spark_cast_utf8_to_boolean::<i32>(&array, self.eval_mode)
-            }
-            (DataType::LargeUtf8, DataType::Boolean) => {
-                Self::spark_cast_utf8_to_boolean::<i64>(&array, self.eval_mode)
-            }
-            (DataType::Utf8, DataType::Timestamp(_, _)) => {
-                Self::cast_string_to_timestamp(&array, to_type, self.eval_mode)
-            }
-            (DataType::Utf8, DataType::Date32) => {
-                Self::cast_string_to_date(&array, to_type, self.eval_mode)
-            }
-            (DataType::Int64, DataType::Int32)
-            | (DataType::Int64, DataType::Int16)
-            | (DataType::Int64, DataType::Int8)
-            | (DataType::Int32, DataType::Int16)
-            | (DataType::Int32, DataType::Int8)
-            | (DataType::Int16, DataType::Int8)
-                if self.eval_mode != EvalMode::Try =>
-            {
-                Self::spark_cast_int_to_int(&array, self.eval_mode, from_type, to_type)
-            }
-            (
-                DataType::Utf8,
-                DataType::Int8 | DataType::Int16 | DataType::Int32 | DataType::Int64,
-            ) => Self::cast_string_to_int::<i32>(to_type, &array, self.eval_mode),
-            (
-                DataType::LargeUtf8,
-                DataType::Int8 | DataType::Int16 | DataType::Int32 | DataType::Int64,
-            ) => Self::cast_string_to_int::<i64>(to_type, &array, self.eval_mode),
-            (DataType::Float64, DataType::Utf8) => {
-                Self::spark_cast_float64_to_utf8::<i32>(&array, self.eval_mode)
-            }
-            (DataType::Float64, DataType::LargeUtf8) => {
-                Self::spark_cast_float64_to_utf8::<i64>(&array, self.eval_mode)
-            }
-            (DataType::Float32, DataType::Utf8) => {
-                Self::spark_cast_float32_to_utf8::<i32>(&array, self.eval_mode)
-            }
-            (DataType::Float32, DataType::LargeUtf8) => {
-                Self::spark_cast_float32_to_utf8::<i64>(&array, self.eval_mode)
-            }
-            (DataType::Float32, DataType::Decimal128(precision, scale)) => {
-                Self::cast_float32_to_decimal128(&array, *precision, *scale, self.eval_mode)
-            }
-            (DataType::Float64, DataType::Decimal128(precision, scale)) => {
-                Self::cast_float64_to_decimal128(&array, *precision, *scale, self.eval_mode)
-            }
-            (DataType::Float32, DataType::Int8)
-            | (DataType::Float32, DataType::Int16)
-            | (DataType::Float32, DataType::Int32)
-            | (DataType::Float32, DataType::Int64)
-            | (DataType::Float64, DataType::Int8)
-            | (DataType::Float64, DataType::Int16)
-            | (DataType::Float64, DataType::Int32)
-            | (DataType::Float64, DataType::Int64)
-            | (DataType::Decimal128(_, _), DataType::Int8)
-            | (DataType::Decimal128(_, _), DataType::Int16)
-            | (DataType::Decimal128(_, _), DataType::Int32)
-            | (DataType::Decimal128(_, _), DataType::Int64)
-                if self.eval_mode != EvalMode::Try =>
-            {
-                Self::spark_cast_nonintegral_numeric_to_integral(
-                    &array,
-                    self.eval_mode,
-                    from_type,
-                    to_type,
-                )
-            }
-            _ if Self::is_datafusion_spark_compatible(from_type, to_type) => {
-                // use DataFusion cast only when we know that it is compatible with Spark
-                Ok(cast_with_options(&array, to_type, &CAST_OPTIONS)?)
-            }
-            _ => {
-                // we should never reach this code because the Scala code should be checking
-                // for supported cast operations and falling back to Spark for anything that
-                // is not yet supported
-                Err(SparkError::Internal(format!(
-                    "Native cast invoked for unsupported cast from {from_type:?} to {to_type:?}"
-                )))
-            }
-        };
-        Ok(spark_cast(cast_result?, from_type, to_type))
+pub fn spark_cast_array(

Review Comment:
   added some in https://github.com/apache/datafusion-comet/pull/716/commits/eaf224d2ba621456942a3fd4c62c7acc2ce3b387! Though not sure what exactly would be useful here, lmk if you had anything specific in mind :)
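[Editor's note] The hunk above is dense, so here is a minimal standalone sketch of the overall shape of cast_array: unwrap an Int32-keyed string dictionary by casting only its values and re-expanding through the keys with take, and otherwise fall back to Arrow's cast for type pairs known to be Spark-compatible. This is not the PR's code: it uses only arrow-rs, the name cast_sketch is illustrative, it calls plain cast rather than cast_with_options with the Spark-tuned CAST_OPTIONS, and all Spark-specific branches and EvalMode handling are omitted.

// Sketch of the dispatch shape in cast_array, using only arrow-rs.
// The real function adds Spark-specific branches (string -> bool/int/date/timestamp,
// float -> decimal, ANSI/TRY eval modes, timezone handling) before this fallback.
use std::sync::Arc;

use arrow::array::{Array, ArrayRef, DictionaryArray, StringArray};
use arrow::compute::{cast, take};
use arrow::datatypes::{DataType, Int32Type};
use arrow::error::ArrowError;

fn cast_sketch(array: ArrayRef, to_type: &DataType) -> Result<ArrayRef, ArrowError> {
    // Dictionary input: cast only the (small) values array, then expand it
    // via the keys when the target type is not itself a dictionary.
    if let Some(dict) = array.as_any().downcast_ref::<DictionaryArray<Int32Type>>() {
        let casted_values = cast(dict.values().as_ref(), to_type)?;
        return take(casted_values.as_ref(), dict.keys(), None);
    }
    // Fallback: delegate to Arrow's cast, which the real code only does for
    // combinations known to match Spark semantics.
    cast(array.as_ref(), to_type)
}

fn main() -> Result<(), ArrowError> {
    // Dictionary-encoded strings cast to Int64.
    let dict: DictionaryArray<Int32Type> = vec!["1", "2", "1"].into_iter().collect();
    let out = cast_sketch(Arc::new(dict), &DataType::Int64)?;
    assert_eq!(out.data_type(), &DataType::Int64);

    // Plain string array takes the fallback path.
    let plain: ArrayRef = Arc::new(StringArray::from(vec!["4", "5"]));
    let out = cast_sketch(plain, &DataType::Int64)?;
    assert_eq!(out.data_type(), &DataType::Int64);
    Ok(())
}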