voonhous commented on code in PR #17535:
URL: https://github.com/apache/hudi/pull/17535#discussion_r2609078047
##########
hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/io/storage/row/HoodieRowParquetWriteSupport.java:
##########
@@ -240,20 +242,32 @@ private ValueWriter makeWriter(Schema avroSchema,
DataType dataType) {
} else if (dataType == DataTypes.LongType || dataType instanceof
DayTimeIntervalType) {
return (row, ordinal) -> recordConsumer.addLong(row.getLong(ordinal));
} else if (dataType == DataTypes.TimestampType) {
- if (logicalType == null ||
logicalType.getName().equals(LogicalTypes.timestampMicros().getName())) {
- return (row, ordinal) -> recordConsumer.addLong((long)
timestampRebaseFunction.apply(row.getLong(ordinal)));
- } else if
(logicalType.getName().equals(LogicalTypes.timestampMillis().getName())) {
- return (row, ordinal) ->
recordConsumer.addLong(DateTimeUtils.microsToMillis((long)
timestampRebaseFunction.apply(row.getLong(ordinal))));
+ if (resolvedSchema instanceof HoodieSchema.Timestamp) {
+ HoodieSchema.Timestamp timestampSchema = (HoodieSchema.Timestamp)
resolvedSchema;
+ if (timestampSchema.getPrecision() == TimePrecision.MICROS) {
+ return (row, ordinal) -> recordConsumer.addLong((long)
timestampRebaseFunction.apply(row.getLong(ordinal)));
+ } else {
+ return (row, ordinal) ->
recordConsumer.addLong(DateTimeUtils.microsToMillis((long)
timestampRebaseFunction.apply(row.getLong(ordinal))));
+ }
} else {
- throw new UnsupportedOperationException("Unsupported Avro logical type
for TimestampType: " + logicalType);
+ // Default to micros precision when no timestamp schema is available
+ return (row, ordinal) -> recordConsumer.addLong((long)
timestampRebaseFunction.apply(row.getLong(ordinal)));
}
} else if
(SparkAdapterSupport$.MODULE$.sparkAdapter().isTimestampNTZType(dataType)) {
- if (logicalType == null ||
logicalType.getName().equals(LogicalTypes.localTimestampMicros().getName())) {
- return (row, ordinal) -> recordConsumer.addLong(row.getLong(ordinal));
- } else if
(logicalType.getName().equals(LogicalTypes.localTimestampMillis().getName())) {
- return (row, ordinal) ->
recordConsumer.addLong(DateTimeUtils.microsToMillis(row.getLong(ordinal)));
+ if (resolvedSchema instanceof HoodieSchema.Timestamp) {
Review Comment:
Done
##########
hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/utils/TestHoodieArrayWritableAvroUtils.java:
##########
@@ -326,43 +331,37 @@ private void validateRewriteWithAvro(
ObjectInspector writableOINew = getWritableOIForType(newTypeInfo);
Object javaInput = ObjectInspectorConverters.getConverter(writableOIOld,
oldObjectInspector).convert(oldWritable);
- if (isDecimalSchema(oldSchema)) {
- javaInput =
HoodieAvroUtils.DECIMAL_CONVERSION.toFixed(getDecimalValue(javaInput,
oldSchema), oldSchema, oldSchema.getLogicalType());
+ if (oldSchema instanceof HoodieSchema.Decimal) {
Review Comment:
Correct me if I'm wrong — do you mean we should use the Avro `Schema` type here instead?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]