voonhous commented on code in PR #17772:
URL: https://github.com/apache/hudi/pull/17772#discussion_r2659002765


##########
hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestHoodieHiveRecord.java:
##########
@@ -75,7 +75,7 @@ void testConvertColumnValueForLogicalTypeWithDate() {
     LogicalTypes.date().addToSchema(dateSchema);
     
     IntWritable dateValue = new IntWritable(18628); // 2021-01-01
-    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(dateSchema, dateValue, true);
+    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(HoodieSchema.fromAvroSchema(dateSchema),
 dateValue, true);

Review Comment:
   Same here.



##########
hudi-client/hudi-flink-client/src/main/java/org/apache/hudi/client/model/HoodieFlinkRecord.java:
##########
@@ -205,13 +206,13 @@ private Object getColumnValue(Schema recordSchema, String 
column, Properties pro
   }
 
   @Override
-  public HoodieRecord joinWith(HoodieRecord other, Schema targetSchema) {
+  public HoodieRecord joinWith(HoodieRecord other, HoodieSchema targetSchema) {
     throw new UnsupportedOperationException("Not supported for " + 
this.getClass().getSimpleName());
   }
 
   @Override
-  public HoodieRecord prependMetaFields(Schema recordSchema, Schema 
targetSchema, MetadataValues metadataValues, Properties props) {
-    int metaFieldSize = targetSchema.getFields().size() - 
recordSchema.getFields().size();
+  public HoodieRecord prependMetaFields(HoodieSchema recordSchema, 
HoodieSchema targetSchema, MetadataValues metadataValues, Properties props) {
+    int metaFieldSize = targetSchema.getAvroSchema().getFields().size() - 
recordSchema.getAvroSchema().getFields().size();

Review Comment:
   Super nitpick: I see changes using both `toAvroSchema` and `getAvroSchema`; it would be clearer to standardize on one of the two. 
   
   Feel free to ignore this — it's very trivial. 



##########
hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestHoodieHiveRecord.java:
##########
@@ -148,7 +148,7 @@ void testConvertColumnValueForLogicalTypeWithString() {
     Schema stringSchema = Schema.create(Schema.Type.STRING);
     
     Text stringValue = new Text("test string");
-    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(stringSchema, stringValue, 
true);
+    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(HoodieSchema.fromAvroSchema(stringSchema),
 stringValue, true);

Review Comment:
   Same here.



##########
hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestHoodieHiveRecord.java:
##########
@@ -124,7 +124,7 @@ void 
testConvertColumnValueForLogicalTypeWithTimestampMicrosDisabled() {
     LogicalTypes.timestampMicros().addToSchema(timestampMicrosSchema);
     
     LongWritable timestampValue = new LongWritable(1609459200000000L);
-    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(timestampMicrosSchema, 
timestampValue, false);
+    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(HoodieSchema.fromAvroSchema(timestampMicrosSchema),
 timestampValue, false);

Review Comment:
   Same here.



##########
hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestHoodieHiveRecord.java:
##########
@@ -87,7 +87,7 @@ void 
testConvertColumnValueForLogicalTypeWithTimestampMillis() {
     LogicalTypes.timestampMillis().addToSchema(timestampMillisSchema);
     
     LongWritable timestampValue = new LongWritable(1609459200000L); // 
2021-01-01 00:00:00 UTC
-    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(timestampMillisSchema, 
timestampValue, true);
+    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(HoodieSchema.fromAvroSchema(timestampMillisSchema),
 timestampValue, true);

Review Comment:
   Same here, we can use `HoodieSchema.createTimestampMillis`.



##########
hudi-common/src/test/java/org/apache/hudi/common/testutils/reader/HoodieFileSliceTestUtils.java:
##########
@@ -230,7 +230,7 @@ public static HoodieDeleteBlock getDeleteBlock(
     return new HoodieDeleteBlock(
         hoodieRecords.stream().map(
             r -> Pair.of(DeleteRecord.create(
-                r.getKey(), r.getOrderingValue(schema, props, 
orderingFields)), r.getCurrentLocation().getPosition()))
+                r.getKey(), 
r.getOrderingValue(HoodieSchema.fromAvroSchema(schema), props, 
orderingFields)), r.getCurrentLocation().getPosition()))

Review Comment:
   Sorry, I missed this one. I think we can update the entire call chain for 
`getDeleteBlock` to accept `HoodieSchema` from the top level, so that we fully remove 
the Avro `Schema` dependency.



##########
hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestHoodieHiveRecord.java:
##########
@@ -172,7 +172,7 @@ void testConvertColumnValueForLogicalTypeWithLongWritable() 
{
     Schema stringSchema = Schema.create(Schema.Type.STRING);
     
     LongWritable longValue = new LongWritable(12345L);
-    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(stringSchema, longValue, 
true);
+    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(HoodieSchema.fromAvroSchema(stringSchema),
 longValue, true);

Review Comment:
   Same here.



##########
hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestHoodieHiveRecord.java:
##########
@@ -112,7 +112,7 @@ void 
testConvertColumnValueForLogicalTypeWithTimestampMicros() {
     LogicalTypes.timestampMicros().addToSchema(timestampMicrosSchema);
     
     LongWritable timestampValue = new LongWritable(1609459200000000L); // 
2021-01-01 00:00:00 UTC in microseconds
-    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(timestampMicrosSchema, 
timestampValue, true);
+    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(HoodieSchema.fromAvroSchema(timestampMicrosSchema),
 timestampValue, true);

Review Comment:
   Same here.



##########
hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestHoodieHiveRecord.java:
##########
@@ -65,7 +65,7 @@ void testConvertColumnValueForLogicalTypeWithNullValue() {
     Schema dateSchema = Schema.create(Schema.Type.INT);

Review Comment:
   Let's change this to `HoodieSchema.create(HoodieSchemaType.INT)`; then line 68 
will no longer need to be wrapped with `HoodieSchema#fromAvroSchema`.



##########
hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestHoodieHiveRecord.java:
##########
@@ -99,7 +99,7 @@ void 
testConvertColumnValueForLogicalTypeWithTimestampMillisDisabled() {
     LogicalTypes.timestampMillis().addToSchema(timestampMillisSchema);
     
     LongWritable timestampValue = new LongWritable(1609459200000L);
-    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(timestampMillisSchema, 
timestampValue, false);
+    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(HoodieSchema.fromAvroSchema(timestampMillisSchema),
 timestampValue, false);

Review Comment:
   Same here.



##########
hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestHoodieHiveRecord.java:
##########
@@ -137,7 +137,7 @@ void testConvertColumnValueForLogicalTypeWithDecimal() {
     LogicalTypes.decimal(10, 2).addToSchema(decimalSchema);
     
     HiveDecimalWritable decimalValue = new HiveDecimalWritable("123.45");
-    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(decimalSchema, decimalValue, 
true);
+    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(HoodieSchema.fromAvroSchema(decimalSchema),
 decimalValue, true);

Review Comment:
   Same here.



##########
hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestHoodieHiveRecord.java:
##########
@@ -160,7 +160,7 @@ void testConvertColumnValueForLogicalTypeWithIntWritable() {
     Schema stringSchema = Schema.create(Schema.Type.STRING);
     
     IntWritable intValue = new IntWritable(42);
-    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(stringSchema, intValue, true);
+    Object result = 
hoodieHiveRecord.convertColumnValueForLogicalType(HoodieSchema.fromAvroSchema(stringSchema),
 intValue, true);

Review Comment:
   Same here.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to