the-other-tim-brown commented on code in PR #17763:
URL: https://github.com/apache/hudi/pull/17763#discussion_r2655540600


##########
hudi-hadoop-common/src/test/java/org/apache/parquet/avro/TestAvroSchemaConverter.java:
##########
@@ -450,10 +445,9 @@ public void testUnknownTwoLevelListOfLists() throws 
Exception {
     // This tests the case where we don't detect a 2-level list by the repeated
     // group's name, but it must be 2-level because the repeated group doesn't
     // contain an optional or repeated element as required for 3-level lists
-    Schema listOfLists = 
optional(Schema.createArray(Schema.createArray(Schema.create(INT))));
-    Schema schema = Schema.createRecord("UnknownTwoLevelListInList", null, 
null, false);
-    schema.setFields(
-        Arrays.asList(new Schema.Field("listOfLists", listOfLists, null, 
JsonProperties.NULL_VALUE)));
+    HoodieSchema listOfLists = 
HoodieSchema.createNullable(HoodieSchema.createArray(HoodieSchema.createArray(HoodieSchema.create(HoodieSchemaType.INT))));
+    HoodieSchema schema = 
HoodieSchema.createRecord("UnknownTwoLevelListInList", null, null, false,
+        Collections.singletonList(HoodieSchemaField.of("listOfLists", 
listOfLists, null, HoodieSchema.NULL_VALUE)));
     System.err.println("Avro schema: " + schema.toString(true));

Review Comment:
   let's remove this println (and any others in the file) while we're updating it



##########
hudi-hadoop-common/src/test/java/org/apache/parquet/avro/TestAvroSchemaConverter.java:
##########
@@ -819,33 +832,34 @@ public void testReuseNameInNestedStructureAtSameLevel() 
throws Exception {
 
   @Test
   public void testUUIDType() throws Exception {
-    Schema fromAvro = Schema.createRecord(
+    HoodieSchema fromSchema = HoodieSchema.createRecord(
         "myrecord",
         null,
         null,
         false,
-        Arrays.asList(
-            new Schema.Field("uuid", 
LogicalTypes.uuid().addToSchema(Schema.create(STRING)), null, null)));
+        Collections.singletonList(
+            HoodieSchemaField.of("uuid", HoodieSchema.createUUID(), null, 
null)));
     String parquet = "message myrecord {\n" + "  required binary uuid 
(STRING);\n" + "}\n";
-    Schema toAvro = Schema.createRecord(
+    HoodieSchema toSchema = HoodieSchema.createRecord(
         "myrecord",
         null,
         null,
         false,
-        Arrays.asList(new Schema.Field("uuid", Schema.create(STRING), null, 
null)));
+        Collections.singletonList(HoodieSchemaField.of("uuid", 
HoodieSchema.create(HoodieSchemaType.STRING), null, null)));
 
-    testAvroToParquetConversion(fromAvro, parquet);
-    testParquetToAvroConversion(toAvro, parquet);
+    testAvroToParquetConversion(fromSchema, parquet);
+    testParquetToAvroConversion(toSchema, parquet);
 
     assertEquals(
-        COMPATIBLE, checkReaderWriterCompatibility(fromAvro, 
toAvro).getType());
+        COMPATIBLE, checkReaderWriterCompatibility(fromSchema.toAvroSchema(), 
toSchema.toAvroSchema()).getType());

Review Comment:
   We can update this to 
`assertTrue(HoodieSchemaCompatibility.areSchemasCompatible(fromSchema, 
toSchema))`



##########
hudi-hadoop-common/src/test/java/org/apache/parquet/avro/TestAvroSchemaConverter.java:
##########
@@ -306,9 +303,9 @@ public void testOptionalMapValue() throws Exception {
 
   @Test
   public void testOptionalArrayElement() throws Exception {
-    Schema schema = Schema.createRecord("record1", null, null, false);
-    Schema optionalIntArray = Schema.createArray(optional(Schema.create(INT)));
-    schema.setFields(Arrays.asList(new Schema.Field("myintarray", 
optionalIntArray, null, null)));
+    HoodieSchema optionalIntArray = 
HoodieSchema.createArray(HoodieSchema.createNullable(HoodieSchema.create(HoodieSchemaType.INT)));

Review Comment:
   Just a tip, you can directly use 
`HoodieSchema.createNullable(HoodieSchemaType.INT)` to shorten the setup in the 
tests



##########
hudi-hadoop-common/src/test/java/org/apache/parquet/avro/TestAvroSchemaConverter.java:
##########
@@ -340,14 +339,13 @@ public void testUnionOfTwoTypes() throws Exception {
 
   @Test
   public void testArrayOfOptionalRecords() throws Exception {
-    Schema innerRecord = Schema.createRecord("element", null, null, false);
-    Schema optionalString = optional(Schema.create(Schema.Type.STRING));
-    innerRecord.setFields(Arrays.asList(
-        new Schema.Field("s1", optionalString, null, 
JsonProperties.NULL_VALUE),
-        new Schema.Field("s2", optionalString, null, 
JsonProperties.NULL_VALUE)));
-    Schema schema = Schema.createRecord("HasArray", null, null, false);
-    schema.setFields(
-        Arrays.asList(new Schema.Field("myarray", 
Schema.createArray(optional(innerRecord)), null, null)));
+    HoodieSchema optionalString = 
HoodieSchema.createNullable(HoodieSchema.create(HoodieSchemaType.STRING));
+    List<HoodieSchemaField> innerRecordFields = Arrays.asList(
+        HoodieSchemaField.of("s1", optionalString, null, 
JsonProperties.NULL_VALUE),

Review Comment:
   Should we update `JsonProperties.NULL_VALUE` to `HoodieSchema.NULL_VALUE`?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to