bvaradar commented on a change in pull request #1157: [HUDI-332]Add operation 
type (insert/upsert/bulkinsert/delete) to HoodieCommitMetadata
URL: https://github.com/apache/incubator-hudi/pull/1157#discussion_r365069807
 
 

 ##########
 File path: 
hudi-client/src/test/java/org/apache/hudi/io/TestHoodieCommitArchiveLog.java
 ##########
 @@ -397,10 +402,81 @@ public void testArchiveCommitCompactionNoHole() throws 
IOException {
         timeline.containsInstant(new HoodieInstant(false, 
HoodieTimeline.COMMIT_ACTION, "107")));
   }
 
+  @Test
+  public void testArchiveCommitAndDeepCopy() throws IOException {
+    HoodieWriteConfig cfg = HoodieWriteConfig.newBuilder().withPath(basePath)
+        
.withSchema(HoodieTestDataGenerator.TRIP_EXAMPLE_SCHEMA).withParallelism(2, 
2).forTable("test-trip-table")
+        
.withCompactionConfig(HoodieCompactionConfig.newBuilder().retainCommits(1).archiveCommitsWith(2,
 5).build())
+        .build();
+    metaClient = HoodieTableMetaClient.reload(metaClient);
+    HoodieCommitArchiveLog archiveLog = new HoodieCommitArchiveLog(cfg, 
metaClient);
+    HoodieTestDataGenerator.createCommitFile(basePath, "100", dfs.getConf());
+    HoodieTestDataGenerator.createCompactionRequestedFile(basePath, "101", 
dfs.getConf());
+    HoodieTestDataGenerator.createCompactionAuxiliaryMetadata(basePath,
+        new HoodieInstant(State.REQUESTED, HoodieTimeline.COMPACTION_ACTION, 
"101"), dfs.getConf());
+    HoodieTestDataGenerator.createCommitFile(basePath, "102", dfs.getConf());
+    HoodieTestDataGenerator.createCommitFile(basePath, "103", dfs.getConf());
+    HoodieTestDataGenerator.createCompactionRequestedFile(basePath, "104", 
dfs.getConf());
+    HoodieTestDataGenerator.createCompactionAuxiliaryMetadata(basePath,
+        new HoodieInstant(State.REQUESTED, HoodieTimeline.COMPACTION_ACTION, 
"104"), dfs.getConf());
+    HoodieTestDataGenerator.createCommitFile(basePath, "105", dfs.getConf());
+    HoodieTestDataGenerator.createCommitFile(basePath, "106", dfs.getConf());
+    HoodieTestDataGenerator.createCommitFile(basePath, "107", dfs.getConf());
+
+    assertTrue(archiveLog.archiveIfRequired(jsc));
+
+    // read the file
+    Reader reader =
+        HoodieLogFormat.newReader(dfs, new HoodieLogFile(new Path(basePath + 
"/.hoodie/.commits_.archive.1_1-0-1")),
+            HoodieArchivedMetaEntry.getClassSchema());
+    List<IndexedRecord> readRecords = new ArrayList<>();
+    // read the avro blocks and validate the number of records written in each 
avro block
+    while (reader.hasNext()) {
+      HoodieAvroDataBlock blk = (HoodieAvroDataBlock) reader.next();
+      List<IndexedRecord> records = blk.getRecords();
+      readRecords.addAll(records);
+    }
+
+    HoodieArchivedMetaEntry metaEntry = 
(HoodieArchivedMetaEntry)SpecificData.get()
+        .deepCopy(HoodieArchivedMetaEntry.getClassSchema(), 
readRecords.get(0));
+
+    assertEquals("operationType must be UNKNOWN by default.",
+        metaEntry.getHoodieCommitMetadata().getOperationType(), "UNKNOWN");
+    reader.close();
+  }
+
   private void verifyInflightInstants(HoodieTableMetaClient metaClient, int 
expectedTotalInstants) {
     HoodieTimeline timeline = metaClient.getActiveTimeline().reload()
         
.getTimelineOfActions(Sets.newHashSet(HoodieTimeline.CLEAN_ACTION)).filterInflights();
     assertEquals("Loaded inflight clean actions and the count should match", 
expectedTotalInstants,
         timeline.countInstants());
   }
+
+  @Test
+  public void testCommitMetadataConverter() {
+    HoodieCommitMetadata hoodieCommitMetadata = new HoodieCommitMetadata();
+    hoodieCommitMetadata.setOperationType(WriteOperationType.INSERT);
+
+    HoodieWriteConfig cfg = HoodieWriteConfig.newBuilder().withPath(basePath)
+        
.withSchema(HoodieTestDataGenerator.TRIP_EXAMPLE_SCHEMA).withParallelism(2, 
2).forTable("test-commitMetadata-converter")
+        
.withCompactionConfig(HoodieCompactionConfig.newBuilder().retainCommits(1).archiveCommitsWith(2,
 5).build())
+        .build();
+    metaClient = HoodieTableMetaClient.reload(metaClient);
+    HoodieCommitArchiveLog archiveLog = new HoodieCommitArchiveLog(cfg, 
metaClient);
+
+    Class<?> clazz  = HoodieCommitArchiveLog.class;
+    try {
+      Method commitMetadataConverter = 
clazz.getDeclaredMethod("commitMetadataConverter", HoodieCommitMetadata.class);
+      commitMetadataConverter.setAccessible(true);
+      org.apache.hudi.avro.model.HoodieCommitMetadata expectedCommitMetadata =
+          (org.apache.hudi.avro.model.HoodieCommitMetadata) 
commitMetadataConverter.invoke(archiveLog, hoodieCommitMetadata);
+      assertEquals(expectedCommitMetadata.getOperationType(), 
WriteOperationType.INSERT.toString());
+    } catch (NoSuchMethodException e) {
+      e.printStackTrace();
 
 Review comment:
   Instead of e.printStackTrace() in all these catch blocks, can you throw 
these exceptions so that the test fails?

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

Reply via email to