vinothchandar commented on a change in pull request #2421:
URL: https://github.com/apache/hudi/pull/2421#discussion_r554079488
##########
File path:
hudi-common/src/test/java/org/apache/hudi/common/table/TestTimelineUtils.java
##########
@@ -181,10 +187,113 @@ public void testRestoreInstants() throws Exception {
// verify modified partitions included cleaned data
List<String> partitions =
TimelineUtils.getAffectedPartitions(metaClient.getActiveTimeline().findInstantsAfter("1",
10));
- assertEquals(partitions, Arrays.asList(new String[]{"2", "3", "4", "5"}));
+ assertEquals(partitions, Arrays.asList(new String[] {"2", "3", "4", "5"}));
partitions =
TimelineUtils.getAffectedPartitions(metaClient.getActiveTimeline().findInstantsInRange("1",
"4"));
- assertEquals(partitions, Arrays.asList(new String[]{"2", "3", "4"}));
+ assertEquals(partitions, Arrays.asList(new String[] {"2", "3", "4"}));
+ }
+
+ @Test
+ public void testHoodieRestoreMetadataSerDeser() throws IOException {
+
+ String partitionPath1 = "/partitionPath1/";
+ String partitionPath2 = "/partitionPath2/";
+ String partitionPath3 = "/partitionPath3/";
+ //prepare HoodieRollbackStat for different partition
+ Map<FileStatus, Boolean> dataFilesOnlyStat1Files = new HashMap<>();
+ dataFilesOnlyStat1Files.put(generateFileStatus(partitionPath1 +
"dataFile1.parquet"), true);
+ dataFilesOnlyStat1Files.put(generateFileStatus(partitionPath1 +
"dataFile2.parquet"), true);
+ HoodieRollbackStat dataFilesOnlyStat1 = HoodieRollbackStat.newBuilder()
+ .withPartitionPath(partitionPath1)
+ .withDeletedFileResults(dataFilesOnlyStat1Files).build();
+
+ Map<FileStatus, Long> dataFilesOnlyStat2Files = new HashMap<>();
+ dataFilesOnlyStat2Files.put(generateFileStatus(partitionPath2 +
"dataFile1.parquet"), 5L);
+ dataFilesOnlyStat2Files.put(generateFileStatus(partitionPath2 +
"dataFile2.parquet"), 20L);
+ HoodieRollbackStat dataFilesOnlyStat2 = HoodieRollbackStat.newBuilder()
+ .withPartitionPath(partitionPath2)
+ .withRollbackBlockAppendResults(dataFilesOnlyStat2Files).build();
+
+ Map<FileStatus, Long> dataFilesOnlyStat3Files = new HashMap<>();
+ dataFilesOnlyStat3Files.put(generateFileStatus(partitionPath2 +
"dataFile1.parquet"), 100L);
+ dataFilesOnlyStat3Files.put(generateFileStatus(partitionPath2 +
"dataFile2.parquet"), 200000L);
+ HoodieRollbackStat dataFilesOnlyStat3 = HoodieRollbackStat.newBuilder()
+ .withPartitionPath(partitionPath3)
+ .withProbableLogFileToSizeMap(dataFilesOnlyStat3Files).build();
+
+ List<HoodieRollbackStat> rollbackStats = new ArrayList<>();
+ rollbackStats.add(dataFilesOnlyStat1);
+ rollbackStats.add(dataFilesOnlyStat2);
+ rollbackStats.add(dataFilesOnlyStat3);
+
+ List<HoodieInstant> instants = new ArrayList<>();
+ for (int i = 1; i <= 5; i++) {
+ String ts = i + "";
+ HoodieInstant instant = new HoodieInstant(true,
HoodieTimeline.COMMIT_ACTION, ts);
+ instants.add(instant);
+ }
+
+ HoodieRollbackMetadata rollbackMetadata =
TimelineMetadataUtils.convertRollbackMetadata("001", Option.of(1234L),
+ instants, rollbackStats);
+
+ Option<byte[]> ser =
TimelineMetadataUtils.serializeRollbackMetadata(rollbackMetadata);
+ if (ser.isPresent()) {
+ HoodieRollbackMetadata rollbackMetadata1 =
TimelineMetadataUtils.deserializeHoodieRollbackMetadata(ser.get());
+ System.out.println("deser successfully ");
+ for (Entry<String, HoodieRollbackPartitionMetadata> rollbackStat :
rollbackMetadata1.getPartitionMetadata().entrySet()) {
+ System.out.println("Deser value fro " + rollbackStat.getKey());
+ HoodieRollbackPartitionMetadata stat = rollbackStat.getValue();
+ System.out.println(
+ "1111 Rollback stat for partition " + stat.getPartitionPath() + "
success files " + (stat.getSuccessDeleteFiles() != null ? Arrays
+ .toString(stat.getSuccessDeleteFiles().toArray(new String[0]))
+ : "null")
+ + ", delete files :: " + (stat.getSuccessDeleteFiles() != null
? Arrays.toString(stat.getFailedDeleteFiles().toArray(new String[0])) : "null")
+ + ", rollback log files " + stat
+ .getRollbackLogFiles().entrySet()
+ + ", probable/written log files : " +
stat.getWrittenLogFiles().entrySet());
+ }
+ }
+
+
System.out.println("---------------------------------------------------------------------------");
+
System.out.println("---------------------------------------------------------------------------");
+
System.out.println("---------------------------------------------------------------------------");
+
+ Map<String, List<HoodieRollbackMetadata>> instantToRollbackMetadata = new
HashMap<>();
+ instantToRollbackMetadata.put("abc",
Collections.singletonList(rollbackMetadata));
+
+ HoodieRestoreMetadata restoreMetadata =
TimelineMetadataUtils.convertRestoreMetadata("002", 10L,
+ instants, instantToRollbackMetadata);
+ ser = TimelineMetadataUtils.serializeRestoreMetadata(restoreMetadata);
+ if (ser.isPresent()) {
+ HoodieRestoreMetadata restoreMetadata1 =
TimelineMetadataUtils.deserializeHoodieRestoreMetadata(ser.get());
+ System.out.println("deser successfully ");
+
+ for (Entry<String, List<HoodieRollbackMetadata>> entry :
restoreMetadata1.getHoodieRestoreMetadata().entrySet()) {
+ List<HoodieRollbackMetadata> rollbackMetadatas = entry.getValue();
+ for (HoodieRollbackMetadata rollbackMetadata2 : rollbackMetadatas) {
+ for (Entry<String, HoodieRollbackPartitionMetadata> rollbackStat :
rollbackMetadata2.getPartitionMetadata().entrySet()) {
+ System.out.println("Deser value fro " + rollbackStat.getKey());
+ HoodieRollbackPartitionMetadata stat = rollbackStat.getValue();
+ System.out.println(
Review comment:

Can you try setting a breakpoint here and see if these fields actually get
written into the output stream?
My guess is that they are, but the schema used for reading is different. Make
sure you recompile the entire project from the command line once (`mvn clean
install ...`) before running the test via the IDE.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]