nsivabalan commented on code in PR #13495:
URL: https://github.com/apache/hudi/pull/13495#discussion_r2220276279
##########
hudi-client/hudi-flink-client/src/main/java/org/apache/hudi/table/HoodieFlinkCopyOnWriteTable.java:
##########
@@ -413,14 +414,25 @@ public Iterator<List<WriteStatus>> handleUpdate(
// always using avro record merger for legacy compaction since log scanner do not support rowdata reading yet.
config.setRecordMergerClass(HoodieAvroRecordMerger.class.getName());
// these are updates
-    HoodieMergeHandle upsertHandle = getUpdateHandle(instantTime, partitionPath, fileId, keyToNewRecords, oldDataFile);
-    return handleUpdateInternal(upsertHandle, instantTime, fileId);
+    HoodieMergeHandle mergeHandle = getUpdateHandle(instantTime, partitionPath, fileId, keyToNewRecords, oldDataFile);
+    return handleUpdateInternal(mergeHandle, instantTime, fileId);
}
-  protected Iterator<List<WriteStatus>> handleUpdateInternal(HoodieMergeHandle<?, ?, ?, ?> upsertHandle, String instantTime,
+  protected Iterator<List<WriteStatus>> handleUpdateInternal(HoodieMergeHandle<?, ?, ?, ?> mergeHandle, String instantTime,
Review Comment:
thanks
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]