alexeykudinkin commented on code in PR #5629:
URL: https://github.com/apache/hudi/pull/5629#discussion_r926262756


##########
hudi-client/hudi-client-common/src/main/java/org/apache/hudi/common/table/log/HoodieFileSliceReader.java:
##########
@@ -21,64 +21,46 @@
 
 import org.apache.hudi.common.model.HoodieRecord;
 import org.apache.hudi.common.util.Option;
-import org.apache.hudi.common.util.SpillableMapUtils;
 import org.apache.hudi.common.util.collection.Pair;
-import org.apache.hudi.config.HoodiePayloadConfig;
 import org.apache.hudi.exception.HoodieIOException;
-import org.apache.hudi.io.storage.HoodieAvroFileReader;
+import org.apache.hudi.io.storage.HoodieFileReader;
 
 import org.apache.avro.Schema;
-import org.apache.avro.generic.GenericRecord;
 
 import java.io.IOException;
 import java.util.Iterator;
+import java.util.Properties;
 import java.util.stream.StreamSupport;
 
 /**
  * Reads records from base file and merges any updates from log files and 
provides iterable over all records in the file slice.
  */
 public class HoodieFileSliceReader<T> implements Iterator<HoodieRecord<T>> {
+
   private final Iterator<HoodieRecord<T>> recordsIterator;
 
   public static HoodieFileSliceReader getFileSliceReader(
-      Option<HoodieAvroFileReader> baseFileReader, 
HoodieMergedLogRecordScanner scanner, Schema schema, String payloadClass,
-      String preCombineField, Option<Pair<String, String>> 
simpleKeyGenFieldsOpt) throws IOException {
+      Option<HoodieFileReader> baseFileReader, HoodieMergedLogRecordScanner 
scanner, Schema schema, Properties props, Option<Pair<String, String>> 
simpleKeyGenFieldsOpt) throws IOException {
     if (baseFileReader.isPresent()) {
-      Iterator baseIterator = baseFileReader.get().getRecordIterator(schema);
+      Iterator<HoodieRecord> baseIterator = 
baseFileReader.get().getRecordIterator(schema);
       while (baseIterator.hasNext()) {
-        GenericRecord record = (GenericRecord) baseIterator.next();
-        HoodieRecord hoodieRecord = transform(
-            record, scanner, payloadClass, preCombineField, 
simpleKeyGenFieldsOpt);
-        scanner.processNextRecord(hoodieRecord);
+        scanner.processNextRecord(baseIterator.next().expansion(props, 
simpleKeyGenFieldsOpt,
+            scanner.isWithOperationField(), scanner.getPartitionName(), 
false));
       }
       return new HoodieFileSliceReader(scanner.iterator());
     } else {
       Iterable<HoodieRecord> iterable = () -> scanner.iterator();
-      HoodiePayloadConfig payloadConfig = 
HoodiePayloadConfig.newBuilder().withPayloadOrderingField(preCombineField).build();
       return new 
HoodieFileSliceReader(StreamSupport.stream(iterable.spliterator(), false)
           .map(e -> {
             try {
-              GenericRecord record = (GenericRecord) e.toIndexedRecord(schema, 
payloadConfig.getProps()).get();
-              return transform(record, scanner, payloadClass, preCombineField, 
simpleKeyGenFieldsOpt);
+              return e.expansion(props, simpleKeyGenFieldsOpt, 
scanner.isWithOperationField(), scanner.getPartitionName(), false);

Review Comment:
   I don't think we should carry over the existing `transform` into 
`HoodieRecord.expansion` as is. While the `transform` method was appropriate 
(it wrapped an Avro record into a `HoodieRecord`), the `expansion` method by 
itself doesn't make much sense: we already iterate over `HoodieRecord`s, so why 
would we need to expand them? Instead, we should appropriately initialize each 
`HoodieRecord` (key, partition path) at the point where we instantiate it 
during iteration.
   
   
   



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to