This is an automated email from the ASF dual-hosted git repository.
danny0405 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new 64f546b8f0c [HUDI-7873] Remove method from reader context (#11452)
64f546b8f0c is described below
commit 64f546b8f0cae70793a6150170a649bad8e0e146
Author: Jon Vexler <[email protected]>
AuthorDate: Wed Aug 14 19:46:20 2024 -0400
[HUDI-7873] Remove method from reader context (#11452)
Co-authored-by: Jonathan Vexler <=>
---
.../org/apache/hudi/BaseSparkInternalRowReaderContext.java | 7 -------
.../org/apache/hudi/common/engine/HoodieReaderContext.java | 10 ----------
.../hudi/common/testutils/reader/HoodieTestReaderContext.java | 7 -------
.../testutils/reader/HoodieFileGroupReaderTestHarness.java | 7 +++++--
.../java/org/apache/hudi/hadoop/HiveHoodieReaderContext.java | 7 -------
5 files changed, 5 insertions(+), 33 deletions(-)
diff --git a/hudi-client/hudi-spark-client/src/main/scala/org/apache/hudi/BaseSparkInternalRowReaderContext.java b/hudi-client/hudi-spark-client/src/main/scala/org/apache/hudi/BaseSparkInternalRowReaderContext.java
index 36bcba9214c..72b6276b458 100644
--- a/hudi-client/hudi-spark-client/src/main/scala/org/apache/hudi/BaseSparkInternalRowReaderContext.java
+++ b/hudi-client/hudi-spark-client/src/main/scala/org/apache/hudi/BaseSparkInternalRowReaderContext.java
@@ -29,9 +29,6 @@ import org.apache.hudi.common.model.HoodieSparkRecord;
import org.apache.hudi.common.util.ConfigUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.exception.HoodieException;
-import org.apache.hudi.storage.HoodieStorage;
-import org.apache.hudi.storage.HoodieStorageUtils;
-import org.apache.hudi.storage.StorageConfiguration;
import org.apache.avro.Schema;
import org.apache.spark.sql.HoodieInternalRowUtils;
@@ -56,10 +53,6 @@ import static org.apache.spark.sql.HoodieInternalRowUtils.getCachedSchema;
 * Subclasses need to implement {@link #getFileRecordIterator} with the reader logic.
*/
 public abstract class BaseSparkInternalRowReaderContext extends HoodieReaderContext<InternalRow> {
- @Override
- public HoodieStorage getStorage(String path, StorageConfiguration<?> conf) {
- return HoodieStorageUtils.getStorage(path, conf);
- }
@Override
public HoodieRecordMerger getRecordMerger(String mergerStrategy) {
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/engine/HoodieReaderContext.java b/hudi-common/src/main/java/org/apache/hudi/common/engine/HoodieReaderContext.java
index 94b9e1cd02d..b12a11d4b57 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/engine/HoodieReaderContext.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/engine/HoodieReaderContext.java
@@ -27,7 +27,6 @@ import org.apache.hudi.common.util.ConfigUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.collection.ClosableIterator;
import org.apache.hudi.storage.HoodieStorage;
-import org.apache.hudi.storage.StorageConfiguration;
import org.apache.hudi.storage.StoragePath;
import org.apache.avro.Schema;
@@ -144,15 +143,6 @@ public abstract class HoodieReaderContext<T> {
public static final String INTERNAL_META_INSTANT_TIME = "_4";
public static final String INTERNAL_META_SCHEMA = "_5";
- /**
- * Gets the file system based on the file path and configuration.
- *
- * @param path File path to get the file system.
- * @param conf {@link StorageConfiguration} for I/O.
- * @return The {@link HoodieStorage} instance to use.
- */
-  public abstract HoodieStorage getStorage(String path, StorageConfiguration<?> conf);
-
/**
* Gets the record iterator based on the type of engine-specific record
representation from the
* file.
diff --git a/hudi-common/src/test/java/org/apache/hudi/common/testutils/reader/HoodieTestReaderContext.java b/hudi-common/src/test/java/org/apache/hudi/common/testutils/reader/HoodieTestReaderContext.java
index 99dcd873fe7..f15a66cfe36 100644
--- a/hudi-common/src/test/java/org/apache/hudi/common/testutils/reader/HoodieTestReaderContext.java
+++ b/hudi-common/src/test/java/org/apache/hudi/common/testutils/reader/HoodieTestReaderContext.java
@@ -37,8 +37,6 @@ import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.io.storage.HoodieAvroFileReader;
import org.apache.hudi.io.storage.HoodieIOFactory;
import org.apache.hudi.storage.HoodieStorage;
-import org.apache.hudi.storage.HoodieStorageUtils;
-import org.apache.hudi.storage.StorageConfiguration;
import org.apache.hudi.storage.StoragePath;
import org.apache.avro.Schema;
@@ -67,11 +65,6 @@ public class HoodieTestReaderContext extends HoodieReaderContext<IndexedRecord>
this.payloadClass = payloadClass;
}
- @Override
- public HoodieStorage getStorage(String path, StorageConfiguration<?> conf) {
- return HoodieStorageUtils.getStorage(path, conf);
- }
-
@Override
public ClosableIterator<IndexedRecord> getFileRecordIterator(
StoragePath filePath,
diff --git a/hudi-hadoop-common/src/test/java/org/apache/hudi/common/testutils/reader/HoodieFileGroupReaderTestHarness.java b/hudi-hadoop-common/src/test/java/org/apache/hudi/common/testutils/reader/HoodieFileGroupReaderTestHarness.java
index fb355fd735a..73221c5e3ad 100644
--- a/hudi-hadoop-common/src/test/java/org/apache/hudi/common/testutils/reader/HoodieFileGroupReaderTestHarness.java
+++ b/hudi-hadoop-common/src/test/java/org/apache/hudi/common/testutils/reader/HoodieFileGroupReaderTestHarness.java
@@ -29,6 +29,7 @@ import org.apache.hudi.common.testutils.HoodieTestTable;
import org.apache.hudi.common.testutils.HoodieTestUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.collection.ClosableIterator;
+import org.apache.hudi.storage.HoodieStorage;
import org.apache.hudi.storage.StorageConfiguration;
import org.apache.hudi.storage.hadoop.HoodieHadoopStorage;
@@ -111,9 +112,11 @@ public class HoodieFileGroupReaderTestHarness extends HoodieCommonTestHarness {
throws IOException, InterruptedException {
assert (numFiles >= 1 && numFiles <= keyRanges.size());
+    HoodieStorage hoodieStorage = new HoodieHadoopStorage(basePath, storageConf);
+
Option<FileSlice> fileSliceOpt =
HoodieFileSliceTestUtils.getFileSlice(
- readerContext.getStorage(basePath, storageConf),
+ hoodieStorage,
keyRanges.subList(0, numFiles),
timestamps.subList(0, numFiles),
operationTypes.subList(0, numFiles),
@@ -134,7 +137,7 @@ public class HoodieFileGroupReaderTestHarness extends HoodieCommonTestHarness {
0L,
Long.MAX_VALUE,
properties,
- new HoodieHadoopStorage(basePath, storageConf),
+ hoodieStorage,
readerContext,
metaClient
);
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HiveHoodieReaderContext.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HiveHoodieReaderContext.java
index 904d4882cc9..46fcba63112 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HiveHoodieReaderContext.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HiveHoodieReaderContext.java
@@ -36,8 +36,6 @@ import org.apache.hudi.hadoop.utils.HoodieArrayWritableAvroUtils;
import org.apache.hudi.hadoop.utils.HoodieRealtimeRecordReaderUtils;
import org.apache.hudi.hadoop.utils.ObjectInspectorCache;
import org.apache.hudi.storage.HoodieStorage;
-import org.apache.hudi.storage.HoodieStorageUtils;
-import org.apache.hudi.storage.StorageConfiguration;
import org.apache.hudi.storage.StoragePath;
import org.apache.avro.Schema;
@@ -142,11 +140,6 @@ public class HiveHoodieReaderContext extends HoodieReaderContext<ArrayWritable>
         .stream().map(f -> String.valueOf(dataSchema.getField(f.name()).pos())).collect(Collectors.joining(",")));
}
- @Override
- public HoodieStorage getStorage(String path, StorageConfiguration<?> conf) {
- return HoodieStorageUtils.getStorage(path, conf);
- }
-
@Override
   public ClosableIterator<ArrayWritable> getFileRecordIterator(StoragePath filePath, long start, long length, Schema dataSchema, Schema requiredSchema, HoodieStorage storage) throws IOException {
JobConf jobConfCopy = new JobConf(jobConf);