This is an automated email from the ASF dual-hosted git repository.
danny0405 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new f35ae58089f [HUDI-7681] Remove Hadoop Path usage in a few classes in
hudi-common module (#11108)
f35ae58089f is described below
commit f35ae58089fd9725ae09c1f183d4c97bcc2fce12
Author: Y Ethan Guo <[email protected]>
AuthorDate: Fri Apr 26 18:52:58 2024 -0700
[HUDI-7681] Remove Hadoop Path usage in a few classes in hudi-common module
(#11108)
---
.../versioning/clean/CleanMetadataV1MigrationHandler.java | 9 ++++-----
.../versioning/clean/CleanMetadataV2MigrationHandler.java | 5 ++---
.../versioning/clean/CleanPlanV1MigrationHandler.java | 5 ++---
.../versioning/clean/CleanPlanV2MigrationHandler.java | 5 ++---
.../versioning/compaction/CompactionV1MigrationHandler.java | 9 ++++-----
.../versioning/compaction/CompactionV2MigrationHandler.java | 8 ++++----
.../src/main/java/org/apache/hudi/metrics/Metrics.java | 12 ++++++------
7 files changed, 24 insertions(+), 29 deletions(-)
diff --git
a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/clean/CleanMetadataV1MigrationHandler.java
b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/clean/CleanMetadataV1MigrationHandler.java
index 41e3dc79399..38d2bf7828f 100644
---
a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/clean/CleanMetadataV1MigrationHandler.java
+++
b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/clean/CleanMetadataV1MigrationHandler.java
@@ -25,8 +25,7 @@ import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.versioning.AbstractMigratorBase;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.common.util.collection.Pair;
-
-import org.apache.hadoop.fs.Path;
+import org.apache.hudi.storage.StoragePath;
import java.util.Map;
import java.util.stream.Collectors;
@@ -57,7 +56,7 @@ public class CleanMetadataV1MigrationHandler extends
AbstractMigratorBase<Hoodie
public HoodieCleanMetadata downgradeFrom(HoodieCleanMetadata input) {
ValidationUtils.checkArgument(input.getVersion() == 2,
"Input version is " + input.getVersion() + ". Must be 2");
- final Path basePath = new Path(metaClient.getBasePath());
+ final StoragePath basePath = metaClient.getBasePathV2();
final Map<String, HoodieCleanPartitionMetadata> partitionMetadataMap =
input
.getPartitionMetadata()
@@ -94,11 +93,11 @@ public class CleanMetadataV1MigrationHandler extends
AbstractMigratorBase<Hoodie
.setVersion(getManagedVersion()).build();
}
-  private static String convertToV1Path(Path basePath, String partitionPath, String fileName) {
+  private static String convertToV1Path(StoragePath basePath, String partitionPath, String fileName) {
if ((fileName == null) || (fileName.isEmpty())) {
return fileName;
}
-    return new Path(FSUtils.constructAbsolutePath(basePath, partitionPath), fileName).toString();
+    return new StoragePath(FSUtils.constructAbsolutePath(basePath, partitionPath), fileName).toString();
}
}
diff --git
a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/clean/CleanMetadataV2MigrationHandler.java
b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/clean/CleanMetadataV2MigrationHandler.java
index d811047cf6f..f0bc04af341 100644
---
a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/clean/CleanMetadataV2MigrationHandler.java
+++
b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/clean/CleanMetadataV2MigrationHandler.java
@@ -24,8 +24,7 @@ import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.versioning.AbstractMigratorBase;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.common.util.collection.Pair;
-
-import org.apache.hadoop.fs.Path;
+import org.apache.hudi.storage.StoragePath;
import java.util.List;
import java.util.Map;
@@ -91,7 +90,7 @@ public class CleanMetadataV2MigrationHandler extends
AbstractMigratorBase<Hoodie
}
private List<String> convertToV2Path(List<String> paths) {
- return paths.stream().map(path -> new Path(path).getName())
+ return paths.stream().map(path -> new StoragePath(path).getName())
.collect(Collectors.toList());
}
}
diff --git
a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/clean/CleanPlanV1MigrationHandler.java
b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/clean/CleanPlanV1MigrationHandler.java
index a4c4cefa2a2..63deff6e223 100644
---
a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/clean/CleanPlanV1MigrationHandler.java
+++
b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/clean/CleanPlanV1MigrationHandler.java
@@ -22,8 +22,7 @@ import org.apache.hudi.avro.model.HoodieCleanerPlan;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.versioning.AbstractMigratorBase;
import org.apache.hudi.common.util.collection.Pair;
-
-import org.apache.hadoop.fs.Path;
+import org.apache.hudi.storage.StoragePath;
import java.util.ArrayList;
import java.util.Collections;
@@ -61,7 +60,7 @@ public class CleanPlanV1MigrationHandler extends
AbstractMigratorBase<HoodieClea
"This version do not support METADATA_ONLY bootstrapped tables.
Failed to downgrade.");
}
Map<String, List<String>> filesPerPartition =
plan.getFilePathsToBeDeletedPerPartition().entrySet().stream()
-        .map(e -> Pair.of(e.getKey(), e.getValue().stream().map(v -> new Path(v.getFilePath()).getName())
+        .map(e -> Pair.of(e.getKey(), e.getValue().stream().map(v -> new StoragePath(v.getFilePath()).getName())
            .collect(Collectors.toList()))).collect(Collectors.toMap(Pair::getKey, Pair::getValue));
return new HoodieCleanerPlan(plan.getEarliestInstantToRetain(),
plan.getLastCompletedCommitTimestamp(),
plan.getPolicy(), filesPerPartition, VERSION, new HashMap<>(), new
ArrayList<>(), Collections.EMPTY_MAP);
diff --git
a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/clean/CleanPlanV2MigrationHandler.java
b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/clean/CleanPlanV2MigrationHandler.java
index 99b5185ba73..2f921789443 100644
---
a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/clean/CleanPlanV2MigrationHandler.java
+++
b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/clean/CleanPlanV2MigrationHandler.java
@@ -24,8 +24,7 @@ import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.versioning.AbstractMigratorBase;
import org.apache.hudi.common.util.collection.Pair;
-
-import org.apache.hadoop.fs.Path;
+import org.apache.hudi.storage.StoragePath;
import java.util.ArrayList;
import java.util.Collections;
@@ -55,7 +54,7 @@ public class CleanPlanV2MigrationHandler extends
AbstractMigratorBase<HoodieClea
Map<String, List<HoodieCleanFileInfo>> filePathsPerPartition =
plan.getFilesToBeDeletedPerPartition().entrySet().stream().map(e ->
Pair.of(e.getKey(), e.getValue().stream()
.map(v -> new HoodieCleanFileInfo(
-            new Path(FSUtils.constructAbsolutePathInHadoopPath(metaClient.getBasePath(), e.getKey()), v).toString(), false))
+            new StoragePath(FSUtils.constructAbsolutePath(metaClient.getBasePathV2(), e.getKey()), v).toString(), false))
        .collect(Collectors.toList()))).collect(Collectors.toMap(Pair::getKey, Pair::getValue));
return new HoodieCleanerPlan(plan.getEarliestInstantToRetain(),
plan.getLastCompletedCommitTimestamp(),
plan.getPolicy(), new HashMap<>(), VERSION, filePathsPerPartition, new
ArrayList<>(), Collections.emptyMap());
diff --git
a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/compaction/CompactionV1MigrationHandler.java
b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/compaction/CompactionV1MigrationHandler.java
index 31905b1ad4b..8e9307ac376 100644
---
a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/compaction/CompactionV1MigrationHandler.java
+++
b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/compaction/CompactionV1MigrationHandler.java
@@ -24,8 +24,7 @@ import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.versioning.AbstractMigratorBase;
import org.apache.hudi.common.util.ValidationUtils;
-
-import org.apache.hadoop.fs.Path;
+import org.apache.hudi.storage.StoragePath;
import java.util.ArrayList;
import java.util.List;
@@ -56,7 +55,7 @@ public class CompactionV1MigrationHandler extends
AbstractMigratorBase<HoodieCom
public HoodieCompactionPlan downgradeFrom(HoodieCompactionPlan input) {
ValidationUtils.checkArgument(input.getVersion() == 2, "Input version is "
+ input.getVersion() + ". Must be 2");
HoodieCompactionPlan compactionPlan = new HoodieCompactionPlan();
- final Path basePath = new Path(metaClient.getBasePath());
+ final StoragePath basePath = metaClient.getBasePathV2();
List<HoodieCompactionOperation> v1CompactionOperationList = new
ArrayList<>();
if (null != input.getOperations()) {
v1CompactionOperationList = input.getOperations().stream().map(inp ->
@@ -73,11 +72,11 @@ public class CompactionV1MigrationHandler extends
AbstractMigratorBase<HoodieCom
return compactionPlan;
}
-  private static String convertToV1Path(Path basePath, String partitionPath, String fileName) {
+  private static String convertToV1Path(StoragePath basePath, String partitionPath, String fileName) {
    if ((fileName == null) || (fileName.isEmpty())) {
      return fileName;
    }
-    return new Path(FSUtils.constructAbsolutePath(basePath, partitionPath), fileName).toString();
+    return new StoragePath(FSUtils.constructAbsolutePath(basePath, partitionPath), fileName).toString();
}
}
diff --git
a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/compaction/CompactionV2MigrationHandler.java
b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/compaction/CompactionV2MigrationHandler.java
index 980766150ae..fde5bc14000 100644
---
a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/compaction/CompactionV2MigrationHandler.java
+++
b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/compaction/CompactionV2MigrationHandler.java
@@ -23,8 +23,7 @@ import org.apache.hudi.avro.model.HoodieCompactionPlan;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.versioning.AbstractMigratorBase;
import org.apache.hudi.common.util.ValidationUtils;
-
-import org.apache.hadoop.fs.Path;
+import org.apache.hudi.storage.StoragePath;
import java.util.ArrayList;
import java.util.List;
@@ -55,8 +54,9 @@ public class CompactionV2MigrationHandler extends
AbstractMigratorBase<HoodieCom
v2CompactionOperationList = input.getOperations().stream().map(inp ->
HoodieCompactionOperation.newBuilder().setBaseInstantTime(inp.getBaseInstantTime())
.setFileId(inp.getFileId()).setPartitionPath(inp.getPartitionPath()).setMetrics(inp.getMetrics())
-          .setDataFilePath(inp.getDataFilePath() == null ? null : new Path(inp.getDataFilePath()).getName()).setDeltaFilePaths(
-              inp.getDeltaFilePaths().stream().map(s -> new Path(s).getName()).collect(Collectors.toList()))
+          .setDataFilePath(inp.getDataFilePath() == null ? null : new StoragePath(inp.getDataFilePath()).getName())
+          .setDeltaFilePaths(
+              inp.getDeltaFilePaths().stream().map(s -> new StoragePath(s).getName()).collect(Collectors.toList()))
.build()).collect(Collectors.toList());
}
compactionPlan.setOperations(v2CompactionOperationList);
diff --git a/hudi-common/src/main/java/org/apache/hudi/metrics/Metrics.java
b/hudi-common/src/main/java/org/apache/hudi/metrics/Metrics.java
index 17e21254593..af32248eea1 100644
--- a/hudi-common/src/main/java/org/apache/hudi/metrics/Metrics.java
+++ b/hudi-common/src/main/java/org/apache/hudi/metrics/Metrics.java
@@ -23,12 +23,12 @@ import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.StringUtils;
import org.apache.hudi.config.metrics.HoodieMetricsConfig;
-import org.apache.hudi.hadoop.fs.HadoopFSUtils;
+import org.apache.hudi.storage.HoodieStorage;
+import org.apache.hudi.storage.HoodieStorageUtils;
+import org.apache.hudi.storage.StoragePath;
import com.codahale.metrics.MetricRegistry;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -98,10 +98,10 @@ public class Metrics {
private List<MetricsReporter>
addAdditionalMetricsExporters(HoodieMetricsConfig metricConfig) {
List<MetricsReporter> reporterList = new ArrayList<>();
List<String> propPathList =
StringUtils.split(metricConfig.getMetricReporterFileBasedConfigs(), ",");
-    try (FileSystem fs = HadoopFSUtils.getFs(propPathList.get(0), new Configuration())) {
+    try (HoodieStorage storage = HoodieStorageUtils.getStorage(propPathList.get(0), new Configuration())) {
      for (String propPath : propPathList) {
        HoodieMetricsConfig secondarySourceConfig = HoodieMetricsConfig.newBuilder().fromInputStream(
-            fs.open(new Path(propPath))).withPath(metricConfig.getBasePath()).build();
+            storage.open(new StoragePath(propPath))).withPath(metricConfig.getBasePath()).build();
Option<MetricsReporter> reporter =
MetricsReporterFactory.createReporter(secondarySourceConfig, registry);
if (reporter.isPresent()) {
reporterList.add(reporter.get());
@@ -192,7 +192,7 @@ public class Metrics {
private static String getBasePath(HoodieMetricsConfig metricsConfig) {
String basePath = metricsConfig.getBasePath();
if (basePath.endsWith(HoodieTableMetaClient.METADATA_TABLE_FOLDER_PATH)) {
-      String toRemoveSuffix = Path.SEPARATOR + HoodieTableMetaClient.METADATA_TABLE_FOLDER_PATH;
+      String toRemoveSuffix = StoragePath.SEPARATOR + HoodieTableMetaClient.METADATA_TABLE_FOLDER_PATH;
basePath = basePath.substring(0, basePath.length() -
toRemoveSuffix.length());
}
return basePath;