This is an automated email from the ASF dual-hosted git repository.
yihua pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new a078242b19d [HUDI-7343] Replace Path.SEPARATOR with
HoodieLocation.SEPARATOR (#10570)
a078242b19d is described below
commit a078242b19dc3f8b46d08e197d8b77fa34f1808a
Author: Y Ethan Guo <[email protected]>
AuthorDate: Tue Jan 30 08:47:30 2024 -0800
[HUDI-7343] Replace Path.SEPARATOR with HoodieLocation.SEPARATOR (#10570)
---
.../apache/hudi/cli/commands/ExportCommand.java | 5 +--
.../cli/commands/TestHoodieLogFileCommand.java | 3 +-
.../apache/hudi/cli/commands/TestTableCommand.java | 5 +--
.../hudi/cli/integ/ITTestBootstrapCommand.java | 9 +++---
.../cli/integ/ITTestHDFSParquetImportCommand.java | 5 +--
.../hudi/cli/integ/ITTestMarkersCommand.java | 5 +--
.../hudi/cli/integ/ITTestSavepointsCommand.java | 3 +-
.../apache/hudi/cli/integ/ITTestTableCommand.java | 12 ++++----
.../hudi/client/heartbeat/HeartbeatUtils.java | 3 +-
.../client/heartbeat/HoodieHeartbeatClient.java | 6 ++--
.../lock/FileSystemBasedLockProvider.java | 7 +++--
.../BaseHoodieFunctionalIndexClient.java | 5 ++-
.../timeline/TestCompletionTimeQueryView.java | 6 ++--
.../utils/TestLegacyArchivedMetaEntryReader.java | 5 +--
.../hudi/client/TestJavaHoodieBackedMetadata.java | 9 +++---
.../client/utils/SparkMetadataWriterUtils.java | 3 +-
.../hudi/client/TestHoodieClientMultiWriter.java | 3 +-
.../functional/TestHoodieBackedMetadata.java | 19 ++++++------
.../DirectMarkerBasedDetectionStrategy.java | 3 +-
.../hudi/common/fs/inline/InLineFSUtils.java | 12 +++++---
.../common/heartbeat/HoodieHeartbeatUtils.java | 4 ++-
.../hudi/common/table/HoodieTableMetaClient.java | 36 ++++++++++++----------
.../hudi/metadata/AbstractHoodieTableMetadata.java | 9 +++---
.../hudi/metadata/HoodieMetadataPayload.java | 3 +-
.../apache/hudi/metadata/HoodieTableMetadata.java | 11 ++++---
.../hudi/metadata/HoodieTableMetadataUtil.java | 3 +-
.../common/fs/TestHoodieWrapperFileSystem.java | 3 +-
.../org/apache/hudi/sink/meta/CkpMetadata.java | 4 ++-
.../java/org/apache/hudi/source/FileIndex.java | 3 +-
.../hudi/table/catalog/TableOptionProperties.java | 3 +-
.../apache/hudi/table/format/FilePathUtils.java | 5 +--
.../main/java/org/apache/hudi/util/ClientIds.java | 3 +-
.../apache/hudi/util/ViewStorageProperties.java | 3 +-
.../apache/hudi/sink/ITTestDataStreamWrite.java | 3 +-
.../hudi/sink/bucket/ITTestBucketStreamWrite.java | 3 +-
.../org/apache/hudi/sink/utils/TestWriteBase.java | 4 ++-
.../test/java/org/apache/hudi/utils/TestUtils.java | 3 +-
.../hudi/hadoop/utils/HoodieInputFormatUtils.java | 3 +-
.../apache/hudi/hadoop/TestInputPathHandler.java | 13 ++++----
.../procedures/ExportInstantsProcedure.scala | 16 +++++-----
.../apache/hudi/testutils/DataSourceTestUtils.java | 3 +-
.../org/apache/hudi/TestHoodieFileIndex.scala | 19 +++++++-----
.../hudi/procedure/TestBootstrapProcedure.scala | 25 +++++++--------
.../procedure/TestHdfsParquetImportProcedure.scala | 5 +--
.../hudi/analysis/HoodieSpark32PlusAnalysis.scala | 18 +++++------
.../hudi/hive/testutils/HiveTestService.java | 4 +--
.../MarkerBasedEarlyConflictDetectionRunnable.java | 3 +-
.../utilities/streamer/SparkSampleWritesUtils.java | 3 +-
48 files changed, 197 insertions(+), 146 deletions(-)
diff --git
a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/ExportCommand.java
b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/ExportCommand.java
index 40e7154b5f9..b196c62d0fb 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/ExportCommand.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/ExportCommand.java
@@ -44,6 +44,7 @@ import
org.apache.hudi.common.table.timeline.TimelineMetadataUtils;
import org.apache.hudi.common.util.collection.ClosableIterator;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.hadoop.fs.HadoopFSUtils;
+import org.apache.hudi.storage.HoodieLocation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -168,7 +169,7 @@ public class ExportCommand {
LOG.error("Could not load metadata for action " + action + " at
instant time " + instantTime);
continue;
}
- final String outPath = localFolder + Path.SEPARATOR + instantTime
+ "." + action;
+ final String outPath = localFolder + HoodieLocation.SEPARATOR +
instantTime + "." + action;
writeToFile(outPath, HoodieAvroUtils.avroToJson(metadata, true));
}
}
@@ -190,7 +191,7 @@ public class ExportCommand {
final HoodieTableMetaClient metaClient = HoodieCLI.getTableMetaClient();
final HoodieActiveTimeline timeline = metaClient.getActiveTimeline();
for (HoodieInstant instant : instants) {
- String localPath = localFolder + Path.SEPARATOR + instant.getFileName();
+ String localPath = localFolder + HoodieLocation.SEPARATOR +
instant.getFileName();
byte[] data = null;
switch (instant.getAction()) {
diff --git
a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestHoodieLogFileCommand.java
b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestHoodieLogFileCommand.java
index 71046feecf2..bb35509235b 100644
---
a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestHoodieLogFileCommand.java
+++
b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestHoodieLogFileCommand.java
@@ -44,6 +44,7 @@ import org.apache.hudi.common.testutils.SchemaTestUtil;
import org.apache.hudi.common.util.FileIOUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.hadoop.fs.HadoopFSUtils;
+import org.apache.hudi.storage.HoodieLocation;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
@@ -201,7 +202,7 @@ public class TestHoodieLogFileCommand extends
CLIFunctionalTestHarness {
// write to path '2015/03/16'.
Schema schema = HoodieAvroUtils.addMetadataFields(getSimpleSchema());
- partitionPath = tablePath + Path.SEPARATOR +
HoodieTestCommitMetadataGenerator.DEFAULT_SECOND_PARTITION_PATH;
+ partitionPath = tablePath + HoodieLocation.SEPARATOR +
HoodieTestCommitMetadataGenerator.DEFAULT_SECOND_PARTITION_PATH;
Files.createDirectories(Paths.get(partitionPath));
HoodieLogFormat.Writer writer = null;
diff --git
a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestTableCommand.java
b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestTableCommand.java
index 2eed406c669..22d108241c6 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestTableCommand.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestTableCommand.java
@@ -32,6 +32,7 @@ import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
import org.apache.hudi.common.util.Option;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.avro.Schema;
import org.apache.hadoop.fs.FileSystem;
@@ -146,7 +147,7 @@ public class TestTableCommand extends
CLIFunctionalTestHarness {
assertTrue(ShellEvaluationResultUtil.isSuccess(result));
assertEquals("Metadata for table " + tableName + " loaded",
result.toString());
HoodieTableMetaClient client = HoodieCLI.getTableMetaClient();
- assertEquals(metaPath + Path.SEPARATOR + "archive",
client.getArchivePath());
+ assertEquals(metaPath + HoodieLocation.SEPARATOR + "archive",
client.getArchivePath());
assertEquals(tablePath, client.getBasePath());
assertEquals(metaPath, client.getMetaPath());
assertEquals(HoodieTableType.MERGE_ON_READ, client.getTableType());
@@ -185,7 +186,7 @@ public class TestTableCommand extends
CLIFunctionalTestHarness {
private void testRefreshCommand(String command) throws IOException {
// clean table matedata
FileSystem fs = FileSystem.get(hadoopConf());
- fs.delete(new Path(tablePath + Path.SEPARATOR +
HoodieTableMetaClient.METAFOLDER_NAME), true);
+ fs.delete(new Path(tablePath + HoodieLocation.SEPARATOR +
HoodieTableMetaClient.METAFOLDER_NAME), true);
// Create table
assertTrue(prepareTable());
diff --git
a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestBootstrapCommand.java
b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestBootstrapCommand.java
index f22ce1bbaf5..4e7a9c68a1e 100644
---
a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestBootstrapCommand.java
+++
b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestBootstrapCommand.java
@@ -18,7 +18,6 @@
package org.apache.hudi.cli.integ;
-import org.apache.hadoop.fs.Path;
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.HoodiePrintHelper;
import org.apache.hudi.cli.commands.TableCommand;
@@ -27,6 +26,8 @@ import
org.apache.hudi.cli.testutils.ShellEvaluationResultUtil;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.functional.TestBootstrap;
+import org.apache.hudi.storage.HoodieLocation;
+
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.junit.jupiter.api.BeforeEach;
@@ -64,8 +65,8 @@ public class ITTestBootstrapCommand extends
HoodieCLIIntegrationTestBase {
public void init() {
String srcName = "source";
tableName = "test-table";
- sourcePath = basePath + Path.SEPARATOR + srcName;
- tablePath = basePath + Path.SEPARATOR + tableName;
+ sourcePath = basePath + HoodieLocation.SEPARATOR + srcName;
+ tablePath = basePath + HoodieLocation.SEPARATOR + tableName;
// generate test data
partitions = Arrays.asList("2018", "2019", "2020");
@@ -73,7 +74,7 @@ public class ITTestBootstrapCommand extends
HoodieCLIIntegrationTestBase {
for (int i = 0; i < partitions.size(); i++) {
Dataset<Row> df = TestBootstrap.generateTestRawTripDataset(timestamp,
i * NUM_OF_RECORDS, i * NUM_OF_RECORDS + NUM_OF_RECORDS, null, jsc,
sqlContext);
- df.write().parquet(sourcePath + Path.SEPARATOR + PARTITION_FIELD + "=" +
partitions.get(i));
+ df.write().parquet(sourcePath + HoodieLocation.SEPARATOR +
PARTITION_FIELD + "=" + partitions.get(i));
}
}
diff --git
a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestHDFSParquetImportCommand.java
b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestHDFSParquetImportCommand.java
index 930f6b0064c..5f19bca2579 100644
---
a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestHDFSParquetImportCommand.java
+++
b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestHDFSParquetImportCommand.java
@@ -26,6 +26,7 @@ import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.testutils.HoodieClientTestUtils;
import org.apache.hudi.utilities.HDFSParquetImporter;
import org.apache.hudi.utilities.functional.TestHDFSParquetImporter;
@@ -76,7 +77,7 @@ public class ITTestHDFSParquetImportCommand extends
HoodieCLIIntegrationTestBase
@BeforeEach
public void init() throws IOException, ParseException {
tableName = "test_table";
- tablePath = basePath + Path.SEPARATOR + tableName;
+ tablePath = basePath + HoodieLocation.SEPARATOR + tableName;
sourcePath = new Path(basePath, "source");
targetPath = new Path(tablePath);
schemaFile = new Path(basePath, "file.schema").toString();
@@ -108,7 +109,7 @@ public class ITTestHDFSParquetImportCommand extends
HoodieCLIIntegrationTestBase
() -> assertEquals("Table imported to hoodie format",
result.toString()));
// Check hudi table exist
- String metaPath = targetPath + Path.SEPARATOR +
HoodieTableMetaClient.METAFOLDER_NAME;
+ String metaPath = targetPath + HoodieLocation.SEPARATOR +
HoodieTableMetaClient.METAFOLDER_NAME;
assertTrue(Files.exists(Paths.get(metaPath)), "Hoodie table not exist.");
// Load meta data
diff --git
a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestMarkersCommand.java
b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestMarkersCommand.java
index 5aacfd82de0..194c0b49889 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestMarkersCommand.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestMarkersCommand.java
@@ -18,7 +18,6 @@
package org.apache.hudi.cli.integ;
-import org.apache.hadoop.fs.Path;
import org.apache.hudi.cli.commands.TableCommand;
import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase;
import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil;
@@ -26,6 +25,8 @@ import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.model.IOType;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.common.testutils.FileCreateUtils;
+import org.apache.hudi.storage.HoodieLocation;
+
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
@@ -53,7 +54,7 @@ public class ITTestMarkersCommand extends
HoodieCLIIntegrationTestBase {
@BeforeEach
public void init() throws IOException {
String tableName = "test_table";
- tablePath = basePath + Path.SEPARATOR + tableName;
+ tablePath = basePath + HoodieLocation.SEPARATOR + tableName;
// Create table and connect
new TableCommand().createTable(
diff --git
a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestSavepointsCommand.java
b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestSavepointsCommand.java
index f74d3c0adfe..3aebd6a483f 100644
---
a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestSavepointsCommand.java
+++
b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestSavepointsCommand.java
@@ -33,6 +33,7 @@ import
org.apache.hudi.common.testutils.HoodieTestDataGenerator;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.metadata.HoodieTableMetadata;
import org.apache.hudi.metadata.SparkHoodieBackedTableMetadataWriter;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.BeforeEach;
@@ -65,7 +66,7 @@ public class ITTestSavepointsCommand extends
HoodieCLIIntegrationTestBase {
@BeforeEach
public void init() throws IOException {
String tableName = "test_table";
- tablePath = basePath + Path.SEPARATOR + tableName;
+ tablePath = basePath + HoodieLocation.SEPARATOR + tableName;
// Create table and connect
new TableCommand().createTable(
diff --git
a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestTableCommand.java
b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestTableCommand.java
index f10ba576475..260df2b532a 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestTableCommand.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestTableCommand.java
@@ -37,8 +37,8 @@ import org.apache.hudi.config.HoodieIndexConfig;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.index.HoodieIndex;
+import org.apache.hudi.storage.HoodieLocation;
-import org.apache.hadoop.fs.Path;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.junit.jupiter.api.Test;
@@ -72,7 +72,7 @@ public class ITTestTableCommand extends
HoodieCLIIntegrationTestBase {
@Test
public void testChangeTableCOW2MOR() throws IOException {
- tablePath = basePath + Path.SEPARATOR + tableName + "_cow2mor";
+ tablePath = basePath + HoodieLocation.SEPARATOR + tableName + "_cow2mor";
// Create table and connect
new TableCommand().createTable(
tablePath, "test_table", HoodieTableType.COPY_ON_WRITE.name(),
@@ -89,7 +89,7 @@ public class ITTestTableCommand extends
HoodieCLIIntegrationTestBase {
@Test
public void testChangeTableMOR2COW() throws IOException {
- tablePath = basePath + Path.SEPARATOR + tableName + "_mor2cow";
+ tablePath = basePath + HoodieLocation.SEPARATOR + tableName + "_mor2cow";
// Create table and connect
new TableCommand().createTable(
tablePath, "test_table", HoodieTableType.MERGE_ON_READ.name(),
@@ -104,7 +104,7 @@ public class ITTestTableCommand extends
HoodieCLIIntegrationTestBase {
@Test
public void testChangeTableMOR2COW_withPendingCompactions() throws Exception
{
- tablePath = basePath + Path.SEPARATOR + tableName + "_cow2mor";
+ tablePath = basePath + HoodieLocation.SEPARATOR + tableName + "_cow2mor";
// Create table and connect
new TableCommand().createTable(
tablePath, "test_table", HoodieTableType.MERGE_ON_READ.name(),
@@ -136,7 +136,7 @@ public class ITTestTableCommand extends
HoodieCLIIntegrationTestBase {
@Test
public void testChangeTableMOR2COW_withFullCompaction() throws Exception {
- tablePath = basePath + Path.SEPARATOR + tableName + "_cow2mor";
+ tablePath = basePath + HoodieLocation.SEPARATOR + tableName + "_cow2mor";
// Create table and connect
new TableCommand().createTable(
tablePath, "test_table", HoodieTableType.MERGE_ON_READ.name(),
@@ -161,7 +161,7 @@ public class ITTestTableCommand extends
HoodieCLIIntegrationTestBase {
@Test
public void testChangeTableMOR2COW_withoutCompaction() throws Exception {
- tablePath = basePath + Path.SEPARATOR + tableName + "_cow2mor";
+ tablePath = basePath + HoodieLocation.SEPARATOR + tableName + "_cow2mor";
// Create table and connect
new TableCommand().createTable(
tablePath, "test_table", HoodieTableType.MERGE_ON_READ.name(),
diff --git
a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/heartbeat/HeartbeatUtils.java
b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/heartbeat/HeartbeatUtils.java
index 7c2642da250..40e08275b29 100644
---
a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/heartbeat/HeartbeatUtils.java
+++
b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/heartbeat/HeartbeatUtils.java
@@ -22,6 +22,7 @@ import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.exception.HoodieException;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.table.HoodieTable;
import org.apache.hadoop.fs.FileSystem;
@@ -51,7 +52,7 @@ public class HeartbeatUtils {
boolean deleted = false;
try {
String heartbeatFolderPath =
HoodieTableMetaClient.getHeartbeatFolderPath(basePath);
- deleted = fs.delete(new Path(heartbeatFolderPath + Path.SEPARATOR +
instantTime), false);
+ deleted = fs.delete(new Path(heartbeatFolderPath +
HoodieLocation.SEPARATOR + instantTime), false);
if (!deleted) {
LOG.error("Failed to delete heartbeat for instant " + instantTime);
} else {
diff --git
a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/heartbeat/HoodieHeartbeatClient.java
b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/heartbeat/HoodieHeartbeatClient.java
index 93656aa2946..bb08ae997d9 100644
---
a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/heartbeat/HoodieHeartbeatClient.java
+++
b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/heartbeat/HoodieHeartbeatClient.java
@@ -22,6 +22,7 @@ import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieHeartbeatException;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -226,7 +227,8 @@ public class HoodieHeartbeatClient implements
AutoCloseable, Serializable {
}
public static Boolean heartbeatExists(FileSystem fs, String basePath, String
instantTime) throws IOException {
- Path heartbeatFilePath = new
Path(HoodieTableMetaClient.getHeartbeatFolderPath(basePath) + Path.SEPARATOR +
instantTime);
+ Path heartbeatFilePath = new
Path(HoodieTableMetaClient.getHeartbeatFolderPath(basePath)
+ + HoodieLocation.SEPARATOR + instantTime);
return fs.exists(heartbeatFilePath);
}
@@ -253,7 +255,7 @@ public class HoodieHeartbeatClient implements
AutoCloseable, Serializable {
try {
Long newHeartbeatTime = System.currentTimeMillis();
OutputStream outputStream =
- this.fs.create(new Path(heartbeatFolderPath + Path.SEPARATOR +
instantTime), true);
+ this.fs.create(new Path(heartbeatFolderPath +
HoodieLocation.SEPARATOR + instantTime), true);
outputStream.close();
Heartbeat heartbeat = instantToHeartbeatMap.get(instantTime);
if (heartbeat.getLastHeartbeatTime() != null &&
isHeartbeatExpired(instantTime)) {
diff --git
a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/transaction/lock/FileSystemBasedLockProvider.java
b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/transaction/lock/FileSystemBasedLockProvider.java
index c5692315648..ef1e7e0a54e 100644
---
a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/transaction/lock/FileSystemBasedLockProvider.java
+++
b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/transaction/lock/FileSystemBasedLockProvider.java
@@ -33,6 +33,7 @@ import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.exception.HoodieLockException;
import org.apache.hudi.hadoop.fs.HadoopFSUtils;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.storage.StorageSchemes;
import org.apache.hadoop.conf.Configuration;
@@ -77,10 +78,10 @@ public class FileSystemBasedLockProvider implements
LockProvider<String>, Serial
String lockDirectory =
lockConfiguration.getConfig().getString(FILESYSTEM_LOCK_PATH_PROP_KEY, null);
if (StringUtils.isNullOrEmpty(lockDirectory)) {
lockDirectory =
lockConfiguration.getConfig().getString(HoodieWriteConfig.BASE_PATH.key())
- + Path.SEPARATOR + HoodieTableMetaClient.METAFOLDER_NAME;
+ + HoodieLocation.SEPARATOR + HoodieTableMetaClient.METAFOLDER_NAME;
}
this.lockTimeoutMinutes =
lockConfiguration.getConfig().getInteger(FILESYSTEM_LOCK_EXPIRE_PROP_KEY);
- this.lockFile = new Path(lockDirectory + Path.SEPARATOR + LOCK_FILE_NAME);
+ this.lockFile = new Path(lockDirectory + HoodieLocation.SEPARATOR +
LOCK_FILE_NAME);
this.lockInfo = new LockInfo();
this.sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
this.fs = HadoopFSUtils.getFs(this.lockFile.toString(), configuration);
@@ -220,6 +221,6 @@ public class FileSystemBasedLockProvider implements
LockProvider<String>, Serial
* <p>IMPORTANT: this path should be shared especially when there is engine
cooperation.
*/
private static String defaultLockPath(String tablePath) {
- return tablePath + Path.SEPARATOR + AUXILIARYFOLDER_NAME;
+ return tablePath + HoodieLocation.SEPARATOR + AUXILIARYFOLDER_NAME;
}
}
diff --git
a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/index/functional/BaseHoodieFunctionalIndexClient.java
b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/index/functional/BaseHoodieFunctionalIndexClient.java
index 2adb7dddeb4..693699d59d7 100644
---
a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/index/functional/BaseHoodieFunctionalIndexClient.java
+++
b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/index/functional/BaseHoodieFunctionalIndexClient.java
@@ -21,6 +21,7 @@ package org.apache.hudi.table.action.index.functional;
import org.apache.hudi.common.table.HoodieTableConfig;
import org.apache.hudi.common.table.HoodieTableMetaClient;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
@@ -45,7 +46,9 @@ public abstract class BaseHoodieFunctionalIndexClient {
public void register(HoodieTableMetaClient metaClient, String indexName,
String indexType, Map<String, Map<String, String>> columns, Map<String, String>
options) {
LOG.info("Registering index {} of using {}", indexName, indexType);
String indexMetaPath = metaClient.getTableConfig().getIndexDefinitionPath()
- .orElseGet(() -> metaClient.getMetaPath() + Path.SEPARATOR +
HoodieTableMetaClient.INDEX_DEFINITION_FOLDER_NAME + Path.SEPARATOR +
HoodieTableMetaClient.INDEX_DEFINITION_FILE_NAME);
+ .orElseGet(() -> metaClient.getMetaPath()
+ + HoodieLocation.SEPARATOR +
HoodieTableMetaClient.INDEX_DEFINITION_FOLDER_NAME
+ + HoodieLocation.SEPARATOR +
HoodieTableMetaClient.INDEX_DEFINITION_FILE_NAME);
// build HoodieFunctionalIndexMetadata and then add to index definition
file
metaClient.buildFunctionalIndexDefinition(indexMetaPath, indexName,
indexType, columns, options);
// update table config if necessary
diff --git
a/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/client/timeline/TestCompletionTimeQueryView.java
b/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/client/timeline/TestCompletionTimeQueryView.java
index 9b65ab225e4..9dbc9c8656f 100644
---
a/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/client/timeline/TestCompletionTimeQueryView.java
+++
b/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/client/timeline/TestCompletionTimeQueryView.java
@@ -35,10 +35,10 @@ import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.config.HoodieIndexConfig;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.index.HoodieIndex;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.table.HoodieTable;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
@@ -66,7 +66,7 @@ public class TestCompletionTimeQueryView {
@Test
void testReadCompletionTime() throws Exception {
String tableName = "testTable";
- String tablePath = tempFile.getAbsolutePath() + Path.SEPARATOR + tableName;
+ String tablePath = tempFile.getAbsolutePath() + HoodieLocation.SEPARATOR +
tableName;
HoodieTableMetaClient metaClient = HoodieTestUtils.init(new
Configuration(), tablePath, HoodieTableType.COPY_ON_WRITE, tableName);
prepareTimeline(tablePath, metaClient);
try (CompletionTimeQueryView view = new
CompletionTimeQueryView(metaClient, String.format("%08d", 3))) {
@@ -95,7 +95,7 @@ public class TestCompletionTimeQueryView {
@Test
void testReadStartTime() throws Exception {
String tableName = "testTable";
- String tablePath = tempFile.getAbsolutePath() + Path.SEPARATOR + tableName;
+ String tablePath = tempFile.getAbsolutePath() + HoodieLocation.SEPARATOR +
tableName;
HoodieTableMetaClient metaClient = HoodieTestUtils.init(new
Configuration(), tablePath, HoodieTableType.COPY_ON_WRITE, tableName);
prepareTimeline(tablePath, metaClient);
try (CompletionTimeQueryView view = new
CompletionTimeQueryView(metaClient, String.format("%08d", 3))) {
diff --git
a/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/client/utils/TestLegacyArchivedMetaEntryReader.java
b/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/client/utils/TestLegacyArchivedMetaEntryReader.java
index 5b43d5c7e53..c3ab604adcc 100644
---
a/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/client/utils/TestLegacyArchivedMetaEntryReader.java
+++
b/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/client/utils/TestLegacyArchivedMetaEntryReader.java
@@ -19,7 +19,6 @@
package org.apache.hudi.client.utils;
import org.apache.hudi.avro.model.HoodieArchivedMetaEntry;
-import org.apache.hudi.common.table.timeline.ActiveAction;
import org.apache.hudi.common.model.HoodieArchivedLogFile;
import org.apache.hudi.common.model.HoodieAvroIndexedRecord;
import org.apache.hudi.common.model.HoodieCommitMetadata;
@@ -30,6 +29,7 @@ import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.log.HoodieLogFormat;
import org.apache.hudi.common.table.log.block.HoodieAvroDataBlock;
import org.apache.hudi.common.table.log.block.HoodieLogBlock;
+import org.apache.hudi.common.table.timeline.ActiveAction;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.MetadataConversionUtils;
@@ -39,6 +39,7 @@ import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.collection.ClosableIterator;
import org.apache.hudi.exception.HoodieCommitException;
import org.apache.hudi.exception.HoodieException;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.avro.Schema;
import org.apache.avro.generic.IndexedRecord;
@@ -73,7 +74,7 @@ public class TestLegacyArchivedMetaEntryReader {
@Test
void testReadLegacyArchivedTimeline() throws Exception {
String tableName = "testTable";
- String tablePath = tempFile.getAbsolutePath() + Path.SEPARATOR + tableName;
+ String tablePath = tempFile.getAbsolutePath() + HoodieLocation.SEPARATOR +
tableName;
HoodieTableMetaClient metaClient = HoodieTestUtils.init(new
Configuration(), tablePath, HoodieTableType.COPY_ON_WRITE, tableName);
prepareLegacyArchivedTimeline(metaClient);
LegacyArchivedMetaEntryReader reader = new
LegacyArchivedMetaEntryReader(metaClient);
diff --git
a/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/client/TestJavaHoodieBackedMetadata.java
b/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/client/TestJavaHoodieBackedMetadata.java
index fa4795265ad..bf6264a6948 100644
---
a/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/client/TestJavaHoodieBackedMetadata.java
+++
b/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/client/TestJavaHoodieBackedMetadata.java
@@ -98,6 +98,7 @@ import org.apache.hudi.metadata.HoodieTableMetadata;
import org.apache.hudi.metadata.HoodieTableMetadataUtil;
import org.apache.hudi.metadata.JavaHoodieBackedTableMetadataWriter;
import org.apache.hudi.metadata.MetadataPartitionType;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.table.HoodieJavaTable;
import org.apache.hudi.table.HoodieTable;
import org.apache.hudi.table.action.HoodieWriteMetadata;
@@ -1229,7 +1230,7 @@ public class TestJavaHoodieBackedMetadata extends
TestHoodieMetadataBase {
// remove the MDT partition from dataset to simulate failed bootstrap
Properties updateProperties = new Properties();
updateProperties.setProperty(HoodieTableConfig.TABLE_METADATA_PARTITIONS.key(),
"");
- HoodieTableConfig.update(fs, new Path(basePath + Path.SEPARATOR +
METAFOLDER_NAME),
+ HoodieTableConfig.update(fs, new Path(basePath + HoodieLocation.SEPARATOR
+ METAFOLDER_NAME),
updateProperties);
metaClient = HoodieTableMetaClient.reload(metaClient);
@@ -2179,7 +2180,7 @@ public class TestJavaHoodieBackedMetadata extends
TestHoodieMetadataBase {
// There is no way to simulate failed commit on the main dataset, hence
we simply delete the completed
// instant so that only the inflight is left over.
String commitInstantFileName =
metaClient.getActiveTimeline().getReverseOrderedInstants().findFirst().get().getFileName();
- assertTrue(fs.delete(new Path(basePath + Path.SEPARATOR +
METAFOLDER_NAME,
+ assertTrue(fs.delete(new Path(basePath + HoodieLocation.SEPARATOR +
METAFOLDER_NAME,
commitInstantFileName), false));
}
@@ -2279,7 +2280,7 @@ public class TestJavaHoodieBackedMetadata extends
TestHoodieMetadataBase {
// There is no way to simulate failed commit on the main dataset, hence
we simply delete the completed
// instant so that only the inflight is left over.
String commitInstantFileName =
metaClient.getActiveTimeline().getReverseOrderedInstants().findFirst().get().getFileName();
- assertTrue(fs.delete(new Path(basePath + Path.SEPARATOR +
METAFOLDER_NAME,
+ assertTrue(fs.delete(new Path(basePath + HoodieLocation.SEPARATOR +
METAFOLDER_NAME,
commitInstantFileName), false));
}
@@ -2422,7 +2423,7 @@ public class TestJavaHoodieBackedMetadata extends
TestHoodieMetadataBase {
// To simulate failed clean on the main dataset, we will delete the
completed clean instant
String cleanInstantFileName =
metaClient.reloadActiveTimeline().getCleanerTimeline().filterCompletedInstants()
.getReverseOrderedInstants().findFirst().get().getFileName();
- assertTrue(fs.delete(new Path(basePath + Path.SEPARATOR +
HoodieTableMetaClient.METAFOLDER_NAME,
+ assertTrue(fs.delete(new Path(basePath + HoodieLocation.SEPARATOR +
HoodieTableMetaClient.METAFOLDER_NAME,
cleanInstantFileName), false));
assertEquals(metaClient.reloadActiveTimeline().getCleanerTimeline().filterInflights().countInstants(),
1);
assertEquals(metaClient.reloadActiveTimeline().getCleanerTimeline().filterCompletedInstants().countInstants(),
0);
diff --git
a/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/utils/SparkMetadataWriterUtils.java
b/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/utils/SparkMetadataWriterUtils.java
index 6680cd9bba9..589c0152c7c 100644
---
a/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/utils/SparkMetadataWriterUtils.java
+++
b/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/utils/SparkMetadataWriterUtils.java
@@ -31,6 +31,7 @@ import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.data.HoodieJavaRDD;
import org.apache.hudi.index.functional.HoodieFunctionalIndex;
import org.apache.hudi.io.storage.HoodieFileWriterFactory;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.avro.Schema;
import org.apache.hadoop.fs.Path;
@@ -244,7 +245,7 @@ public class SparkMetadataWriterUtils {
if (partition.isEmpty()) {
return new Path(basePath, filename);
} else {
- return new Path(basePath, partition + Path.SEPARATOR + filename);
+ return new Path(basePath, partition + HoodieLocation.SEPARATOR +
filename);
}
}
}
diff --git
a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/TestHoodieClientMultiWriter.java
b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/TestHoodieClientMultiWriter.java
index d0896645427..1006d9e2fa2 100644
---
a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/TestHoodieClientMultiWriter.java
+++
b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/TestHoodieClientMultiWriter.java
@@ -47,6 +47,7 @@ import org.apache.hudi.config.HoodieCompactionConfig;
import org.apache.hudi.config.HoodieLockConfig;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.exception.HoodieWriteConflictException;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.table.action.HoodieWriteMetadata;
import org.apache.hudi.table.marker.SimpleDirectMarkerBasedDetectionStrategy;
import
org.apache.hudi.table.marker.SimpleTransactionDirectMarkerBasedDetectionStrategy;
@@ -255,7 +256,7 @@ public class TestHoodieClientMultiWriter extends
HoodieClientTestBase {
HoodieWriteConfig config4 =
HoodieWriteConfig.newBuilder().withProperties(writeConfig.getProps()).withHeartbeatIntervalInMs(heartBeatIntervalForCommit4).build();
final SparkRDDWriteClient client4 = getHoodieWriteClient(config4);
- Path heartbeatFilePath = new
Path(HoodieTableMetaClient.getHeartbeatFolderPath(basePath) + Path.SEPARATOR +
nextCommitTime3);
+ Path heartbeatFilePath = new
Path(HoodieTableMetaClient.getHeartbeatFolderPath(basePath) +
HoodieLocation.SEPARATOR + nextCommitTime3);
fs.create(heartbeatFilePath, true);
// Wait for heart beat expired for failed commitTime3 "003"
diff --git
a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieBackedMetadata.java
b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieBackedMetadata.java
index 380f1b53c75..9a6833ad7f5 100644
---
a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieBackedMetadata.java
+++
b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieBackedMetadata.java
@@ -103,6 +103,7 @@ import org.apache.hudi.metadata.HoodieTableMetadata;
import org.apache.hudi.metadata.HoodieTableMetadataUtil;
import org.apache.hudi.metadata.MetadataPartitionType;
import org.apache.hudi.metadata.SparkHoodieBackedTableMetadataWriter;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.table.HoodieSparkTable;
import org.apache.hudi.table.HoodieTable;
import org.apache.hudi.table.action.HoodieWriteMetadata;
@@ -1646,7 +1647,7 @@ public class TestHoodieBackedMetadata extends
TestHoodieMetadataBase {
// remove the MDT partition from dataset to simulate failed bootstrap
Properties updateProperties = new Properties();
updateProperties.setProperty(HoodieTableConfig.TABLE_METADATA_PARTITIONS.key(),
"");
- HoodieTableConfig.update(fs, new Path(basePath + Path.SEPARATOR +
METAFOLDER_NAME),
+ HoodieTableConfig.update(fs, new Path(basePath + HoodieLocation.SEPARATOR
+ METAFOLDER_NAME),
updateProperties);
metaClient = HoodieTableMetaClient.reload(metaClient);
@@ -2646,7 +2647,7 @@ public class TestHoodieBackedMetadata extends
TestHoodieMetadataBase {
// There is no way to simulate failed commit on the main dataset, hence
we simply delete the completed
// instant so that only the inflight is left over.
String commitInstantFileName =
metaClient.getActiveTimeline().getReverseOrderedInstants().findFirst().get().getFileName();
- assertTrue(fs.delete(new Path(basePath + Path.SEPARATOR +
METAFOLDER_NAME,
+ assertTrue(fs.delete(new Path(basePath + HoodieLocation.SEPARATOR +
METAFOLDER_NAME,
commitInstantFileName), false));
}
@@ -2698,9 +2699,9 @@ public class TestHoodieBackedMetadata extends
TestHoodieMetadataBase {
// metadata table partitions are rebootstrapped.
metadataWriter.dropMetadataPartitions(Arrays.asList(MetadataPartitionType.RECORD_INDEX,
FILES));
assertFalse(fs.exists(new Path(getMetadataTableBasePath(basePath)
- + Path.SEPARATOR + FILES.getPartitionPath())));
+ + HoodieLocation.SEPARATOR + FILES.getPartitionPath())));
assertFalse(fs.exists(new Path(getMetadataTableBasePath(basePath)
- + Path.SEPARATOR +
MetadataPartitionType.RECORD_INDEX.getPartitionPath())));
+ + HoodieLocation.SEPARATOR +
MetadataPartitionType.RECORD_INDEX.getPartitionPath())));
metaClient = HoodieTableMetaClient.reload(metaClient);
// Insert/upsert third batch of records
@@ -2717,14 +2718,14 @@ public class TestHoodieBackedMetadata extends
TestHoodieMetadataBase {
writeStatuses = client.insert(jsc.parallelize(records, 1),
commitTime).collect();
}
assertNoWriteErrors(writeStatuses);
- assertTrue(fs.exists(new Path(basePath + Path.SEPARATOR +
METAFOLDER_NAME)));
+ assertTrue(fs.exists(new Path(basePath + HoodieLocation.SEPARATOR +
METAFOLDER_NAME)));
metaClient = HoodieTableMetaClient.reload(metaClient);
assertFalse(metaClient.getActiveTimeline().filterCompletedInstants().filterCompletedInstants().findInstantsAfterOrEquals(commitTime,
1).empty());
assertTrue(fs.exists(new Path(getMetadataTableBasePath(basePath)
- + Path.SEPARATOR + FILES.getPartitionPath())));
+ + HoodieLocation.SEPARATOR + FILES.getPartitionPath())));
assertTrue(fs.exists(new Path(getMetadataTableBasePath(basePath)
- + Path.SEPARATOR +
MetadataPartitionType.RECORD_INDEX.getPartitionPath())));
+ + HoodieLocation.SEPARATOR +
MetadataPartitionType.RECORD_INDEX.getPartitionPath())));
}
/**
@@ -2865,7 +2866,7 @@ public class TestHoodieBackedMetadata extends
TestHoodieMetadataBase {
// There is no way to simulate failed commit on the main dataset, hence
we simply delete the completed
// instant so that only the inflight is left over.
String commitInstantFileName =
metaClient.getActiveTimeline().getReverseOrderedInstants().findFirst().get().getFileName();
- assertTrue(fs.delete(new Path(basePath + Path.SEPARATOR +
METAFOLDER_NAME,
+ assertTrue(fs.delete(new Path(basePath + HoodieLocation.SEPARATOR +
METAFOLDER_NAME,
commitInstantFileName), false));
}
@@ -3071,7 +3072,7 @@ public class TestHoodieBackedMetadata extends
TestHoodieMetadataBase {
// To simulate failed clean on the main dataset, we will delete the
completed clean instant
String cleanInstantFileName =
metaClient.reloadActiveTimeline().getCleanerTimeline().filterCompletedInstants()
.getReverseOrderedInstants().findFirst().get().getFileName();
- assertTrue(fs.delete(new Path(basePath + Path.SEPARATOR +
HoodieTableMetaClient.METAFOLDER_NAME,
+ assertTrue(fs.delete(new Path(basePath + HoodieLocation.SEPARATOR +
HoodieTableMetaClient.METAFOLDER_NAME,
cleanInstantFileName), false));
assertEquals(metaClient.reloadActiveTimeline().getCleanerTimeline().filterInflights().countInstants(),
1);
assertEquals(metaClient.reloadActiveTimeline().getCleanerTimeline().filterCompletedInstants().countInstants(),
0);
diff --git
a/hudi-common/src/main/java/org/apache/hudi/common/conflict/detection/DirectMarkerBasedDetectionStrategy.java
b/hudi-common/src/main/java/org/apache/hudi/common/conflict/detection/DirectMarkerBasedDetectionStrategy.java
index 1f3f4f2536d..ea08456d16e 100644
---
a/hudi-common/src/main/java/org/apache/hudi/common/conflict/detection/DirectMarkerBasedDetectionStrategy.java
+++
b/hudi-common/src/main/java/org/apache/hudi/common/conflict/detection/DirectMarkerBasedDetectionStrategy.java
@@ -27,6 +27,7 @@ import org.apache.hudi.common.util.MarkerUtils;
import org.apache.hudi.common.util.StringUtils;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.hadoop.fs.HoodieWrapperFileSystem;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -77,7 +78,7 @@ public abstract class DirectMarkerBasedDetectionStrategy
implements EarlyConflic
* @throws IOException upon errors.
*/
public boolean checkMarkerConflict(String basePath, long
maxAllowableHeartbeatIntervalInMs) throws IOException {
- String tempFolderPath = basePath + Path.SEPARATOR +
HoodieTableMetaClient.TEMPFOLDER_NAME;
+ String tempFolderPath = basePath + HoodieLocation.SEPARATOR +
HoodieTableMetaClient.TEMPFOLDER_NAME;
List<String> candidateInstants =
MarkerUtils.getCandidateInstants(activeTimeline,
Arrays.stream(fs.listStatus(new
Path(tempFolderPath))).map(FileStatus::getPath).collect(Collectors.toList()),
instantTime, maxAllowableHeartbeatIntervalInMs, fs, basePath);
diff --git
a/hudi-common/src/main/java/org/apache/hudi/common/fs/inline/InLineFSUtils.java
b/hudi-common/src/main/java/org/apache/hudi/common/fs/inline/InLineFSUtils.java
index 6031f29d907..06a96542585 100644
---
a/hudi-common/src/main/java/org/apache/hudi/common/fs/inline/InLineFSUtils.java
+++
b/hudi-common/src/main/java/org/apache/hudi/common/fs/inline/InLineFSUtils.java
@@ -18,6 +18,8 @@
package org.apache.hudi.common.fs.inline;
+import org.apache.hudi.storage.HoodieLocation;
+
import org.apache.hadoop.fs.Path;
import java.io.File;
@@ -33,8 +35,7 @@ import static
org.apache.hudi.common.util.ValidationUtils.checkArgument;
public class InLineFSUtils {
private static final String START_OFFSET_STR = "start_offset";
private static final String LENGTH_STR = "length";
- private static final String PATH_SEPARATOR = "/";
- private static final String SCHEME_SEPARATOR = ":";
+ private static final String SCHEME_SEPARATOR = "" +
HoodieLocation.COLON_CHAR;
private static final String EQUALS_STR = "=";
private static final String LOCAL_FILESYSTEM_SCHEME = "file";
@@ -54,8 +55,9 @@ public class InLineFSUtils {
public static Path getInlineFilePath(Path outerPath, String origScheme, long
inLineStartOffset, long inLineLength) {
final String subPath = new
File(outerPath.toString().substring(outerPath.toString().indexOf(":") +
1)).getPath();
return new Path(
- InLineFileSystem.SCHEME + SCHEME_SEPARATOR + PATH_SEPARATOR + subPath
+ PATH_SEPARATOR + origScheme
- + PATH_SEPARATOR + "?" + START_OFFSET_STR + EQUALS_STR +
inLineStartOffset
+ InLineFileSystem.SCHEME + SCHEME_SEPARATOR
+ + HoodieLocation.SEPARATOR + subPath + HoodieLocation.SEPARATOR +
origScheme
+ + HoodieLocation.SEPARATOR + "?" + START_OFFSET_STR + EQUALS_STR +
inLineStartOffset
+ "&" + LENGTH_STR + EQUALS_STR + inLineLength
);
}
@@ -84,7 +86,7 @@ public class InLineFSUtils {
final String pathExceptScheme =
basePath.toString().substring(basePath.toString().indexOf(SCHEME_SEPARATOR) +
1);
final String fullPath = outerFileScheme + SCHEME_SEPARATOR
- + (outerFileScheme.equals(LOCAL_FILESYSTEM_SCHEME) ? PATH_SEPARATOR :
"")
+ + (outerFileScheme.equals(LOCAL_FILESYSTEM_SCHEME) ?
HoodieLocation.SEPARATOR : "")
+ pathExceptScheme;
return new Path(fullPath);
}
diff --git
a/hudi-common/src/main/java/org/apache/hudi/common/heartbeat/HoodieHeartbeatUtils.java
b/hudi-common/src/main/java/org/apache/hudi/common/heartbeat/HoodieHeartbeatUtils.java
index 223d46e416f..f7af86f7954 100644
---
a/hudi-common/src/main/java/org/apache/hudi/common/heartbeat/HoodieHeartbeatUtils.java
+++
b/hudi-common/src/main/java/org/apache/hudi/common/heartbeat/HoodieHeartbeatUtils.java
@@ -20,6 +20,7 @@
package org.apache.hudi.common.heartbeat;
import org.apache.hudi.common.table.HoodieTableMetaClient;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -44,7 +45,8 @@ public class HoodieHeartbeatUtils {
* @throws IOException
*/
public static Long getLastHeartbeatTime(FileSystem fs, String basePath,
String instantTime) throws IOException {
- Path heartbeatFilePath = new
Path(HoodieTableMetaClient.getHeartbeatFolderPath(basePath) + Path.SEPARATOR +
instantTime);
+ Path heartbeatFilePath = new
Path(HoodieTableMetaClient.getHeartbeatFolderPath(basePath)
+ + HoodieLocation.SEPARATOR + instantTime);
if (fs.exists(heartbeatFilePath)) {
return fs.getFileStatus(heartbeatFilePath).getModificationTime();
} else {
diff --git
a/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java
b/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java
index e2dca00da0c..9d451893a63 100644
---
a/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java
+++
b/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java
@@ -58,6 +58,7 @@ import org.apache.hudi.hadoop.fs.NoOpConsistencyGuard;
import org.apache.hudi.hadoop.fs.SerializablePath;
import org.apache.hudi.keygen.constant.KeyGeneratorType;
import org.apache.hudi.metadata.HoodieTableMetadata;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -101,17 +102,18 @@ public class HoodieTableMetaClient implements
Serializable {
private static final long serialVersionUID = 1L;
private static final Logger LOG =
LoggerFactory.getLogger(HoodieTableMetaClient.class);
public static final String METAFOLDER_NAME = ".hoodie";
- public static final String TEMPFOLDER_NAME = METAFOLDER_NAME +
Path.SEPARATOR + ".temp";
- public static final String AUXILIARYFOLDER_NAME = METAFOLDER_NAME +
Path.SEPARATOR + ".aux";
- public static final String BOOTSTRAP_INDEX_ROOT_FOLDER_PATH =
AUXILIARYFOLDER_NAME + Path.SEPARATOR + ".bootstrap";
- public static final String SAMPLE_WRITES_FOLDER_PATH = AUXILIARYFOLDER_NAME
+ Path.SEPARATOR + ".sample_writes";
- public static final String HEARTBEAT_FOLDER_NAME = METAFOLDER_NAME +
Path.SEPARATOR + ".heartbeat";
- public static final String METADATA_TABLE_FOLDER_PATH = METAFOLDER_NAME +
Path.SEPARATOR + "metadata";
- public static final String HASHING_METADATA_FOLDER_NAME = ".bucket_index" +
Path.SEPARATOR + "consistent_hashing_metadata";
+ public static final String TEMPFOLDER_NAME = METAFOLDER_NAME +
HoodieLocation.SEPARATOR + ".temp";
+ public static final String AUXILIARYFOLDER_NAME = METAFOLDER_NAME +
HoodieLocation.SEPARATOR + ".aux";
+ public static final String BOOTSTRAP_INDEX_ROOT_FOLDER_PATH =
AUXILIARYFOLDER_NAME + HoodieLocation.SEPARATOR + ".bootstrap";
+ public static final String SAMPLE_WRITES_FOLDER_PATH = AUXILIARYFOLDER_NAME
+ HoodieLocation.SEPARATOR + ".sample_writes";
+ public static final String HEARTBEAT_FOLDER_NAME = METAFOLDER_NAME +
HoodieLocation.SEPARATOR + ".heartbeat";
+ public static final String METADATA_TABLE_FOLDER_PATH = METAFOLDER_NAME +
HoodieLocation.SEPARATOR + "metadata";
+ public static final String HASHING_METADATA_FOLDER_NAME =
+ ".bucket_index" + HoodieLocation.SEPARATOR +
"consistent_hashing_metadata";
public static final String BOOTSTRAP_INDEX_BY_PARTITION_FOLDER_PATH =
BOOTSTRAP_INDEX_ROOT_FOLDER_PATH
- + Path.SEPARATOR + ".partitions";
- public static final String BOOTSTRAP_INDEX_BY_FILE_ID_FOLDER_PATH =
BOOTSTRAP_INDEX_ROOT_FOLDER_PATH + Path.SEPARATOR
- + ".fileids";
+ + HoodieLocation.SEPARATOR + ".partitions";
+ public static final String BOOTSTRAP_INDEX_BY_FILE_ID_FOLDER_PATH =
+ BOOTSTRAP_INDEX_ROOT_FOLDER_PATH + HoodieLocation.SEPARATOR + ".fileids";
public static final String SCHEMA_FOLDER_NAME = ".schema";
@@ -322,7 +324,7 @@ public class HoodieTableMetaClient implements Serializable {
* @return Temp Folder path
*/
public String getTempFolderPath() {
- return basePath + Path.SEPARATOR + TEMPFOLDER_NAME;
+ return basePath + HoodieLocation.SEPARATOR + TEMPFOLDER_NAME;
}
/**
@@ -332,35 +334,35 @@ public class HoodieTableMetaClient implements
Serializable {
* @return
*/
public String getMarkerFolderPath(String instantTs) {
- return String.format("%s%s%s", getTempFolderPath(), Path.SEPARATOR,
instantTs);
+ return String.format("%s%s%s", getTempFolderPath(),
HoodieLocation.SEPARATOR, instantTs);
}
/**
* @return Auxiliary Meta path
*/
public String getMetaAuxiliaryPath() {
- return basePath + Path.SEPARATOR + AUXILIARYFOLDER_NAME;
+ return basePath + HoodieLocation.SEPARATOR + AUXILIARYFOLDER_NAME;
}
/**
* @return Heartbeat folder path.
*/
public static String getHeartbeatFolderPath(String basePath) {
- return String.format("%s%s%s", basePath, Path.SEPARATOR,
HEARTBEAT_FOLDER_NAME);
+ return String.format("%s%s%s", basePath, HoodieLocation.SEPARATOR,
HEARTBEAT_FOLDER_NAME);
}
/**
* @return Bootstrap Index By Partition Folder
*/
public String getBootstrapIndexByPartitionFolderPath() {
- return basePath + Path.SEPARATOR +
BOOTSTRAP_INDEX_BY_PARTITION_FOLDER_PATH;
+ return basePath + HoodieLocation.SEPARATOR +
BOOTSTRAP_INDEX_BY_PARTITION_FOLDER_PATH;
}
/**
* @return Bootstrap Index By Hudi File Id Folder
*/
public String getBootstrapIndexByFileIdFolderNameFolderPath() {
- return basePath + Path.SEPARATOR + BOOTSTRAP_INDEX_BY_FILE_ID_FOLDER_PATH;
+ return basePath + HoodieLocation.SEPARATOR +
BOOTSTRAP_INDEX_BY_FILE_ID_FOLDER_PATH;
}
/**
@@ -368,7 +370,7 @@ public class HoodieTableMetaClient implements Serializable {
*/
public String getArchivePath() {
String archiveFolder = tableConfig.getArchivelogFolder();
- return getMetaPath() + Path.SEPARATOR + archiveFolder;
+ return getMetaPath() + HoodieLocation.SEPARATOR + archiveFolder;
}
/**
diff --git
a/hudi-common/src/main/java/org/apache/hudi/metadata/AbstractHoodieTableMetadata.java
b/hudi-common/src/main/java/org/apache/hudi/metadata/AbstractHoodieTableMetadata.java
index e84c646cb50..96d93d01bf5 100644
---
a/hudi-common/src/main/java/org/apache/hudi/metadata/AbstractHoodieTableMetadata.java
+++
b/hudi-common/src/main/java/org/apache/hudi/metadata/AbstractHoodieTableMetadata.java
@@ -27,8 +27,7 @@ import org.apache.hudi.hadoop.fs.CachingPath;
import org.apache.hudi.hadoop.fs.SerializablePath;
import org.apache.hudi.internal.schema.Type;
import org.apache.hudi.internal.schema.Types;
-
-import org.apache.hadoop.fs.Path;
+import org.apache.hudi.storage.HoodieLocation;
import java.util.Collections;
import java.util.List;
@@ -58,14 +57,14 @@ public abstract class AbstractHoodieTableMetadata
implements HoodieTableMetadata
int level = 1;
for (int i = 1; i < path.length() - 1; i++) {
- if (path.charAt(i) == Path.SEPARATOR_CHAR) {
+ if (path.charAt(i) == HoodieLocation.SEPARATOR_CHAR) {
level++;
}
}
- if (path.startsWith(Path.SEPARATOR)) {
+ if (path.startsWith(HoodieLocation.SEPARATOR)) {
level--;
}
- if (path.endsWith(Path.SEPARATOR)) {
+ if (path.endsWith(HoodieLocation.SEPARATOR)) {
level--;
}
return level;
diff --git
a/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieMetadataPayload.java
b/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieMetadataPayload.java
index c16d7754d22..40c0debf28e 100644
---
a/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieMetadataPayload.java
+++
b/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieMetadataPayload.java
@@ -39,6 +39,7 @@ import org.apache.hudi.common.util.hash.PartitionIndexID;
import org.apache.hudi.exception.HoodieMetadataException;
import org.apache.hudi.hadoop.fs.CachingPath;
import org.apache.hudi.io.storage.HoodieAvroHFileReaderImplBase;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.util.Lazy;
import org.apache.avro.Schema;
@@ -355,7 +356,7 @@ public class HoodieMetadataPayload implements
HoodieRecordPayload<HoodieMetadata
final String bloomFilterType,
final ByteBuffer bloomFilter,
final boolean isDeleted) {
- checkArgument(!baseFileName.contains(Path.SEPARATOR)
+ checkArgument(!baseFileName.contains(HoodieLocation.SEPARATOR)
&& FSUtils.isBaseFile(new Path(baseFileName)),
"Invalid base file '" + baseFileName + "' for MetaIndexBloomFilter!");
final String bloomFilterIndexKey = getBloomFilterRecordKey(partitionName,
baseFileName);
diff --git
a/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieTableMetadata.java
b/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieTableMetadata.java
index 6bbe02507f9..02719ac25f6 100644
---
a/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieTableMetadata.java
+++
b/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieTableMetadata.java
@@ -30,11 +30,12 @@ import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.exception.HoodieMetadataException;
+import org.apache.hudi.expression.Expression;
+import org.apache.hudi.internal.schema.Types;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
-import org.apache.hudi.expression.Expression;
-import org.apache.hudi.internal.schema.Types;
import java.io.IOException;
import java.io.Serializable;
@@ -68,7 +69,7 @@ public interface HoodieTableMetadata extends Serializable,
AutoCloseable {
* Return the base-path of the Metadata Table for the given Dataset
identified by base-path
*/
static String getMetadataTableBasePath(String dataTableBasePath) {
- return dataTableBasePath + Path.SEPARATOR +
HoodieTableMetaClient.METADATA_TABLE_FOLDER_PATH;
+ return dataTableBasePath + HoodieLocation.SEPARATOR +
HoodieTableMetaClient.METADATA_TABLE_FOLDER_PATH;
}
/**
@@ -93,7 +94,7 @@ public interface HoodieTableMetadata extends Serializable,
AutoCloseable {
* @param metadataTableBasePath The base path of the metadata table
*/
static String getDatasetBasePath(String metadataTableBasePath) {
- int endPos = metadataTableBasePath.lastIndexOf(Path.SEPARATOR +
HoodieTableMetaClient.METADATA_TABLE_FOLDER_PATH);
+ int endPos = metadataTableBasePath.lastIndexOf(HoodieLocation.SEPARATOR +
HoodieTableMetaClient.METADATA_TABLE_FOLDER_PATH);
checkState(endPos != -1, metadataTableBasePath + " should be base path of
the metadata table");
return metadataTableBasePath.substring(0, endPos);
}
@@ -107,7 +108,7 @@ public interface HoodieTableMetadata extends Serializable,
AutoCloseable {
if (basePath == null || basePath.isEmpty()) {
return false;
}
- if (basePath.endsWith(Path.SEPARATOR)) {
+ if (basePath.endsWith(HoodieLocation.SEPARATOR)) {
basePath = basePath.substring(0, basePath.length() - 1);
}
return basePath.endsWith(HoodieTableMetaClient.METADATA_TABLE_FOLDER_PATH);
diff --git
a/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieTableMetadataUtil.java
b/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieTableMetadataUtil.java
index f6d65a6a53c..e9eba01bf83 100644
---
a/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieTableMetadataUtil.java
+++
b/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieTableMetadataUtil.java
@@ -82,6 +82,7 @@ import org.apache.hudi.exception.HoodieMetadataException;
import org.apache.hudi.hadoop.fs.HadoopFSUtils;
import org.apache.hudi.io.storage.HoodieFileReader;
import org.apache.hudi.io.storage.HoodieFileReaderFactory;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.util.Lazy;
import org.apache.avro.AvroTypeException;
@@ -1911,7 +1912,7 @@ public class HoodieTableMetadataUtil {
if (partition.isEmpty()) {
return new Path(basePath, filename);
} else {
- return new Path(basePath, partition + Path.SEPARATOR + filename);
+ return new Path(basePath, partition + HoodieLocation.SEPARATOR +
filename);
}
}
}
diff --git
a/hudi-common/src/test/java/org/apache/hudi/common/fs/TestHoodieWrapperFileSystem.java
b/hudi-common/src/test/java/org/apache/hudi/common/fs/TestHoodieWrapperFileSystem.java
index 15887cb80e2..dc9fdf36740 100644
---
a/hudi-common/src/test/java/org/apache/hudi/common/fs/TestHoodieWrapperFileSystem.java
+++
b/hudi-common/src/test/java/org/apache/hudi/common/fs/TestHoodieWrapperFileSystem.java
@@ -24,6 +24,7 @@ import org.apache.hudi.common.util.Option;
import org.apache.hudi.hadoop.fs.HadoopFSUtils;
import org.apache.hudi.hadoop.fs.HoodieWrapperFileSystem;
import org.apache.hudi.hadoop.fs.NoOpConsistencyGuard;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -70,7 +71,7 @@ class TestHoodieWrapperFileSystem {
public void testCreateImmutableFileInPath() throws IOException {
HoodieWrapperFileSystem fs = new
HoodieWrapperFileSystem(HadoopFSUtils.getFs(basePath, new Configuration()), new
NoOpConsistencyGuard());
String testContent = "test content";
- Path testFile = new Path(basePath + Path.SEPARATOR + "clean.00000001");
+ Path testFile = new Path(basePath + HoodieLocation.SEPARATOR +
"clean.00000001");
// create same commit twice
fs.createImmutableFileInPath(testFile,
Option.of(getUTF8Bytes(testContent)));
diff --git
a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/sink/meta/CkpMetadata.java
b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/sink/meta/CkpMetadata.java
index 6d562412bb5..32baf6986a5 100644
---
a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/sink/meta/CkpMetadata.java
+++
b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/sink/meta/CkpMetadata.java
@@ -23,6 +23,7 @@ import org.apache.hudi.common.util.StringUtils;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.common.util.VisibleForTesting;
import org.apache.hudi.exception.HoodieException;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -219,7 +220,8 @@ public class CkpMetadata implements Serializable,
AutoCloseable {
protected static String ckpMetaPath(String basePath, String uniqueId) {
// .hoodie/.aux/ckp_meta
- String metaPath = basePath + Path.SEPARATOR +
HoodieTableMetaClient.AUXILIARYFOLDER_NAME + Path.SEPARATOR + CKP_META;
+ String metaPath = basePath + HoodieLocation.SEPARATOR +
HoodieTableMetaClient.AUXILIARYFOLDER_NAME
+ + HoodieLocation.SEPARATOR + CKP_META;
return StringUtils.isNullOrEmpty(uniqueId) ? metaPath : metaPath + "_" +
uniqueId;
}
diff --git
a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/source/FileIndex.java
b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/source/FileIndex.java
index 2ddf10ef171..68c2a05fccd 100644
---
a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/source/FileIndex.java
+++
b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/source/FileIndex.java
@@ -29,6 +29,7 @@ import org.apache.hudi.source.prune.DataPruner;
import org.apache.hudi.source.prune.PartitionPruners;
import org.apache.hudi.source.prune.PrimaryKeyPruners;
import org.apache.hudi.source.stats.ColumnStatsIndices;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.util.DataTypeUtils;
import org.apache.hudi.util.StreamerUtil;
@@ -120,7 +121,7 @@ public class FileIndex {
}
List<Map<String, String>> partitions = new ArrayList<>();
for (String partitionPath : partitionPaths) {
- String[] paths = partitionPath.split(Path.SEPARATOR);
+ String[] paths = partitionPath.split(HoodieLocation.SEPARATOR);
Map<String, String> partitionMapping = new LinkedHashMap<>();
if (hivePartition) {
Arrays.stream(paths).forEach(p -> {
diff --git
a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/catalog/TableOptionProperties.java
b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/catalog/TableOptionProperties.java
index 1930e738116..ac7a078f1f8 100644
---
a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/catalog/TableOptionProperties.java
+++
b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/catalog/TableOptionProperties.java
@@ -25,6 +25,7 @@ import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.exception.HoodieValidationException;
import org.apache.hudi.hadoop.fs.HadoopFSUtils;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.sync.common.util.SparkDataSourceTableUtils;
import org.apache.hudi.util.AvroSchemaConverter;
@@ -155,7 +156,7 @@ public class TableOptionProperties {
}
private static Path getPropertiesFilePath(String basePath) {
- String auxPath = basePath + Path.SEPARATOR + AUXILIARYFOLDER_NAME;
+ String auxPath = basePath + HoodieLocation.SEPARATOR +
AUXILIARYFOLDER_NAME;
return new Path(auxPath, FILE_NAME);
}
diff --git
a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/format/FilePathUtils.java
b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/format/FilePathUtils.java
index 826b96f617f..78467abe9dc 100644
---
a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/format/FilePathUtils.java
+++
b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/format/FilePathUtils.java
@@ -20,6 +20,7 @@ package org.apache.hudi.table.format;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.hadoop.fs.HadoopFSUtils;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.util.DataTypeUtils;
import org.apache.flink.api.java.tuple.Tuple2;
@@ -98,7 +99,7 @@ public class FilePathUtils {
int i = 0;
for (Map.Entry<String, String> e : partitionKVs.entrySet()) {
if (i > 0) {
- suffixBuf.append(Path.SEPARATOR);
+ suffixBuf.append(HoodieLocation.SEPARATOR);
}
if (hivePartition) {
suffixBuf.append(escapePathName(e.getKey()));
@@ -108,7 +109,7 @@ public class FilePathUtils {
i++;
}
if (sepSuffix) {
- suffixBuf.append(Path.SEPARATOR);
+ suffixBuf.append(HoodieLocation.SEPARATOR);
}
return suffixBuf.toString();
}
diff --git
a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/util/ClientIds.java
b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/util/ClientIds.java
index 2fb8bd89307..82350a3b85b 100644
---
a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/util/ClientIds.java
+++
b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/util/ClientIds.java
@@ -24,6 +24,7 @@ import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.configuration.HadoopConfigurations;
import org.apache.hudi.exception.HoodieHeartbeatException;
import org.apache.hudi.hadoop.fs.HadoopFSUtils;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.flink.configuration.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -148,7 +149,7 @@ public class ClientIds implements AutoCloseable,
Serializable {
// Utilities
// -------------------------------------------------------------------------
private String getHeartbeatFolderPath(String basePath) {
- return basePath + Path.SEPARATOR + AUXILIARYFOLDER_NAME + Path.SEPARATOR +
HEARTBEAT_FOLDER_NAME;
+ return basePath + HoodieLocation.SEPARATOR + AUXILIARYFOLDER_NAME +
HoodieLocation.SEPARATOR + HEARTBEAT_FOLDER_NAME;
}
private Path getHeartbeatFilePath(String basePath, String uniqueId) {
diff --git
a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/util/ViewStorageProperties.java
b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/util/ViewStorageProperties.java
index 7eea9536990..8e328aee4d2 100644
---
a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/util/ViewStorageProperties.java
+++
b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/util/ViewStorageProperties.java
@@ -24,6 +24,7 @@ import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.configuration.HadoopConfigurations;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.hadoop.fs.HadoopFSUtils;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.flink.configuration.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -82,7 +83,7 @@ public class ViewStorageProperties {
}
private static Path getPropertiesFilePath(String basePath, String uniqueId) {
- String auxPath = basePath + Path.SEPARATOR + AUXILIARYFOLDER_NAME;
+ String auxPath = basePath + HoodieLocation.SEPARATOR +
AUXILIARYFOLDER_NAME;
String fileName = StringUtils.isNullOrEmpty(uniqueId) ? FILE_NAME :
FILE_NAME + "_" + uniqueId;
return new Path(auxPath, fileName);
}
diff --git
a/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/sink/ITTestDataStreamWrite.java
b/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/sink/ITTestDataStreamWrite.java
index 954ca6593c3..8995d0247bc 100644
---
a/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/sink/ITTestDataStreamWrite.java
+++
b/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/sink/ITTestDataStreamWrite.java
@@ -28,6 +28,7 @@ import org.apache.hudi.exception.SchemaCompatibilityException;
import org.apache.hudi.sink.transform.ChainedTransformer;
import org.apache.hudi.sink.transform.Transformer;
import org.apache.hudi.sink.utils.Pipelines;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.table.catalog.HoodieCatalog;
import org.apache.hudi.table.catalog.TableOptionProperties;
import org.apache.hudi.util.AvroSchemaConverter;
@@ -440,7 +441,7 @@ public class ITTestDataStreamWrite extends TestLogger {
// create table dir
final String dbName = DEFAULT_DATABASE.defaultValue();
final String tableName = "t1";
- File testTable = new File(tempFile, dbName + Path.SEPARATOR + tableName);
+ File testTable = new File(tempFile, dbName + HoodieLocation.SEPARATOR +
tableName);
testTable.mkdir();
Configuration conf =
TestConfigurations.getDefaultConf(testTable.toURI().toString());
diff --git
a/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/sink/bucket/ITTestBucketStreamWrite.java
b/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/sink/bucket/ITTestBucketStreamWrite.java
index 9b8c15afcb6..8301b2ae99a 100644
---
a/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/sink/bucket/ITTestBucketStreamWrite.java
+++
b/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/sink/bucket/ITTestBucketStreamWrite.java
@@ -27,6 +27,7 @@ import org.apache.hudi.common.testutils.FileCreateUtils;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.hadoop.fs.HadoopFSUtils;
import org.apache.hudi.index.HoodieIndex.IndexType;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.util.StreamerUtil;
import org.apache.hudi.utils.FlinkMiniCluster;
import org.apache.hudi.utils.TestConfigurations;
@@ -109,7 +110,7 @@ public class ITTestBucketStreamWrite {
// delete successful commit to simulate an unsuccessful write
FileSystem fs = metaClient.getFs();
- Path path = new Path(metaClient.getMetaPath() + Path.SEPARATOR + filename);
+ Path path = new Path(metaClient.getMetaPath() + HoodieLocation.SEPARATOR +
filename);
fs.delete(path);
commitMetadata.getFileIdAndRelativePaths().forEach((fileId, relativePath)
-> {
diff --git
a/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/sink/utils/TestWriteBase.java
b/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/sink/utils/TestWriteBase.java
index 27d66d1773f..b31cc0f8b19 100644
---
a/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/sink/utils/TestWriteBase.java
+++
b/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/sink/utils/TestWriteBase.java
@@ -34,6 +34,7 @@ import org.apache.hudi.hadoop.fs.HadoopFSUtils;
import org.apache.hudi.sink.event.WriteMetadataEvent;
import org.apache.hudi.sink.meta.CkpMetadata;
import org.apache.hudi.sink.meta.CkpMetadataFactory;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.util.StreamerUtil;
import org.apache.hudi.utils.TestData;
import org.apache.hudi.utils.TestUtils;
@@ -497,7 +498,8 @@ public class TestWriteBase {
HoodieActiveTimeline.deleteInstantFile(metaClient.getFs(),
metaClient.getMetaPath(), lastCompletedInstant.get());
// refresh the heartbeat in case it is timed out.
OutputStream outputStream =
- metaClient.getFs().create(new
Path(HoodieTableMetaClient.getHeartbeatFolderPath(basePath) + Path.SEPARATOR +
this.lastComplete), true);
+ metaClient.getFs().create(new
Path(HoodieTableMetaClient.getHeartbeatFolderPath(basePath)
+ + HoodieLocation.SEPARATOR + this.lastComplete), true);
outputStream.close();
this.lastPending = this.lastComplete;
this.lastComplete = lastCompleteInstant();
diff --git
a/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/utils/TestUtils.java
b/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/utils/TestUtils.java
index bcff31cb305..611eb889f8d 100644
---
a/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/utils/TestUtils.java
+++
b/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/utils/TestUtils.java
@@ -28,6 +28,7 @@ import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.configuration.HadoopConfigurations;
import org.apache.hudi.source.StreamReadMonitoringFunction;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.table.format.mor.MergeOnReadInputSplit;
import org.apache.hudi.util.StreamerUtil;
@@ -107,7 +108,7 @@ public class TestUtils {
public static String getSplitPartitionPath(MergeOnReadInputSplit split) {
assertTrue(split.getLogPaths().isPresent());
final String logPath = split.getLogPaths().get().get(0);
- String[] paths = logPath.split(Path.SEPARATOR);
+ String[] paths = logPath.split(HoodieLocation.SEPARATOR);
return paths[paths.length - 2];
}
diff --git
a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HoodieInputFormatUtils.java
b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HoodieInputFormatUtils.java
index 80e1186776f..505acccee87 100644
---
a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HoodieInputFormatUtils.java
+++
b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HoodieInputFormatUtils.java
@@ -43,6 +43,7 @@ import
org.apache.hudi.hadoop.realtime.HoodieHFileRealtimeInputFormat;
import org.apache.hudi.hadoop.realtime.HoodieParquetRealtimeInputFormat;
import org.apache.hudi.hadoop.realtime.HoodieRealtimeFileSplit;
import org.apache.hudi.hadoop.realtime.HoodieRealtimePath;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -244,7 +245,7 @@ public class HoodieInputFormatUtils {
return Option.empty();
}
String incrementalInputPaths = partitionsToList.stream()
- .map(s -> StringUtils.isNullOrEmpty(s) ? tableMetaClient.getBasePath()
: tableMetaClient.getBasePath() + Path.SEPARATOR + s)
+ .map(s -> StringUtils.isNullOrEmpty(s) ? tableMetaClient.getBasePath()
: tableMetaClient.getBasePath() + HoodieLocation.SEPARATOR + s)
.filter(s -> {
/*
* Ensure to return only results from the original input path that
has incremental changes
diff --git
a/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestInputPathHandler.java
b/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestInputPathHandler.java
index 561851c8e2b..b88b58f1ad9 100644
---
a/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestInputPathHandler.java
+++
b/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestInputPathHandler.java
@@ -25,6 +25,7 @@ import org.apache.hudi.common.table.HoodieTableConfig;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.testutils.minicluster.HdfsTestService;
import org.apache.hudi.hadoop.utils.HoodieHiveUtils;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -167,12 +168,12 @@ public class TestInputPathHandler {
static List<Path> generatePartitions(DistributedFileSystem dfs, String
basePath)
throws IOException {
List<Path> paths = new ArrayList<>();
- paths.add(new Path(basePath + Path.SEPARATOR + "2019/05/21"));
- paths.add(new Path(basePath + Path.SEPARATOR + "2019/05/22"));
- paths.add(new Path(basePath + Path.SEPARATOR + "2019/05/23"));
- paths.add(new Path(basePath + Path.SEPARATOR + "2019/05/24"));
- paths.add(new Path(basePath + Path.SEPARATOR + "2019/05/25"));
- for (Path path: paths) {
+ paths.add(new Path(basePath + HoodieLocation.SEPARATOR + "2019/05/21"));
+ paths.add(new Path(basePath + HoodieLocation.SEPARATOR + "2019/05/22"));
+ paths.add(new Path(basePath + HoodieLocation.SEPARATOR + "2019/05/23"));
+ paths.add(new Path(basePath + HoodieLocation.SEPARATOR + "2019/05/24"));
+ paths.add(new Path(basePath + HoodieLocation.SEPARATOR + "2019/05/25"));
+ for (Path path : paths) {
dfs.mkdirs(path);
}
return paths;
diff --git
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ExportInstantsProcedure.scala
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ExportInstantsProcedure.scala
index a9c5efd7c72..c03ac3e1376 100644
---
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ExportInstantsProcedure.scala
+++
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ExportInstantsProcedure.scala
@@ -17,20 +17,22 @@
package org.apache.spark.sql.hudi.command.procedures
-import org.apache.avro.generic.GenericRecord
-import org.apache.avro.specific.SpecificData
-import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}
import org.apache.hudi.HoodieCLIUtils
import org.apache.hudi.avro.HoodieAvroUtils
import org.apache.hudi.avro.model.HoodieArchivedMetaEntry
-import org.apache.hudi.common.fs.FSUtils
import org.apache.hudi.common.model.HoodieLogFile
+import org.apache.hudi.common.model.HoodieRecord.HoodieRecordType
import org.apache.hudi.common.table.HoodieTableMetaClient
import org.apache.hudi.common.table.log.HoodieLogFormat
import org.apache.hudi.common.table.log.block.HoodieAvroDataBlock
import org.apache.hudi.common.table.timeline.{HoodieInstant, HoodieTimeline,
TimelineMetadataUtils}
import org.apache.hudi.exception.HoodieException
+import org.apache.hudi.hadoop.fs.HadoopFSUtils
+import org.apache.hudi.storage.HoodieLocation
+import org.apache.avro.generic.GenericRecord
+import org.apache.avro.specific.SpecificData
+import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{DataTypes, Metadata, StructField,
StructType}
@@ -39,8 +41,6 @@ import java.io.File
import java.util
import java.util.Collections
import java.util.function.Supplier
-import org.apache.hudi.common.model.HoodieRecord.HoodieRecordType
-import org.apache.hudi.hadoop.fs.HadoopFSUtils
import scala.collection.JavaConverters._
import scala.util.control.Breaks.break
@@ -159,7 +159,7 @@ class ExportInstantsProcedure extends BaseProcedure with
ProcedureBuilder with L
null
}
val instantTime = archiveEntryRecord.get("commitTime").toString
- val outPath = localFolder + Path.SEPARATOR + instantTime + "." +
action
+ val outPath = localFolder + HoodieLocation.SEPARATOR + instantTime
+ "." + action
if (metadata != null) writeToFile(fileSystem, outPath,
HoodieAvroUtils.avroToJson(metadata, true))
if ( {
copyCount += 1;
@@ -182,7 +182,7 @@ class ExportInstantsProcedure extends BaseProcedure with
ProcedureBuilder with L
val timeline = metaClient.getActiveTimeline
val fileSystem = HadoopFSUtils.getFs(metaClient.getBasePath,
jsc.hadoopConfiguration())
for (instant <- instants) {
- val localPath = localFolder + Path.SEPARATOR + instant.getFileName
+ val localPath = localFolder + HoodieLocation.SEPARATOR +
instant.getFileName
val data: Array[Byte] = instant.getAction match {
case HoodieTimeline.CLEAN_ACTION =>
val metadata =
TimelineMetadataUtils.deserializeHoodieCleanMetadata(timeline.getInstantDetails(instant).get)
diff --git
a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/testutils/DataSourceTestUtils.java
b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/testutils/DataSourceTestUtils.java
index 28c8df82e8e..95d6249a375 100644
---
a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/testutils/DataSourceTestUtils.java
+++
b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/testutils/DataSourceTestUtils.java
@@ -22,6 +22,7 @@ import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
import org.apache.hudi.common.util.FileIOUtils;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.avro.Schema;
import org.apache.hadoop.conf.Configuration;
@@ -137,7 +138,7 @@ public class DataSourceTestUtils {
while (files.hasNext()) {
LocatedFileStatus file = files.next();
// skip meta folder
- if (file.isFile() &&
!file.getPath().toString().contains(HoodieTableMetaClient.METAFOLDER_NAME +
Path.SEPARATOR)) {
+ if (file.isFile() &&
!file.getPath().toString().contains(HoodieTableMetaClient.METAFOLDER_NAME +
HoodieLocation.SEPARATOR)) {
if (FSUtils.isBaseFile(file.getPath())) {
return false;
}
diff --git
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/TestHoodieFileIndex.scala
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/TestHoodieFileIndex.scala
index 803702addb4..df07c72f090 100644
---
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/TestHoodieFileIndex.scala
+++
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/TestHoodieFileIndex.scala
@@ -17,45 +17,48 @@
package org.apache.hudi
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.Path
import org.apache.hudi.DataSourceReadOptions.{FILE_INDEX_LISTING_MODE_EAGER,
FILE_INDEX_LISTING_MODE_LAZY, QUERY_TYPE, QUERY_TYPE_SNAPSHOT_OPT_VAL}
import org.apache.hudi.DataSourceWriteOptions._
import org.apache.hudi.HoodieConversionUtils.toJavaOption
import org.apache.hudi.HoodieFileIndex.DataSkippingFailureMode
import org.apache.hudi.client.HoodieJavaWriteClient
import org.apache.hudi.client.common.HoodieJavaEngineContext
-import
org.apache.hudi.common.config.TimestampKeyGeneratorConfig.{TIMESTAMP_INPUT_DATE_FORMAT,
TIMESTAMP_OUTPUT_DATE_FORMAT, TIMESTAMP_TYPE_FIELD}
import org.apache.hudi.common.config.{HoodieMetadataConfig,
HoodieStorageConfig}
+import
org.apache.hudi.common.config.TimestampKeyGeneratorConfig.{TIMESTAMP_INPUT_DATE_FORMAT,
TIMESTAMP_OUTPUT_DATE_FORMAT, TIMESTAMP_TYPE_FIELD}
import org.apache.hudi.common.engine.EngineType
import org.apache.hudi.common.fs.FSUtils
import org.apache.hudi.common.model.{HoodieBaseFile, HoodieRecord,
HoodieTableType}
-import org.apache.hudi.common.table.view.HoodieTableFileSystemView
import org.apache.hudi.common.table.{HoodieTableConfig, HoodieTableMetaClient}
+import org.apache.hudi.common.table.view.HoodieTableFileSystemView
+import org.apache.hudi.common.testutils.{HoodieTestDataGenerator,
HoodieTestUtils}
import org.apache.hudi.common.testutils.HoodieTestTable.makeNewCommitTime
import org.apache.hudi.common.testutils.RawTripTestPayload.recordsToStrings
-import org.apache.hudi.common.testutils.{HoodieTestDataGenerator,
HoodieTestUtils}
import org.apache.hudi.common.util.PartitionPathEncodeUtils
import org.apache.hudi.common.util.StringUtils.isNullOrEmpty
import org.apache.hudi.config.HoodieWriteConfig
import org.apache.hudi.exception.HoodieException
import org.apache.hudi.keygen.TimestampBasedAvroKeyGenerator.TimestampType
import org.apache.hudi.metadata.HoodieTableMetadata
+import org.apache.hudi.storage.HoodieLocation
import org.apache.hudi.testutils.HoodieSparkClientTestBase
import org.apache.hudi.util.JFunction
+
+import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.fs.Path
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.expressions.{And, AttributeReference,
EqualTo, GreaterThanOrEqual, LessThan, Literal}
import org.apache.spark.sql.execution.datasources.{NoopCache,
PartitionDirectory}
import org.apache.spark.sql.functions.{lit, struct}
import org.apache.spark.sql.hudi.HoodieSparkSessionExtension
import org.apache.spark.sql.types.{IntegerType, StringType}
-import org.junit.jupiter.api.Assertions.{assertEquals, assertTrue}
import org.junit.jupiter.api.{BeforeEach, Test}
+import org.junit.jupiter.api.Assertions.{assertEquals, assertTrue}
import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.{Arguments, CsvSource, MethodSource,
ValueSource}
import java.util.Properties
import java.util.function.Consumer
+
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.util.Random
@@ -813,9 +816,9 @@ class TestHoodieFileIndex extends HoodieSparkClientTestBase
with ScalaAssertionS
if (hiveStylePartitioning) {
partitionNames.zip(partitionValues).map {
case (name, value) => s"$name=$value"
- }.mkString(Path.SEPARATOR)
+ }.mkString(HoodieLocation.SEPARATOR)
} else {
- partitionValues.mkString(Path.SEPARATOR)
+ partitionValues.mkString(HoodieLocation.SEPARATOR)
}
}
}
diff --git
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestBootstrapProcedure.scala
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestBootstrapProcedure.scala
index a8ac9b5e317..fc45509190c 100644
---
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestBootstrapProcedure.scala
+++
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestBootstrapProcedure.scala
@@ -17,11 +17,12 @@
package org.apache.spark.sql.hudi.procedure
-import org.apache.hadoop.fs.Path
import org.apache.hudi.common.model.HoodieTableType
import org.apache.hudi.common.table.HoodieTableMetaClient
import org.apache.hudi.functional.TestBootstrap
import org.apache.hudi.keygen.constant.KeyGeneratorOptions
+import org.apache.hudi.storage.HoodieLocation
+
import org.apache.spark.api.java.JavaSparkContext
import org.apache.spark.sql.{Dataset, Row}
@@ -40,8 +41,8 @@ class TestBootstrapProcedure extends
HoodieSparkProcedureTestBase {
val basePath = s"${tmp.getCanonicalPath}"
val srcName: String = "source"
- val sourcePath = basePath + Path.SEPARATOR + srcName
- val tablePath = basePath + Path.SEPARATOR + tableName
+ val sourcePath = basePath + HoodieLocation.SEPARATOR + srcName
+ val tablePath = basePath + HoodieLocation.SEPARATOR + tableName
val jsc = new JavaSparkContext(spark.sparkContext)
// generate test data
@@ -49,7 +50,7 @@ class TestBootstrapProcedure extends
HoodieSparkProcedureTestBase {
val timestamp: Long = Instant.now.toEpochMilli
for (i <- 0 until partitions.size) {
val df: Dataset[Row] =
TestBootstrap.generateTestRawTripDataset(timestamp, i * NUM_OF_RECORDS, i *
NUM_OF_RECORDS + NUM_OF_RECORDS, null, jsc, spark.sqlContext)
- df.write.parquet(sourcePath + Path.SEPARATOR + PARTITION_FIELD + "=" +
partitions.get(i))
+ df.write.parquet(sourcePath + HoodieLocation.SEPARATOR +
PARTITION_FIELD + "=" + partitions.get(i))
}
spark.sql("set hoodie.bootstrap.parallelism = 20")
@@ -105,8 +106,8 @@ class TestBootstrapProcedure extends
HoodieSparkProcedureTestBase {
val basePath = s"${tmp.getCanonicalPath}"
val srcName: String = "source"
- val sourcePath = basePath + Path.SEPARATOR + srcName
- val tablePath = basePath + Path.SEPARATOR + tableName
+ val sourcePath = basePath + HoodieLocation.SEPARATOR + srcName
+ val tablePath = basePath + HoodieLocation.SEPARATOR + tableName
val jsc = new JavaSparkContext(spark.sparkContext)
// generate test data
@@ -114,7 +115,7 @@ class TestBootstrapProcedure extends
HoodieSparkProcedureTestBase {
val timestamp: Long = Instant.now.toEpochMilli
for (i <- 0 until partitions.size) {
val df: Dataset[Row] =
TestBootstrap.generateTestRawTripDataset(timestamp, i * NUM_OF_RECORDS, i *
NUM_OF_RECORDS + NUM_OF_RECORDS, null, jsc, spark.sqlContext)
- df.write.parquet(sourcePath + Path.SEPARATOR + PARTITION_FIELD + "=" +
partitions.get(i))
+ df.write.parquet(sourcePath + HoodieLocation.SEPARATOR +
PARTITION_FIELD + "=" + partitions.get(i))
}
spark.sql("set hoodie.bootstrap.parallelism = 20")
@@ -171,8 +172,8 @@ class TestBootstrapProcedure extends
HoodieSparkProcedureTestBase {
val basePath = s"${tmp.getCanonicalPath}"
val srcName: String = "source"
- val sourcePath = basePath + Path.SEPARATOR + srcName
- val tablePath = basePath + Path.SEPARATOR + tableName
+ val sourcePath = basePath + HoodieLocation.SEPARATOR + srcName
+ val tablePath = basePath + HoodieLocation.SEPARATOR + tableName
val jsc = new JavaSparkContext(spark.sparkContext)
// generate test data
@@ -227,8 +228,8 @@ class TestBootstrapProcedure extends
HoodieSparkProcedureTestBase {
val basePath = s"${tmp.getCanonicalPath}"
val srcName: String = "source"
- val sourcePath = basePath + Path.SEPARATOR + srcName
- val tablePath = basePath + Path.SEPARATOR + tableName
+ val sourcePath = basePath + HoodieLocation.SEPARATOR + srcName
+ val tablePath = basePath + HoodieLocation.SEPARATOR + tableName
val jsc = new JavaSparkContext(spark.sparkContext)
// generate test data
@@ -236,7 +237,7 @@ class TestBootstrapProcedure extends
HoodieSparkProcedureTestBase {
val timestamp: Long = Instant.now.toEpochMilli
for (i <- 0 until partitions.size) {
val df: Dataset[Row] =
TestBootstrap.generateTestRawTripDataset(timestamp, i * NUM_OF_RECORDS, i *
NUM_OF_RECORDS + NUM_OF_RECORDS, null, jsc, spark.sqlContext)
- df.write.parquet(sourcePath + Path.SEPARATOR + PARTITION_FIELD + "=" +
partitions.get(i))
+ df.write.parquet(sourcePath + HoodieLocation.SEPARATOR +
PARTITION_FIELD + "=" + partitions.get(i))
}
spark.sql("set hoodie.bootstrap.parallelism = 20")
diff --git
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHdfsParquetImportProcedure.scala
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHdfsParquetImportProcedure.scala
index 595e9173cbe..9ca3ff0719b 100644
---
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHdfsParquetImportProcedure.scala
+++
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHdfsParquetImportProcedure.scala
@@ -26,6 +26,7 @@ import
org.apache.hudi.common.table.timeline.HoodieActiveTimeline
import org.apache.hudi.common.testutils.{HoodieTestDataGenerator,
HoodieTestUtils}
import org.apache.hudi.common.util.StringUtils.getUTF8Bytes
import org.apache.hudi.hadoop.fs.HadoopFSUtils
+import org.apache.hudi.storage.HoodieLocation
import org.apache.hudi.testutils.HoodieClientTestUtils
import org.apache.parquet.avro.AvroParquetWriter
@@ -46,7 +47,7 @@ class TestHdfsParquetImportProcedure extends
HoodieSparkProcedureTestBase {
withTempDir { tmp =>
val fs: FileSystem = HadoopFSUtils.getFs(tmp.getCanonicalPath,
spark.sparkContext.hadoopConfiguration)
val tableName = generateTableName
- val tablePath = tmp.getCanonicalPath + Path.SEPARATOR + tableName
+ val tablePath = tmp.getCanonicalPath + HoodieLocation.SEPARATOR +
tableName
val sourcePath = new Path(tmp.getCanonicalPath, "source")
val targetPath = new Path(tablePath)
val schemaFile = new Path(tmp.getCanonicalPath, "file.schema").toString
@@ -79,7 +80,7 @@ class TestHdfsParquetImportProcedure extends
HoodieSparkProcedureTestBase {
withTempDir { tmp =>
val fs: FileSystem = HadoopFSUtils.getFs(tmp.getCanonicalPath,
spark.sparkContext.hadoopConfiguration)
val tableName = generateTableName
- val tablePath = tmp.getCanonicalPath + Path.SEPARATOR + tableName
+ val tablePath = tmp.getCanonicalPath + HoodieLocation.SEPARATOR +
tableName
val sourcePath = new Path(tmp.getCanonicalPath, "source")
val targetPath = new Path(tablePath)
val schemaFile = new Path(tmp.getCanonicalPath, "file.schema").toString
diff --git
a/hudi-spark-datasource/hudi-spark3.2plus-common/src/main/scala/org/apache/spark/sql/hudi/analysis/HoodieSpark32PlusAnalysis.scala
b/hudi-spark-datasource/hudi-spark3.2plus-common/src/main/scala/org/apache/spark/sql/hudi/analysis/HoodieSpark32PlusAnalysis.scala
index 9c2f5bfb58c..f4de486dff0 100644
---
a/hudi-spark-datasource/hudi-spark3.2plus-common/src/main/scala/org/apache/spark/sql/hudi/analysis/HoodieSpark32PlusAnalysis.scala
+++
b/hudi-spark-datasource/hudi-spark3.2plus-common/src/main/scala/org/apache/spark/sql/hudi/analysis/HoodieSpark32PlusAnalysis.scala
@@ -17,27 +17,27 @@
package org.apache.spark.sql.hudi.analysis
-import org.apache.hadoop.fs.Path
import org.apache.hudi.{DataSourceReadOptions, DefaultSource,
SparkAdapterSupport}
+import org.apache.hudi.storage.HoodieLocation
+
+import org.apache.spark.sql.{AnalysisException, SparkSession}
import org.apache.spark.sql.HoodieSpark3CatalystPlanUtils.MatchResolvedTable
-import
org.apache.spark.sql.catalyst.analysis.SimpleAnalyzer.resolveExpressionByPlanChildren
import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases,
NamedRelation, ResolvedFieldName, UnresolvedAttribute, UnresolvedFieldName,
UnresolvedPartitionSpec}
+import
org.apache.spark.sql.catalyst.analysis.SimpleAnalyzer.resolveExpressionByPlanChildren
import org.apache.spark.sql.catalyst.catalog.{CatalogTable, CatalogUtils}
import org.apache.spark.sql.catalyst.expressions.Expression
import
org.apache.spark.sql.catalyst.plans.logcal.{HoodieFileSystemViewTableValuedFunction,
HoodieFileSystemViewTableValuedFunctionOptionsParser,
HoodieMetadataTableValuedFunction, HoodieQuery, HoodieTableChanges,
HoodieTableChangesOptionsParser, HoodieTimelineTableValuedFunction,
HoodieTimelineTableValuedFunctionOptionsParser}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.catalyst.trees.Origin
-import
org.apache.spark.sql.connector.catalog.CatalogV2Implicits.IdentifierHelper
import org.apache.spark.sql.connector.catalog.{Table, V1Table}
-import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation
+import
org.apache.spark.sql.connector.catalog.CatalogV2Implicits.IdentifierHelper
import org.apache.spark.sql.execution.datasources.{DataSource, LogicalRelation}
import org.apache.spark.sql.hudi.HoodieSqlCommonUtils.isMetaField
import org.apache.spark.sql.hudi.ProvidesHoodieConfig
import
org.apache.spark.sql.hudi.analysis.HoodieSpark32PlusAnalysis.{HoodieV1OrV2Table,
ResolvesToHudiTable}
import org.apache.spark.sql.hudi.catalog.HoodieInternalV2Table
import
org.apache.spark.sql.hudi.command.{AlterHoodieTableDropPartitionCommand,
ShowHoodieTablePartitionsCommand, TruncateHoodieTableCommand}
-import org.apache.spark.sql.{AnalysisException, SQLContext, SparkSession}
/**
* NOTE: PLEASE READ CAREFULLY
@@ -92,7 +92,7 @@ case class HoodieSpark32PlusResolveReferences(spark:
SparkSession) extends Rule[
case HoodieTableChanges(args) =>
val (tablePath, opts) =
HoodieTableChangesOptionsParser.parseOptions(args, HoodieTableChanges.FUNC_NAME)
val hoodieDataSource = new DefaultSource
- if (tablePath.contains(Path.SEPARATOR)) {
+ if (tablePath.contains(HoodieLocation.SEPARATOR)) {
// the first param is table path
val relation = hoodieDataSource.createRelation(spark.sqlContext, opts
++ Map("path" -> tablePath))
LogicalRelation(relation)
@@ -107,7 +107,7 @@ case class HoodieSpark32PlusResolveReferences(spark:
SparkSession) extends Rule[
case HoodieTimelineTableValuedFunction(args) =>
val (tablePath, opts) =
HoodieTimelineTableValuedFunctionOptionsParser.parseOptions(args,
HoodieTimelineTableValuedFunction.FUNC_NAME)
val hoodieDataSource = new DefaultSource
- if (tablePath.contains(Path.SEPARATOR)) {
+ if (tablePath.contains(HoodieLocation.SEPARATOR)) {
// the first param is table path
val relation = hoodieDataSource.createRelation(spark.sqlContext, opts
++ Map("path" -> tablePath))
LogicalRelation(relation)
@@ -122,7 +122,7 @@ case class HoodieSpark32PlusResolveReferences(spark:
SparkSession) extends Rule[
case HoodieFileSystemViewTableValuedFunction(args) =>
val (tablePath, opts) =
HoodieFileSystemViewTableValuedFunctionOptionsParser.parseOptions(args,
HoodieFileSystemViewTableValuedFunction.FUNC_NAME)
val hoodieDataSource = new DefaultSource
- if (tablePath.contains(Path.SEPARATOR)) {
+ if (tablePath.contains(HoodieLocation.SEPARATOR)) {
// the first param is table path
val relation = hoodieDataSource.createRelation(spark.sqlContext, opts
++ Map("path" -> tablePath))
LogicalRelation(relation)
@@ -137,7 +137,7 @@ case class HoodieSpark32PlusResolveReferences(spark:
SparkSession) extends Rule[
case HoodieMetadataTableValuedFunction(args) =>
val (tablePath, opts) =
HoodieMetadataTableValuedFunction.parseOptions(args,
HoodieMetadataTableValuedFunction.FUNC_NAME)
val hoodieDataSource = new DefaultSource
- if (tablePath.contains(Path.SEPARATOR)) {
+ if (tablePath.contains(HoodieLocation.SEPARATOR)) {
// the first param is table path
val relation = hoodieDataSource.createRelation(spark.sqlContext, opts
++ Map("path" -> (tablePath + "/.hoodie/metadata")))
LogicalRelation(relation)
diff --git
a/hudi-sync/hudi-hive-sync/src/test/java/org/apache/hudi/hive/testutils/HiveTestService.java
b/hudi-sync/hudi-hive-sync/src/test/java/org/apache/hudi/hive/testutils/HiveTestService.java
index ad1918eabf8..29d14400530 100644
---
a/hudi-sync/hudi-hive-sync/src/test/java/org/apache/hudi/hive/testutils/HiveTestService.java
+++
b/hudi-sync/hudi-hive-sync/src/test/java/org/apache/hudi/hive/testutils/HiveTestService.java
@@ -20,9 +20,9 @@ package org.apache.hudi.hive.testutils;
import org.apache.hudi.common.testutils.NetworkTestUtils;
import org.apache.hudi.common.util.FileIOUtils;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.HiveMetaStore;
@@ -220,7 +220,7 @@ public class HiveTestService {
}
private static String getHiveLocation(String baseLocation) {
- return baseLocation + Path.SEPARATOR + "hive";
+ return baseLocation + HoodieLocation.SEPARATOR + "hive";
}
private HiveServer2 startHiveServer(HiveConf serverConf) {
diff --git
a/hudi-timeline-service/src/main/java/org/apache/hudi/timeline/service/handlers/marker/MarkerBasedEarlyConflictDetectionRunnable.java
b/hudi-timeline-service/src/main/java/org/apache/hudi/timeline/service/handlers/marker/MarkerBasedEarlyConflictDetectionRunnable.java
index 5cc3d431d30..931bd421b39 100644
---
a/hudi-timeline-service/src/main/java/org/apache/hudi/timeline/service/handlers/marker/MarkerBasedEarlyConflictDetectionRunnable.java
+++
b/hudi-timeline-service/src/main/java/org/apache/hudi/timeline/service/handlers/marker/MarkerBasedEarlyConflictDetectionRunnable.java
@@ -25,6 +25,7 @@ import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.util.HoodieTimer;
import org.apache.hudi.common.util.MarkerUtils;
import org.apache.hudi.exception.HoodieIOException;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.timeline.service.handlers.MarkerHandler;
import org.apache.hadoop.conf.Configuration;
@@ -87,7 +88,7 @@ public class MarkerBasedEarlyConflictDetectionRunnable
implements Runnable {
// and the markers from the requests pending processing.
currentInstantAllMarkers.addAll(markerHandler.getAllMarkers(markerDir));
currentInstantAllMarkers.addAll(pendingMarkers);
-    Path tempPath = new Path(basePath + Path.SEPARATOR + HoodieTableMetaClient.TEMPFOLDER_NAME);
+    Path tempPath = new Path(basePath + HoodieLocation.SEPARATOR + HoodieTableMetaClient.TEMPFOLDER_NAME);
List<Path> instants = MarkerUtils.getAllMarkerDir(tempPath, fs);
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/SparkSampleWritesUtils.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/SparkSampleWritesUtils.java
index 11a19b030fc..d4fc5e8053a 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/SparkSampleWritesUtils.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/SparkSampleWritesUtils.java
@@ -34,6 +34,7 @@ import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.hadoop.fs.CachingPath;
import org.apache.hudi.hadoop.fs.HadoopFSUtils;
+import org.apache.hudi.storage.HoodieLocation;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -138,7 +139,7 @@ public class SparkSampleWritesUtils {
}
private static String getSampleWritesBasePath(JavaSparkContext jsc,
HoodieWriteConfig writeConfig, String instantTime) throws IOException {
-    Path basePath = new CachingPath(writeConfig.getBasePath(), SAMPLE_WRITES_FOLDER_PATH + Path.SEPARATOR + instantTime);
+    Path basePath = new CachingPath(writeConfig.getBasePath(), SAMPLE_WRITES_FOLDER_PATH + HoodieLocation.SEPARATOR + instantTime);
FileSystem fs = HadoopFSUtils.getFs(basePath, jsc.hadoopConfiguration());
if (fs.exists(basePath)) {
fs.delete(basePath, true);