nsivabalan commented on a change in pull request #3472:
URL: https://github.com/apache/hudi/pull/3472#discussion_r688694788
##########
File path:
hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/marker/DirectWriteMarkers.java
##########
@@ -173,6 +180,7 @@ private String translateMarkerToDataPath(String markerPath)
{
try {
if (!fs.exists(dirPath)) {
fs.mkdirs(dirPath); // create a new partition as needed.
+ MarkerUtils.writeMarkerTypeToFile(MarkerType.DIRECT, fs,
markerDirPath.toString());
Review comment:
Same comment as above applies here: since the marker type file is not needed for DIRECT markers, this write looks unnecessary.
##########
File path:
hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/marker/DirectWriteMarkers.java
##########
@@ -158,7 +162,10 @@ private String translateMarkerToDataPath(String
markerPath) {
Set<String> markerFiles = new HashSet<>();
if (doesMarkerDirExist()) {
FSUtils.processFiles(fs, markerDirPath.toString(), fileStatus -> {
-
markerFiles.add(stripMarkerFolderPrefix(fileStatus.getPath().toString()));
+ String filePathStr = fileStatus.getPath().toString();
+ if (!filePathStr.contains(MARKER_TYPE_FILENAME)) {
Review comment:
Since we agreed that we don't need the marker type file with the DIRECT marker type, do we
still need this check?
##########
File path:
hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/table/action/rollback/SparkMarkerBasedRollbackStrategy.java
##########
@@ -91,4 +96,40 @@ public SparkMarkerBasedRollbackStrategy(HoodieTable<T,
JavaRDD<HoodieRecord<T>>,
FSUtils.getPartitionPath(config.getBasePath(), partitionPathStr),
fileId, HoodieFileFormat.HOODIE_LOG.getFileExtension(), baseCommitTime)
.collect(Collectors.toMap(HoodieLogFile::getFileStatus, value ->
value.getFileStatus().getLen()));
}
+
+ /**
+ * Gets all marker file paths
+ *
+ * @param instant
+ * @param parallelism
+ * @return
+ * @throws IOException
+ */
+ private List<String> getAllMarkerFilePaths(String instant, int parallelism)
throws IOException {
+ String markerDir = table.getMetaClient().getMarkerFolderPath(instant);
+ FileSystem fileSystem = FSUtils.getFs(markerDir,
context.getHadoopConf().newCopy());
+ Option<MarkerType> markerTypeOption =
MarkerUtils.readMarkerType(fileSystem, markerDir);
+
+ if (!markerTypeOption.isPresent()) {
+ WriteMarkers writeMarkers = WriteMarkersFactory.get(MarkerType.DIRECT,
table, instant);
+ return new ArrayList<>(writeMarkers.allMarkerFilePaths());
+ }
+
+ switch (markerTypeOption.get()) {
+ case DIRECT:
+ WriteMarkers writeMarkers = WriteMarkersFactory.get(MarkerType.DIRECT,
table, instant);
+ return new ArrayList<>(writeMarkers.allMarkerFilePaths());
+ case TIMELINE_SERVER_BASED:
+ // Reads all markers written by the timeline server
+ Map<String, Set<String>> markersMap =
+ MarkerUtils.readTimelineServerBasedMarkersFromFileSystem(
+ markerDir, fileSystem, context, parallelism);
+ List<String> markers = new ArrayList<>();
+ markersMap.forEach((key, value) -> markers.addAll(value));
Review comment:
Can we do this in one line? e.g.
markersMap.values().stream().flatMap(Set::stream).collect(Collectors.toList()) or something similar.
##########
File path:
hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/table/action/rollback/SparkMarkerBasedRollbackStrategy.java
##########
@@ -91,4 +96,40 @@ public SparkMarkerBasedRollbackStrategy(HoodieTable<T,
JavaRDD<HoodieRecord<T>>,
FSUtils.getPartitionPath(config.getBasePath(), partitionPathStr),
fileId, HoodieFileFormat.HOODIE_LOG.getFileExtension(), baseCommitTime)
.collect(Collectors.toMap(HoodieLogFile::getFileStatus, value ->
value.getFileStatus().getLen()));
}
+
+ /**
+ * Gets all marker file paths
+ *
+ * @param instant
+ * @param parallelism
+ * @return
+ * @throws IOException
+ */
+ private List<String> getAllMarkerFilePaths(String instant, int parallelism)
throws IOException {
+ String markerDir = table.getMetaClient().getMarkerFolderPath(instant);
+ FileSystem fileSystem = FSUtils.getFs(markerDir,
context.getHadoopConf().newCopy());
+ Option<MarkerType> markerTypeOption =
MarkerUtils.readMarkerType(fileSystem, markerDir);
+
+ if (!markerTypeOption.isPresent()) {
+ WriteMarkers writeMarkers = WriteMarkersFactory.get(MarkerType.DIRECT,
table, instant);
+ return new ArrayList<>(writeMarkers.allMarkerFilePaths());
+ }
+
+ switch (markerTypeOption.get()) {
+ case DIRECT:
Review comment:
guess we need to fix this as well
##########
File path:
hudi-common/src/main/java/org/apache/hudi/common/util/MarkerUtils.java
##########
@@ -0,0 +1,183 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hudi.common.util;
+
+import org.apache.hudi.common.config.SerializableConfiguration;
+import org.apache.hudi.common.engine.HoodieEngineContext;
+import org.apache.hudi.common.table.marker.MarkerType;
+import org.apache.hudi.common.util.collection.ImmutablePair;
+import org.apache.hudi.exception.HoodieException;
+import org.apache.hudi.exception.HoodieIOException;
+
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+
+import static org.apache.hudi.common.util.FileIOUtils.closeQuietly;
+
+/**
+ * A utility class for marker related operations.
+ */
+public class MarkerUtils {
+ public static final String MARKERS_FILENAME_PREFIX = "MARKERS";
+ public static final String MARKER_TYPE_FILENAME = MARKERS_FILENAME_PREFIX +
".type";
+ private static final Logger LOG = LogManager.getLogger(MarkerUtils.class);
+
+ /**
+ * Reads the marker type from `MARKERS.type` file.
+ *
+ * @param fileSystem file system to use.
+ * @param markerDir marker directory.
+ * @return the marker type, or empty if the marker type file does not exist.
+ */
+ public static Option<MarkerType> readMarkerType(FileSystem fileSystem,
String markerDir) {
+ Path markerTypeFilePath = new Path(markerDir, MARKER_TYPE_FILENAME);
+ FSDataInputStream fsDataInputStream = null;
+ String content = null;
Review comment:
We can do Option<String> content = Option.empty();
and override it in line 78,
so that lines 86 to 88 can be removed.
##########
File path:
hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/marker/WriteMarkersFactory.java
##########
@@ -20,6 +20,7 @@
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.fs.StorageSchemes;
+import org.apache.hudi.common.table.marker.MarkerType;
Review comment:
guess this import is unused
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]