This is an automated email from the ASF dual-hosted git repository.

pankajkumar pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/master by this push:
     new 7e3d04e1762 HBASE-28806 ExportSnapshot failed if reference file present (#6461)
7e3d04e1762 is described below

commit 7e3d04e17620c01e651152a5b61fc83359ee6fda
Author: mokai <[email protected]>
AuthorDate: Fri Nov 22 16:45:16 2024 +0800

    HBASE-28806 ExportSnapshot failed if reference file present (#6461)
    
    Signed-off-by: Duo Zhang <[email protected]>
    Signed-off-by: Pankaj Kumar <[email protected]>
---
 .../hadoop/hbase/snapshot/ExportSnapshot.java      | 11 ++++-
 .../hadoop/hbase/snapshot/TestExportSnapshot.java  | 48 ++++++++++++++++++++++
 2 files changed, 58 insertions(+), 1 deletion(-)
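
Background on the fix: after a region split, both daughter regions keep reference files that point at the same parent hfile, so the snapshot manifest can resolve the same underlying file more than once. ExportSnapshot could then schedule the same file in more than one mapper, and the overlapping copies made the export fail (HBASE-28806). The patch below deduplicates the resolved file list before the MR job is submitted. A minimal self-contained sketch of the pattern, with plain strings standing in for the real Pair<SnapshotFileInfo, Long> entries (illustrative only, not the HBase API):

    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class DedupSketch {
      // Keep the first occurrence of each hfile; drop later duplicates.
      static List<String> dedup(List<String> resolvedHFiles) {
        Set<String> addedFiles = new HashSet<>();
        List<String> files = new ArrayList<>();
        for (String hfile : resolvedHFiles) {
          // Set.add returns false if the element was already present.
          if (addedFiles.add(hfile)) {
            files.add(hfile);
          }
        }
        return files;
      }

      public static void main(String[] args) {
        // Two reference files from a split resolve to the same parent hfile.
        System.out.println(dedup(List.of("hfile-A", "hfile-A", "hfile-B")));
        // prints: [hfile-A, hfile-B]
      }
    }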

diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index 186289e517c..6f70eefd3b7 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -26,8 +26,10 @@ import java.io.InputStream;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
+import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Set;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -659,6 +661,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
 
     // Get snapshot files
     LOG.info("Loading Snapshot '" + snapshotDesc.getName() + "' hfile list");
+    Set<String> addedFiles = new HashSet<>();
     SnapshotReferenceUtil.visitReferencedFiles(conf, fs, snapshotDir, snapshotDesc,
       new SnapshotReferenceUtil.SnapshotVisitor() {
         @Override
@@ -678,7 +681,13 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
             snapshotFileAndSize = getSnapshotFileAndSize(fs, conf, table, referencedRegion, family,
               referencedHFile, storeFile.hasFileSize() ? storeFile.getFileSize() : -1);
           }
-          files.add(snapshotFileAndSize);
+          String fileToExport = snapshotFileAndSize.getFirst().getHfile();
+          if (!addedFiles.contains(fileToExport)) {
+            files.add(snapshotFileAndSize);
+            addedFiles.add(fileToExport);
+          } else {
+            LOG.debug("Skip the existing file: {}.", fileToExport);
+          }
         }
       });
 
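A side note on the dedup above: java.util.Set.add already reports whether the element was new, so the contains/add pair could be folded into a single call. A functionally equivalent alternative (a stylistic observation only, not part of the commit):

    // Set.add returns true only for the first occurrence of fileToExport.
    if (addedFiles.add(fileToExport)) {
      files.add(snapshotFileAndSize);
    } else {
      LOG.debug("Skip the existing file: {}.", fileToExport);
    }
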
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
index 133737bb397..806d87c69d0 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
@@ -51,10 +51,12 @@ import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
 import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;
+import org.apache.hadoop.hbase.tool.BulkLoadHFilesTool;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CommonFSUtils;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.HFileTestUtil;
 import org.apache.hadoop.hbase.util.Pair;
 import org.junit.After;
 import org.junit.AfterClass;
@@ -206,6 +208,52 @@ public class TestExportSnapshot {
     TEST_UTIL.deleteTable(tableName0);
   }
 
+  @Test
+  public void testExportFileSystemStateWithSplitRegion() throws Exception {
+    // disable compaction
+    admin.compactionSwitch(false,
+      admin.getRegionServers().stream().map(a -> a.getServerName()).collect(Collectors.toList()));
+    // create Table
+    TableName splitTableName = TableName.valueOf(testName.getMethodName());
+    String splitTableSnap = "snapshot-" + testName.getMethodName();
+    admin.createTable(TableDescriptorBuilder.newBuilder(splitTableName).setColumnFamilies(
+      Lists.newArrayList(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).build())).build());
+
+    Path output = TEST_UTIL.getDataTestDir("output/cf");
+    TEST_UTIL.getTestFileSystem().mkdirs(output);
+    // Create and load a large hfile so the MR job runs long enough.
+    HFileTestUtil.createHFile(TEST_UTIL.getConfiguration(), TEST_UTIL.getTestFileSystem(),
+      new Path(output, "test_file"), FAMILY, Bytes.toBytes("q"), Bytes.toBytes("1"),
+      Bytes.toBytes("9"), 9999999);
+    BulkLoadHFilesTool tool = new BulkLoadHFilesTool(TEST_UTIL.getConfiguration());
+    tool.run(new String[] { output.getParent().toString(), splitTableName.getNameAsString() });
+
+    List<RegionInfo> regions = admin.getRegions(splitTableName);
+    assertEquals(1, regions.size());
+    tableNumFiles = regions.size();
+
+    // split region
+    admin.split(splitTableName, Bytes.toBytes("5"));
+    regions = admin.getRegions(splitTableName);
+    assertEquals(2, regions.size());
+
+    // take a snapshot
+    admin.snapshot(splitTableSnap, splitTableName);
+    // export snapshot and verify
+    Configuration tmpConf = TEST_UTIL.getConfiguration();
+    // Decrease the copier buffer size so the export task does not finish too quickly.
+    tmpConf.setInt("snapshot.export.buffer.size", 1);
+    // Decrease the maximum number of files per mapper so the three files (1 hfile + 2 reference
+    // files) are copied by different mappers concurrently.
+    tmpConf.setInt("snapshot.export.default.map.group", 1);
+    testExportFileSystemState(tmpConf, splitTableName, splitTableSnap, splitTableSnap,
+      tableNumFiles, TEST_UTIL.getDefaultRootDirPath(), getHdfsDestinationDir(), false, false,
+      getBypassRegionPredicate(), true, false);
+    // delete table
+    TEST_UTIL.deleteTable(splitTableName);
+  }
+
   @Test
   public void testExportFileSystemStateWithSkipTmp() throws Exception {
     TEST_UTIL.getConfiguration().setBoolean(ExportSnapshot.CONF_SKIP_TMP, true);
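
The two properties the new test tunes are the ExportSnapshot knobs that appear in the diff above: snapshot.export.buffer.size (the copier buffer size) and snapshot.export.default.map.group (the maximum number of files each mapper handles). A minimal sketch of the test's tuning, assuming only a Hadoop Configuration (the values are the ones the test sets):

    import org.apache.hadoop.conf.Configuration;

    public class ExportTuningSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // 1-byte copy buffer: each mapper copies slowly, so the job stays
        // alive long enough for the copies to overlap.
        conf.setInt("snapshot.export.buffer.size", 1);
        // One file per mapper group: the hfile and its two reference files
        // are assigned to different mappers and copied concurrently.
        conf.setInt("snapshot.export.default.map.group", 1);
      }
    }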
