This is an automated email from the ASF dual-hosted git repository.
zhangduo pushed a commit to branch branch-2.4
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/branch-2.4 by this push:
new 52d3824bd8d HBASE-27688 HFile splitting occurs during bulkload, the
CREATE_TIME_TS of hfileinfo is 0 (#5097)
52d3824bd8d is described below
commit 52d3824bd8d11b461d8826dc9bc22b024cd49ff1
Author: alan.zhao <[email protected]>
AuthorDate: Wed Mar 15 08:17:18 2023 +0800
HBASE-27688 HFile splitting occurs during bulkload, the CREATE_TIME_TS of
hfileinfo is 0 (#5097)
Co-authored-by: alanzhao <[email protected]>
Signed-off-by: Duo Zhang <[email protected]>
(cherry picked from commit 6920e72f50b35eea79921f20dbe6a15f5cfdf492)
---
.../hadoop/hbase/tool/LoadIncrementalHFiles.java | 3 +-
.../hbase/tool/TestLoadIncrementalHFiles.java | 35 ++++++++++++++++++++++
2 files changed, 37 insertions(+), 1 deletion(-)
diff --git
a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
index 7842e21d076..ee60f30b674 100644
---
a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
+++
b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
@@ -92,6 +92,7 @@ import org.apache.hadoop.hbase.regionserver.StoreUtils;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.security.token.FsDelegationToken;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.FSVisitor;
import org.apache.hadoop.hbase.util.Pair;
@@ -1170,7 +1171,7 @@ public class LoadIncrementalHFiles extends Configured
implements Tool {
.withChecksumType(StoreUtils.getChecksumType(conf))
.withBytesPerCheckSum(StoreUtils.getBytesPerChecksum(conf)).withBlockSize(blocksize)
.withDataBlockEncoding(familyDescriptor.getDataBlockEncoding()).withIncludesTags(true)
- .build();
+ .withCreateTime(EnvironmentEdgeManager.currentTime()).build();
halfWriter = new StoreFileWriter.Builder(conf, cacheConf,
fs).withFilePath(outFile)
.withBloomType(bloomFilterType).withFileContext(hFileContext).build();
HFileScanner scanner = halfReader.getScanner(false, false, false);
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java
index 5f07e6440cd..d254735d255 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase.tool;
+import static org.hamcrest.Matchers.greaterThan;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@@ -60,6 +61,7 @@ import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.HFileTestUtil;
+import org.hamcrest.MatcherAssert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
@@ -567,6 +569,39 @@ public class TestLoadIncrementalHFiles {
assertEquals(1000, rowCount);
}
+ /**
+ * Tests that the create_time property of the HFile produced by
the splitStoreFile
+ * method is greater than 0. See HBASE-27688.
+ */
+ @Test
+ public void testSplitStoreFileWithCreateTimeTS() throws IOException {
+ Path dir =
util.getDataTestDirOnTestFS("testSplitStoreFileWithCreateTimeTS");
+ FileSystem fs = util.getTestFileSystem();
+ Path testIn = new Path(dir, "testhfile");
+ ColumnFamilyDescriptor familyDesc =
ColumnFamilyDescriptorBuilder.of(FAMILY);
+ HFileTestUtil.createHFile(util.getConfiguration(), fs, testIn, FAMILY,
QUALIFIER,
+ Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);
+
+ Path bottomOut = new Path(dir, "bottom.out");
+ Path topOut = new Path(dir, "top.out");
+
+ BulkLoadHFilesTool.splitStoreFile(util.getConfiguration(), testIn,
familyDesc,
+ Bytes.toBytes("ggg"), bottomOut, topOut);
+
+ verifyHFileCreateTimeTS(bottomOut);
+ verifyHFileCreateTimeTS(topOut);
+ }
+
+ private void verifyHFileCreateTimeTS(Path p) throws IOException {
+ Configuration conf = util.getConfiguration();
+
+ try (HFile.Reader reader =
+ HFile.createReader(p.getFileSystem(conf), p, new CacheConfig(conf),
true, conf)) {
+ long fileCreateTime =
reader.getHFileInfo().getHFileContext().getFileCreateTime();
+ MatcherAssert.assertThat(fileCreateTime, greaterThan(0L));
+ }
+ }
+
@Test
public void testSplitStoreFileWithNoneToNone() throws IOException {
testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE,
DataBlockEncoding.NONE);