This is an automated email from the ASF dual-hosted git repository.
zhangduo pushed a commit to branch branch-2.5
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/branch-2.5 by this push:
new becbc3f8b23 HBASE-27688 HFile splitting occurs during bulkload, the
CREATE_TIME_TS of hfileinfo is 0 (#5097)
becbc3f8b23 is described below
commit becbc3f8b23da8366e697d436b30bc6a9339f2a1
Author: alan.zhao <[email protected]>
AuthorDate: Wed Mar 15 08:17:18 2023 +0800
HBASE-27688 HFile splitting occurs during bulkload, the CREATE_TIME_TS of
hfileinfo is 0 (#5097)
Co-authored-by: alanzhao <[email protected]>
Signed-off-by: Duo Zhang <[email protected]>
(cherry picked from commit 6920e72f50b35eea79921f20dbe6a15f5cfdf492)
---
.../hadoop/hbase/tool/LoadIncrementalHFiles.java | 3 +-
.../hbase/tool/TestLoadIncrementalHFiles.java | 35 ++++++++++++++++++++++
2 files changed, 37 insertions(+), 1 deletion(-)
diff --git
a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
index 54adfd22a36..91d80efab41 100644
---
a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
+++
b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
@@ -92,6 +92,7 @@ import org.apache.hadoop.hbase.regionserver.StoreUtils;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.security.token.FsDelegationToken;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.FSVisitor;
import org.apache.hadoop.hbase.util.Pair;
@@ -1181,7 +1182,7 @@ public class LoadIncrementalHFiles extends Configured
implements Tool {
.withChecksumType(StoreUtils.getChecksumType(conf))
.withBytesPerCheckSum(StoreUtils.getBytesPerChecksum(conf)).withBlockSize(blocksize)
.withDataBlockEncoding(familyDescriptor.getDataBlockEncoding()).withIncludesTags(true)
- .build();
+ .withCreateTime(EnvironmentEdgeManager.currentTime()).build();
halfWriter = new StoreFileWriter.Builder(conf, cacheConf,
fs).withFilePath(outFile)
.withBloomType(bloomFilterType).withFileContext(hFileContext).build();
HFileScanner scanner = halfReader.getScanner(false, false, false);
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java
index cef666942f8..833ce35edd0 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase.tool;
+import static org.hamcrest.Matchers.greaterThan;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;
@@ -61,6 +62,7 @@ import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.HFileTestUtil;
+import org.hamcrest.MatcherAssert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
@@ -568,6 +570,39 @@ public class TestLoadIncrementalHFiles {
assertEquals(1000, rowCount);
}
+ /**
+ * This method verifies that the create_time property of the HFiles produced
by the
+ * splitStoreFile method is greater than 0 (HBASE-27688).
+ @Test
+ public void testSplitStoreFileWithCreateTimeTS() throws IOException {
+ Path dir =
util.getDataTestDirOnTestFS("testSplitStoreFileWithCreateTimeTS");
+ FileSystem fs = util.getTestFileSystem();
+ Path testIn = new Path(dir, "testhfile");
+ ColumnFamilyDescriptor familyDesc =
ColumnFamilyDescriptorBuilder.of(FAMILY);
+ HFileTestUtil.createHFile(util.getConfiguration(), fs, testIn, FAMILY,
QUALIFIER,
+ Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);
+
+ Path bottomOut = new Path(dir, "bottom.out");
+ Path topOut = new Path(dir, "top.out");
+
+ BulkLoadHFilesTool.splitStoreFile(util.getConfiguration(), testIn,
familyDesc,
+ Bytes.toBytes("ggg"), bottomOut, topOut);
+
+ verifyHFileCreateTimeTS(bottomOut);
+ verifyHFileCreateTimeTS(topOut);
+ }
+
+ private void verifyHFileCreateTimeTS(Path p) throws IOException {
+ Configuration conf = util.getConfiguration();
+
+ try (HFile.Reader reader =
+ HFile.createReader(p.getFileSystem(conf), p, new CacheConfig(conf),
true, conf)) {
+ long fileCreateTime =
reader.getHFileInfo().getHFileContext().getFileCreateTime();
+ MatcherAssert.assertThat(fileCreateTime, greaterThan(0L));
+ }
+ }
+
@Test
public void testSplitStoreFileWithNoneToNone() throws IOException {
testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE,
DataBlockEncoding.NONE);