This is an automated email from the ASF dual-hosted git repository.
zhangduo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/master by this push:
new bd930ce31c8 HBASE-29783 Fix flaky TestVerifyBucketCacheFile.testRetrieveFromFile … (#7613)
bd930ce31c8 is described below
commit bd930ce31c8035a2fab8495f663c86a79e63d882
Author: Duo Zhang <[email protected]>
AuthorDate: Wed Jan 21 00:02:29 2026 +0800
HBASE-29783 Fix flaky TestVerifyBucketCacheFile.testRetrieveFromFile … (#7613)
Co-authored-by: Aman Poonia <[email protected]>
Signed-off-by: Wellington Chevreuil <[email protected]>
Reviewed-by: Umesh Kumar <[email protected]>
---
.../hadoop/hbase/io/hfile/bucket/BucketCache.java | 34 +++++++++++-----------
.../io/hfile/bucket/TestVerifyBucketCacheFile.java | 6 ++--
2 files changed, 19 insertions(+), 21 deletions(-)
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 60ba9f32cd7..ba302d7c42c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -57,7 +57,6 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Consumer;
import java.util.function.Function;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.mutable.MutableInt;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -229,8 +228,6 @@ public class BucketCache implements BlockCache, HeapSize {
/** Cache access count (sequential ID) */
private final AtomicLong accessCount = new AtomicLong();
- private static final int DEFAULT_CACHE_WAIT_TIME = 50;
-
private final BucketCacheStats cacheStats;
private final String persistencePath;
static AtomicBoolean isCacheInconsistent = new AtomicBoolean(false);
@@ -2385,7 +2382,7 @@ public class BucketCache implements BlockCache, HeapSize {
LOG.debug("Notifying caching completed for file {}, with total blocks {}, and data blocks {}",
  fileName, totalBlockCount, dataBlockCount);
try {
- final MutableInt count = new MutableInt();
+ int count = 0;
LOG.debug("iterating over {} entries in the backing map", backingMap.size());
Set<BlockCacheKey> result = getAllCacheKeysForFile(fileName.getName(), 0, Long.MAX_VALUE);
if (result.isEmpty() && StoreFileInfo.isReference(fileName)) {
@@ -2393,18 +2390,18 @@ public class BucketCache implements BlockCache, HeapSize {
StoreFileInfo.getReferredToRegionAndFile(fileName.getName()).getSecond(), 0, Long.MAX_VALUE);
}
- result.stream().forEach(entry -> {
+ for (BlockCacheKey entry : result) {
LOG.debug("found block for file {} in the backing map. Acquiring read lock for offset {}",
  fileName.getName(), entry.getOffset());
ReentrantReadWriteLock lock = offsetLock.getLock(entry.getOffset());
lock.readLock().lock();
locks.add(lock);
if (backingMap.containsKey(entry) && entry.getBlockType().isData()) {
- count.increment();
+ count++;
}
- });
+ }
// BucketCache would only have data blocks
- if (dataBlockCount == count.getValue()) {
+ if (dataBlockCount == count) {
LOG.debug("File {} has now been fully cached.", fileName);
fileCacheCompleted(fileName, size);
} else {
@@ -2412,7 +2409,7 @@ public class BucketCache implements BlockCache, HeapSize {
"Prefetch executor completed for {}, but only {} data blocks were cached. "
+ "Total data blocks for file: {}. "
+ "Checking for blocks pending cache in cache writer queue.",
- fileName, count.getValue(), dataBlockCount);
+ fileName, count, dataBlockCount);
if (ramCache.hasBlocksForFile(fileName.getName())) {
for (ReentrantReadWriteLock lock : locks) {
lock.readLock().unlock();
@@ -2507,16 +2504,19 @@ public class BucketCache implements BlockCache, HeapSize {
@Override
public boolean waitForCacheInitialization(long timeout) {
- try {
- while (cacheState == CacheState.INITIALIZING) {
- if (timeout <= 0) {
- break;
- }
+ while (cacheState == CacheState.INITIALIZING) {
+ if (timeout <= 0) {
+ break;
+ }
+ try {
Thread.sleep(100);
- timeout -= 100;
+ } catch (InterruptedException e) {
+ LOG.warn("Interrupted while waiting for cache initialization", e);
+ Thread.currentThread().interrupt();
+ break;
}
- } finally {
- return isCacheEnabled();
+ timeout -= 100;
}
+ return isCacheEnabled();
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestVerifyBucketCacheFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestVerifyBucketCacheFile.java
index 679b9098607..5ea88c0208a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestVerifyBucketCacheFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestVerifyBucketCacheFile.java
@@ -46,7 +46,6 @@ import org.apache.hadoop.hbase.io.hfile.CacheTestUtils;
import org.apache.hadoop.hbase.io.hfile.Cacheable;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.Pair;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -138,7 +137,8 @@ public class TestVerifyBucketCacheFile {
recoveredBucketCache =
  new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, constructedBlockSize,
    constructedBlockSizes, writeThreads, writerQLen, testDir + "/bucket.persistence");
- waitPersistentCacheValidation(conf, bucketCache);
+ assertTrue(recoveredBucketCache.waitForCacheInitialization(10000));
+ waitPersistentCacheValidation(conf, recoveredBucketCache);
assertEquals(0, recoveredBucketCache.getAllocator().getUsedSize());
assertEquals(0, recoveredBucketCache.backingMap.size());
BlockCacheKey[] newKeys = CacheTestUtils.regenerateKeys(blocks, names);
@@ -308,8 +308,6 @@ public class TestVerifyBucketCacheFile {
long usedSize = bucketCache.getAllocator().getUsedSize();
assertEquals(0, usedSize);
- Pair<String, Long> myPair = new Pair<>();
-
CacheTestUtils.HFileBlockPair[] blocks =
CacheTestUtils.generateHFileBlocks(constructedBlockSize, 1);
// Add blocks