This is an automated email from the ASF dual-hosted git repository.

wchevreuil pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git

The following commit(s) were added to refs/heads/branch-2 by this push:
     new 500548abbd7 HBASE-29627 Handle any block cache fetching errors when reading a block in HFileReaderImpl (#7341) (#7344)
500548abbd7 is described below

commit 500548abbd731ea0df50c179fefc01edf569be9d
Author: Wellington Ramos Chevreuil <wchevre...@apache.org>
AuthorDate: Fri Sep 26 10:28:14 2025 +0100

    HBASE-29627 Handle any block cache fetching errors when reading a block in HFileReaderImpl (#7341) (#7344)

    Signed-off-by: Peter Somogyi <psomo...@apache.org>
---
 .../hadoop/hbase/io/hfile/HFileReaderImpl.java     | 26 ++++++++++++++++++++++
 .../hadoop/hbase/io/hfile/TestHFileReaderImpl.java | 22 ++++++++++++++++++
 2 files changed, 48 insertions(+)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
index c9b843c999a..db183bb7177 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
@@ -1181,6 +1181,32 @@ public abstract class HFileReaderImpl implements HFile.Reader, Configurable {
         }
         return cachedBlock;
       }
+    } catch (Exception e) {
+      if (cachedBlock != null) {
+        returnAndEvictBlock(cache, cacheKey, cachedBlock);
+      }
+      LOG.warn("Failed retrieving block from cache with key {}. "
+        + "\n Evicting this block from cache and will read it from file system. "
+        + "\n Exception details: ", cacheKey, e);
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Further tracing details for failed block cache retrieval:"
+          + "\n Complete File path - {}," + "\n Expected Block Type - {}, Actual Block Type - {},"
+          + "\n Cache compressed - {}" + "\n Header size (after deserialized from cache) - {}"
+          + "\n Size with header - {}" + "\n Uncompressed size without header - {} "
+          + "\n Total byte buffer size - {}" + "\n Encoding code - {}", this.path,
+          expectedBlockType, (cachedBlock != null ? cachedBlock.getBlockType() : "N/A"),
+          (expectedBlockType != null
+            ? cacheConf.shouldCacheCompressed(expectedBlockType.getCategory())
+            : "N/A"),
+          (cachedBlock != null ? cachedBlock.headerSize() : "N/A"),
+          (cachedBlock != null ? cachedBlock.getOnDiskSizeWithHeader() : "N/A"),
+          (cachedBlock != null ? cachedBlock.getUncompressedSizeWithoutHeader() : "N/A"),
+          (cachedBlock != null ? cachedBlock.getBufferReadOnly().limit() : "N/A"),
+          (cachedBlock != null
+            ? cachedBlock.getBufferReadOnly().getShort(cachedBlock.headerSize())
+            : "N/A"));
+      }
+      return null;
     } finally {
       // Count bytes read as cached block is being returned
       if (isScanMetricsEnabled && cachedBlock != null) {

diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderImpl.java
index 42f2cf5ebd9..1eb5ac02607 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderImpl.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderImpl.java
@@ -18,7 +18,12 @@ package org.apache.hadoop.hbase.io.hfile;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyBoolean;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -116,6 +121,23 @@ public class TestHFileReaderImpl {
     }
   }
 
+  @Test
+  public void testReadWorksWhenCacheCorrupt() throws Exception {
+    BlockCache mockedCache = mock(BlockCache.class);
+    when(mockedCache.getBlock(any(), anyBoolean(), anyBoolean(), anyBoolean()))
+      .thenThrow(new RuntimeException("Injected error"));
+    Path p = makeNewFile();
+    FileSystem fs = TEST_UTIL.getTestFileSystem();
+    Configuration conf = TEST_UTIL.getConfiguration();
+    HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf, mockedCache), true, conf);
+    long offset = 0;
+    while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
+      HFileBlock block = reader.readBlock(offset, -1, false, true, false, true, null, null, false);
+      assertNotNull(block);
+      offset += block.getOnDiskSizeWithHeader();
+    }
+  }
+
   @Test
   public void testSeekBefore() throws Exception {
     Path p = makeNewFile();
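
For context, the design choice in this change is that any exception thrown while fetching a block from the block cache is treated like a cache miss: the suspect entry is evicted and null is returned, so the caller falls back to reading the block from the file system. The sketch below is a simplified, illustrative rendering of that fallback pattern only, not HBase code; the BlockStore interface and the loadFromSource/evict names are hypothetical placeholders.

// Illustrative sketch of the "treat a cache failure as a cache miss" pattern.
// Not part of the patch; all names here are hypothetical.
import java.util.function.Function;

public class FallbackOnCacheErrorExample {

  /** Toy cache whose lookups may throw, standing in for a corrupt cache entry. */
  interface BlockStore<K, V> {
    V get(K key);          // may throw RuntimeException on a corrupt entry
    void evict(K key);     // drop the suspect entry
    void put(K key, V value);
  }

  /**
   * Try the cache first; if the lookup throws, evict the entry and fall back to
   * the authoritative source. This mirrors the shape of the new catch block in
   * HFileReaderImpl (returnAndEvictBlock, then returning null so the caller
   * proceeds to the file-system read).
   */
  static <K, V> V read(BlockStore<K, V> cache, K key, Function<K, V> loadFromSource) {
    try {
      V cached = cache.get(key);
      if (cached != null) {
        return cached;
      }
    } catch (RuntimeException e) {
      // The real patch also logs warn/debug details; here we simply evict and fall through.
      cache.evict(key);
    }
    V loaded = loadFromSource.apply(key);
    cache.put(key, loaded);
    return loaded;
  }
}

Returning null on failure (rather than rethrowing) is what lets readBlock continue down the file-system read path, which the new testReadWorksWhenCacheCorrupt test exercises by injecting a RuntimeException from a mocked BlockCache and asserting every block is still readable.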