This is an automated email from the ASF dual-hosted git repository.

wchevreuil pushed a commit to branch branch-3
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-3 by this push:
     new 26a052aaa26 HBASE-28217 PrefetchExecutor should not run for files from CFs that have disabled BLOCKCACHE (#5535)
26a052aaa26 is described below

commit 26a052aaa26f63afca91acda218f4d77595f663f
Author: Wellington Ramos Chevreuil <wchevre...@apache.org>
AuthorDate: Tue Nov 28 13:20:15 2023 +0000

    HBASE-28217 PrefetchExecutor should not run for files from CFs that have disabled BLOCKCACHE (#5535)
    
    Signed-off-by: Peter Somogyi <psomo...@apache.org>
---
 .../apache/hadoop/hbase/io/hfile/CacheConfig.java  |  2 +-
 .../hadoop/hbase/io/hfile/PrefetchExecutor.java    |  5 +++
 .../apache/hadoop/hbase/io/hfile/TestPrefetch.java | 52 +++++++++++++++++++++-
 3 files changed, 56 insertions(+), 3 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index 57f91fa19f4..4587eced616 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -343,7 +343,7 @@ public class CacheConfig {
 
   /** Returns true if blocks should be prefetched into the cache on open, false if not */
   public boolean shouldPrefetchOnOpen() {
-    return this.prefetchOnOpen;
+    return this.prefetchOnOpen && this.cacheDataOnRead;
   }
 
   /** Returns true if blocks should be cached while writing during compaction, false if not */
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
index 02fbc12e85c..4ae19193c8a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
@@ -129,4 +129,9 @@ public final class PrefetchExecutor {
 
   private PrefetchExecutor() {
   }
+
+  /* Visible for testing only */
+  static ScheduledExecutorService getExecutorPool() {
+    return prefetchExecutorPool;
+  }
 }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java
index b58319179c5..0b45a930dce 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java
@@ -26,6 +26,7 @@ import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.hasItem;
 import static org.hamcrest.Matchers.hasItems;
 import static org.hamcrest.Matchers.not;
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
@@ -35,6 +36,7 @@ import io.opentelemetry.sdk.trace.data.SpanData;
 import java.io.IOException;
 import java.util.List;
 import java.util.Random;
+import java.util.concurrent.ScheduledThreadPoolExecutor;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.TimeUnit;
 import java.util.function.BiConsumer;
@@ -120,6 +122,40 @@ public class TestPrefetch {
     assertTrue(cc.shouldPrefetchOnOpen());
   }
 
+  @Test
+  public void testPrefetchBlockCacheDisabled() throws Exception {
+    ScheduledThreadPoolExecutor poolExecutor =
+      (ScheduledThreadPoolExecutor) PrefetchExecutor.getExecutorPool();
+    long totalCompletedBefore = poolExecutor.getCompletedTaskCount();
+    long queueBefore = poolExecutor.getQueue().size();
+    ColumnFamilyDescriptor columnFamilyDescriptor =
+      ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("f")).setPrefetchBlocksOnOpen(true)
+        .setBlockCacheEnabled(false).build();
+    HFileContext meta = new HFileContextBuilder().withBlockSize(DATA_BLOCK_SIZE).build();
+    CacheConfig cacheConfig =
+      new CacheConfig(conf, columnFamilyDescriptor, blockCache, ByteBuffAllocator.HEAP);
+    Path storeFile = writeStoreFile("testPrefetchBlockCacheDisabled", meta, cacheConfig);
+    readStoreFile(storeFile, (r, o) -> {
+      HFileBlock block = null;
+      try {
+        block = r.readBlock(o, -1, false, true, false, true, null, null);
+      } catch (IOException e) {
+        fail(e.getMessage());
+      }
+      return block;
+    }, (key, block) -> {
+      boolean isCached = blockCache.getBlock(key, true, false, true) != null;
+      if (
+        block.getBlockType() == BlockType.DATA || block.getBlockType() == BlockType.ROOT_INDEX
+          || block.getBlockType() == BlockType.INTERMEDIATE_INDEX
+      ) {
+        assertFalse(isCached);
+      }
+    }, cacheConfig);
+    assertEquals(totalCompletedBefore + queueBefore,
+      poolExecutor.getCompletedTaskCount() + poolExecutor.getQueue().size());
+  }
+
   @Test
   public void testPrefetch() throws Exception {
     TraceUtil.trace(() -> {
@@ -212,8 +248,15 @@ public class TestPrefetch {
   private void readStoreFile(Path storeFilePath,
     BiFunction<HFile.Reader, Long, HFileBlock> readFunction,
     BiConsumer<BlockCacheKey, HFileBlock> validationFunction) throws Exception {
+    readStoreFile(storeFilePath, readFunction, validationFunction, cacheConf);
+  }
+
+  private void readStoreFile(Path storeFilePath,
+    BiFunction<HFile.Reader, Long, HFileBlock> readFunction,
+    BiConsumer<BlockCacheKey, HFileBlock> validationFunction, CacheConfig cacheConfig)
+    throws Exception {
     // Open the file
-    HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, true, conf);
+    HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConfig, true, conf);
 
     while (!reader.prefetchComplete()) {
       // Sleep for a bit
@@ -350,8 +393,13 @@ public class TestPrefetch {
   }
 
   private Path writeStoreFile(String fname, HFileContext context) throws IOException {
+    return writeStoreFile(fname, context, cacheConf);
+  }
+
+  private Path writeStoreFile(String fname, HFileContext context, CacheConfig cacheConfig)
+    throws IOException {
     Path storeFileParentDir = new Path(TEST_UTIL.getDataTestDir(), fname);
-    StoreFileWriter sfw = new StoreFileWriter.Builder(conf, cacheConf, fs)
+    StoreFileWriter sfw = new StoreFileWriter.Builder(conf, cacheConfig, fs)
       .withOutputDir(storeFileParentDir).withFileContext(context).build();
     Random rand = ThreadLocalRandom.current();
     final int rowLen = 32;

Reply via email to