Repository: hbase
Updated Branches:
  refs/heads/branch-1 cfd5b6b59 -> 9036556a3


HBASE-16993 BucketCache throws java.io.IOException: Invalid HFile block magic
when configuring hbase.bucketcache.bucket.sizes.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9036556a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9036556a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9036556a

Branch: refs/heads/branch-1
Commit: 9036556a33c356225814c3ca50ecc09997269ea5
Parents: cfd5b6b
Author: anoopsamjohn <[email protected]>
Authored: Thu Jul 20 23:00:48 2017 +0530
Committer: anoopsamjohn <[email protected]>
Committed: Thu Jul 20 23:00:48 2017 +0530

----------------------------------------------------------------------
 hbase-common/src/main/resources/hbase-default.xml  |  2 +-
 .../apache/hadoop/hbase/io/hfile/CacheConfig.java  | 17 +++++++++++++----
 .../hadoop/hbase/io/hfile/TestCacheConfig.java     | 14 ++++++++++++++
 3 files changed, 28 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/9036556a/hbase-common/src/main/resources/hbase-default.xml
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/resources/hbase-default.xml 
b/hbase-common/src/main/resources/hbase-default.xml
index 3c4044d..a29e9c8 100644
--- a/hbase-common/src/main/resources/hbase-default.xml
+++ b/hbase-common/src/main/resources/hbase-default.xml
@@ -848,7 +848,7 @@ possible configurations would overwhelm and obscure the 
important.
     <description>A comma-separated list of sizes for buckets for the 
bucketcache.
     Can be multiple sizes. List block sizes in order from smallest to largest.
     The sizes you use will depend on your data access patterns.
-    Must be a multiple of 1024 else you will run into
+    Must be a multiple of 256 else you will run into
     'java.io.IOException: Invalid HFile block magic' when you go to read from 
cache.
     If you specify no values here, then you pick up the default bucketsizes set
     in code (See BucketAllocator#DEFAULT_BUCKET_SIZES). 

http://git-wip-us.apache.org/repos/asf/hbase/blob/9036556a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index 340236b..57d2057 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -551,7 +551,8 @@ public class CacheConfig {
    * @return Returns L2 block cache instance (for now it is BucketCache 
BlockCache all the time)
    * or null if not supposed to be a L2.
    */
-  private static BlockCache getL2(final Configuration c) {
+  @VisibleForTesting
+  static BlockCache getL2(final Configuration c) {
     final boolean useExternal = c.getBoolean(EXTERNAL_BLOCKCACHE_KEY, 
EXTERNAL_BLOCKCACHE_DEFAULT);
     if (LOG.isDebugEnabled()) {
       LOG.debug("Trying to use " + (useExternal?" External":" Internal") + " 
l2 cache");
@@ -561,10 +562,8 @@ public class CacheConfig {
     if (useExternal) {
       return getExternalBlockcache(c);
     }
-
     // otherwise use the bucket cache.
     return getBucketCache(c);
-
   }
 
   private static BlockCache getExternalBlockcache(Configuration c) {
@@ -618,7 +617,17 @@ public class CacheConfig {
     if (configuredBucketSizes != null) {
       bucketSizes = new int[configuredBucketSizes.length];
       for (int i = 0; i < configuredBucketSizes.length; i++) {
-        bucketSizes[i] = Integer.parseInt(configuredBucketSizes[i].trim());
+        int bucketSize = Integer.parseInt(configuredBucketSizes[i].trim());
+        if (bucketSize % 256 != 0) {
+          // We need all the bucket sizes to be multiples of 256. Having all 
the configured bucket
+          // sizes to be multiples of 256 will ensure that the block offsets 
within buckets,
+          // that are calculated, will also be multiples of 256.
+          // See BucketEntry where offset to each block is represented using 5 
bytes (instead of 8
+          // bytes long). We would like to save heap overhead as less as 
possible.
+          throw new IllegalArgumentException("Illegal value: " + bucketSize + 
" configured for '"
+              + BUCKET_CACHE_BUCKETS_KEY + "'. All bucket sizes to be 
multiples of 256");
+        }
+        bucketSizes[i] = bucketSize;
       }
     }
     BucketCache bucketCache = null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/9036556a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
index 0175224..3f60cd7 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.io.hfile;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.IOException;
 import java.lang.management.ManagementFactory;
@@ -336,6 +337,19 @@ public class TestCacheConfig {
     assertDataBlockCount(lrubc, 1);
   }
 
+  @Test
+  public void testL2CacheWithInvalidBucketSize() {
+    Configuration c = new Configuration(this.conf);
+    c.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");
+    c.set(CacheConfig.BUCKET_CACHE_BUCKETS_KEY, "256,512,1024,2048,4000,4096");
+    c.setFloat(HConstants.BUCKET_CACHE_SIZE_KEY, 1024);
+    try {
+      CacheConfig.getL2(c);
+      fail("Should throw IllegalArgumentException when passing illegal value 
for bucket size");
+    } catch (IllegalArgumentException e) {
+    }
+  }
+
   private void assertDataBlockCount(final LruBlockCache bc, final int 
expected) {
     Map<BlockType, Integer> blocks = bc.getBlockTypeCountsForTest();
     assertEquals(expected, blocks == null? 0:

Reply via email to