Repository: hbase
Updated Branches:
  refs/heads/master 92c3b877c -> c60cfbc99


HBASE-11550 Custom value for BUCKET_CACHE_BUCKETS_KEY should be sorted (Gustavo Anatoly)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c60cfbc9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c60cfbc9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c60cfbc9

Branch: refs/heads/master
Commit: c60cfbc999e6ae2222064632e297fd361c8c16e4
Parents: 92c3b87
Author: Nick Dimiduk <[email protected]>
Authored: Wed Aug 13 21:07:31 2014 -0700
Committer: Nick Dimiduk <[email protected]>
Committed: Wed Aug 13 21:07:41 2014 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hbase/io/hfile/CacheConfig.java    |  2 +-
 .../hbase/io/hfile/bucket/BucketAllocator.java       | 15 +++++++--------
 .../hadoop/hbase/io/hfile/bucket/BucketCache.java    |  8 +++-----
 3 files changed, 11 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/c60cfbc9/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index 51f3543..b8dd80e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -485,7 +485,7 @@ public class CacheConfig {
     if (configuredBucketSizes != null) {
       bucketSizes = new int[configuredBucketSizes.length];
       for (int i = 0; i < configuredBucketSizes.length; i++) {
-        bucketSizes[i] = Integer.parseInt(configuredBucketSizes[i]);
+        bucketSizes[i] = Integer.parseInt(configuredBucketSizes[i].trim());
       }
     }
     BucketCache bucketCache = null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/c60cfbc9/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
index 831cd66..d0eb696 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
@@ -20,14 +20,12 @@
 
 package org.apache.hadoop.hbase.io.hfile.bucket;
 
+import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicLong;
 
-import com.google.common.base.Objects;
-import com.google.common.base.Preconditions;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -36,6 +34,10 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry;
 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 
+import com.google.common.base.Objects;
+import com.google.common.base.Preconditions;
+import com.google.common.primitives.Ints;
+
 /**
  * This class is used to allocate a block with specified size and free the block
  * when evicting. It manages an array of buckets, each bucket is associated with
@@ -299,11 +301,8 @@ public final class BucketAllocator {
   BucketAllocator(long availableSpace, int[] bucketSizes)
       throws BucketAllocatorException {
     this.bucketSizes = bucketSizes == null ? DEFAULT_BUCKET_SIZES : bucketSizes;
-    int largestBucket = this.bucketSizes[0];
-    for (int i : this.bucketSizes) {
-      largestBucket = Math.max(largestBucket, i);
-    }
-    this.bigItemSize = largestBucket;
+    Arrays.sort(this.bucketSizes);
+    this.bigItemSize = Ints.max(this.bucketSizes);
     this.bucketCapacity = FEWEST_ITEMS_IN_BUCKET * bigItemSize;
     buckets = new Bucket[(int) (availableSpace / bucketCapacity)];
     if (buckets.length < this.bucketSizes.length)

http://git-wip-us.apache.org/repos/asf/hbase/blob/c60cfbc9/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 2ad1c71..86c8944 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -52,6 +52,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
 import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
+import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;
 import org.apache.hadoop.hbase.io.hfile.BlockPriority;
 import org.apache.hadoop.hbase.io.hfile.BlockType;
 import org.apache.hadoop.hbase.io.hfile.CacheStats;
@@ -59,7 +60,6 @@ import org.apache.hadoop.hbase.io.hfile.Cacheable;
 import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;
 import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;
 import org.apache.hadoop.hbase.io.hfile.CachedBlock;
-import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;
 import org.apache.hadoop.hbase.io.hfile.CombinedBlockCache;
 import org.apache.hadoop.hbase.io.hfile.HFileBlock;
 import org.apache.hadoop.hbase.util.ConcurrentIndex;
@@ -166,7 +166,6 @@ public class BucketCache implements BlockCache, HeapSize {
   private long cacheCapacity;
   /** Approximate block size */
   private final long blockSize;
-  private final int[] bucketSizes;
 
   /** Duration of IO errors tolerated before we disable cache, 1 min as default */
   private final int ioErrorsTolerationDuration;
@@ -228,7 +227,6 @@ public class BucketCache implements BlockCache, HeapSize {
     this.cacheCapacity = capacity;
     this.persistencePath = persistencePath;
     this.blockSize = blockSize;
-    this.bucketSizes = bucketSizes;
     this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;
 
     bucketAllocator = new BucketAllocator(capacity, bucketSizes);
@@ -244,7 +242,7 @@ public class BucketCache implements BlockCache, HeapSize {
 
     if (ioEngine.isPersistent() && persistencePath != null) {
       try {
-        retrieveFromFile();
+        retrieveFromFile(bucketSizes);
       } catch (IOException ioex) {
         LOG.error("Can't restore from file because of", ioex);
       } catch (ClassNotFoundException cnfe) {
@@ -868,7 +866,7 @@ public class BucketCache implements BlockCache, HeapSize {
   }
 
   @SuppressWarnings("unchecked")
-  private void retrieveFromFile() throws IOException, BucketAllocatorException,
+  private void retrieveFromFile(int[] bucketSizes) throws IOException, BucketAllocatorException,
       ClassNotFoundException {
     File persistenceFile = new File(persistencePath);
     if (!persistenceFile.exists()) {

Reply via email to