Repository: hbase
Updated Branches:
refs/heads/branch-1 65375f825 -> cd59a023c
HBASE-11550 Custom value for BUCKET_CACHE_BUCKETS_KEY should be sorted (Gustavo Anatoly)
Conflicts:
hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cd59a023
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cd59a023
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cd59a023
Branch: refs/heads/branch-1
Commit: cd59a023c4f84ba458b9665e1a12add2f702ad96
Parents: 65375f8
Author: Nick Dimiduk <[email protected]>
Authored: Wed Aug 13 21:07:31 2014 -0700
Committer: Nick Dimiduk <[email protected]>
Committed: Wed Aug 13 21:13:05 2014 -0700
----------------------------------------------------------------------
.../apache/hadoop/hbase/io/hfile/CacheConfig.java | 2 +-
.../hbase/io/hfile/bucket/BucketAllocator.java | 15 +++++++--------
.../hadoop/hbase/io/hfile/bucket/BucketCache.java | 8 +++-----
3 files changed, 11 insertions(+), 14 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/cd59a023/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index bf84222..82bbeee 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -484,7 +484,7 @@ public class CacheConfig {
if (configuredBucketSizes != null) {
bucketSizes = new int[configuredBucketSizes.length];
for (int i = 0; i < configuredBucketSizes.length; i++) {
- bucketSizes[i] = Integer.parseInt(configuredBucketSizes[i]);
+ bucketSizes[i] = Integer.parseInt(configuredBucketSizes[i].trim());
}
}
BucketCache bucketCache = null;
http://git-wip-us.apache.org/repos/asf/hbase/blob/cd59a023/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
index cf075ff..c7a88d2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
@@ -21,13 +21,11 @@
package org.apache.hadoop.hbase.io.hfile.bucket;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
-import com.google.common.base.Objects;
-import com.google.common.base.Preconditions;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -36,6 +34,10 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import com.google.common.base.Objects;
+import com.google.common.base.Preconditions;
+import com.google.common.primitives.Ints;
+
/**
 * This class is used to allocate a block with specified size and free the block
 * when evicting. It manages an array of buckets, each bucket is associated with
@@ -299,11 +301,8 @@ public final class BucketAllocator {
BucketAllocator(long availableSpace, int[] bucketSizes)
throws BucketAllocatorException {
this.bucketSizes = bucketSizes == null ? DEFAULT_BUCKET_SIZES : bucketSizes;
- int largestBucket = this.bucketSizes[0];
- for (int i : this.bucketSizes) {
- largestBucket = Math.max(largestBucket, i);
- }
- this.bigItemSize = largestBucket;
+ Arrays.sort(this.bucketSizes);
+ this.bigItemSize = Ints.max(this.bucketSizes);
this.bucketCapacity = FEWEST_ITEMS_IN_BUCKET * bigItemSize;
buckets = new Bucket[(int) (availableSpace / bucketCapacity)];
if (buckets.length < this.bucketSizes.length)
http://git-wip-us.apache.org/repos/asf/hbase/blob/cd59a023/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 97d515a..e110811 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -52,6 +52,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
+import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;
import org.apache.hadoop.hbase.io.hfile.BlockPriority;
import org.apache.hadoop.hbase.io.hfile.BlockType;
import org.apache.hadoop.hbase.io.hfile.CacheStats;
@@ -59,7 +60,6 @@ import org.apache.hadoop.hbase.io.hfile.Cacheable;
import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;
import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;
import org.apache.hadoop.hbase.io.hfile.CachedBlock;
-import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;
import org.apache.hadoop.hbase.io.hfile.CombinedBlockCache;
import org.apache.hadoop.hbase.io.hfile.HFileBlock;
import org.apache.hadoop.hbase.util.ConcurrentIndex;
@@ -166,7 +166,6 @@ public class BucketCache implements BlockCache, HeapSize {
private long cacheCapacity;
/** Approximate block size */
private final long blockSize;
- private final int[] bucketSizes;
/** Duration of IO errors tolerated before we disable cache, 1 min as default */
private final int ioErrorsTolerationDuration;
@@ -228,7 +227,6 @@ public class BucketCache implements BlockCache, HeapSize {
this.cacheCapacity = capacity;
this.persistencePath = persistencePath;
this.blockSize = blockSize;
- this.bucketSizes = bucketSizes;
this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;
bucketAllocator = new BucketAllocator(capacity, bucketSizes);
@@ -244,7 +242,7 @@ public class BucketCache implements BlockCache, HeapSize {
if (ioEngine.isPersistent() && persistencePath != null) {
try {
- retrieveFromFile();
+ retrieveFromFile(bucketSizes);
} catch (IOException ioex) {
LOG.error("Can't restore from file because of", ioex);
} catch (ClassNotFoundException cnfe) {
@@ -866,7 +864,7 @@ public class BucketCache implements BlockCache, HeapSize {
}
@SuppressWarnings("unchecked")
- private void retrieveFromFile() throws IOException, BucketAllocatorException,
+ private void retrieveFromFile(int[] bucketSizes) throws IOException, BucketAllocatorException,
ClassNotFoundException {
File persistenceFile = new File(persistencePath);
if (!persistenceFile.exists()) {