hbase git commit: HBASE-15136 Explore different queuing behaviors while busy

2016-02-24 Thread antonov
Repository: hbase
Updated Branches:
  refs/heads/branch-1 13773254c -> 04a3b2733


HBASE-15136 Explore different queuing behaviors while busy


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/04a3b273
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/04a3b273
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/04a3b273

Branch: refs/heads/branch-1
Commit: 04a3b27330839a33d75f71ac2d9997abc3c83957
Parents: 1377325
Author: Mikhail Antonov 
Authored: Wed Feb 24 20:40:44 2016 -0800
Committer: Mikhail Antonov 
Committed: Wed Feb 24 20:42:23 2016 -0800

--
 .../hadoop/hbase/util/ReflectionUtils.java  |   1 +
 .../hbase/ipc/MetricsHBaseServerSource.java |   6 +
 .../hbase/ipc/MetricsHBaseServerWrapper.java|   2 +
 .../hbase/ipc/MetricsHBaseServerSourceImpl.java |   6 +-
 .../hbase/ipc/AdaptiveLifoCoDelCallQueue.java   | 329 +++
 .../hadoop/hbase/ipc/FifoRpcScheduler.java  |  10 +
 .../ipc/MetricsHBaseServerWrapperImpl.java  |  16 +
 .../hadoop/hbase/ipc/RWQueueRpcExecutor.java|  10 +
 .../apache/hadoop/hbase/ipc/RpcScheduler.java   |  13 +
 .../hadoop/hbase/ipc/SimpleRpcScheduler.java|  70 +++-
 .../ipc/MetricsHBaseServerWrapperStub.java  |  10 +
 .../hbase/ipc/TestSimpleRpcScheduler.java   |  63 
 12 files changed, 534 insertions(+), 2 deletions(-)
--
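
The interesting new piece in this change is AdaptiveLifoCoDelCallQueue, together with the numGeneralCallsDropped and numLifoModeSwitches metrics added further down. As a rough illustration only (simplified names, thresholds and accounting, not HBase's actual implementation), such a queue serves calls FIFO while lightly loaded, switches to serving the newest calls first once it is nearly full, and sheds calls that have already waited longer than a CoDel-style target:

    import java.util.concurrent.BlockingDeque;
    import java.util.concurrent.LinkedBlockingDeque;
    import java.util.concurrent.atomic.AtomicLong;

    /** Illustrative sketch of an adaptive-LIFO CoDel call queue; not the HBase class. */
    public class AdaptiveLifoCoDelSketch<T> {
      private static final class Entry<T> {
        final T call;
        final long enqueueNanos = System.nanoTime();
        Entry(T call) { this.call = call; }
      }

      private final BlockingDeque<Entry<T>> deque;
      private final int capacity;
      private final double lifoThreshold;      // fraction full at which we go LIFO, e.g. 0.8
      private final long codelTargetNanos;     // max tolerated queue time while busy
      private final AtomicLong droppedCalls = new AtomicLong(); // ~ numGeneralCallsDropped
      private final AtomicLong lifoServes = new AtomicLong();   // ~ numLifoModeSwitches (simplified)

      public AdaptiveLifoCoDelSketch(int capacity, double lifoThreshold, long codelTargetMillis) {
        this.deque = new LinkedBlockingDeque<>(capacity);
        this.capacity = capacity;
        this.lifoThreshold = lifoThreshold;
        this.codelTargetNanos = codelTargetMillis * 1_000_000L;
      }

      public boolean offer(T call) {
        return deque.offer(new Entry<>(call));
      }

      /** Blocks until a call that is still worth handling is available. */
      public T take() throws InterruptedException {
        while (true) {
          boolean busy = deque.size() > capacity * lifoThreshold;
          Entry<T> e = busy ? deque.takeLast() : deque.takeFirst();
          if (busy) {
            lifoServes.incrementAndGet();
            // CoDel-style shedding: while busy, refuse calls that already waited too long;
            // the server would answer them with a "busy" error instead of handling them.
            if (System.nanoTime() - e.enqueueNanos > codelTargetNanos) {
              droppedCalls.incrementAndGet();
              continue;
            }
          }
          return e.call;
        }
      }
    }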


http://git-wip-us.apache.org/repos/asf/hbase/blob/04a3b273/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
index 650c544..15b3930 100644
--- 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
+++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
@@ -85,6 +85,7 @@ public class ReflectionUtils {
 match = (!ctorParamTypes[i].isPrimitive()) ? 
ctorParamTypes[i].isAssignableFrom(paramType) :
   ((int.class.equals(ctorParamTypes[i]) && 
Integer.class.equals(paramType)) ||
(long.class.equals(ctorParamTypes[i]) && 
Long.class.equals(paramType)) ||
+   (double.class.equals(ctorParamTypes[i]) && 
Double.class.equals(paramType)) ||
(char.class.equals(ctorParamTypes[i]) && 
Character.class.equals(paramType)) ||
(short.class.equals(ctorParamTypes[i]) && 
Short.class.equals(paramType)) ||
(boolean.class.equals(ctorParamTypes[i]) && 
Boolean.class.equals(paramType)) ||

http://git-wip-us.apache.org/repos/asf/hbase/blob/04a3b273/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
index 061a672..bb89789 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
@@ -64,6 +64,12 @@ public interface MetricsHBaseServerSource extends BaseSource 
{
   String NUM_OPEN_CONNECTIONS_DESC = "Number of open connections.";
   String NUM_ACTIVE_HANDLER_NAME = "numActiveHandler";
   String NUM_ACTIVE_HANDLER_DESC = "Number of active rpc handlers.";
+  String NUM_GENERAL_CALLS_DROPPED_NAME = "numGeneralCallsDropped";
+  String NUM_GENERAL_CALLS_DROPPED_DESC = "Total number of calls in general 
queue which " +
+"were dropped by CoDel RPC executor";
+  String NUM_LIFO_MODE_SWITCHES_NAME = "numLifoModeSwitches";
+  String NUM_LIFO_MODE_SWITCHES_DESC = "Total number of calls in general queue 
which " +
+"were served from the tail of the queue";
 
   String EXCEPTIONS_NAME="exceptions";
   String EXCEPTIONS_DESC="Exceptions caused by requests";

http://git-wip-us.apache.org/repos/asf/hbase/blob/04a3b273/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
index 1885264..8f30205 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
@@ 

hbase git commit: HBASE-15136 Explore different queuing behaviors while busy

2016-02-24 Thread antonov
Repository: hbase
Updated Branches:
  refs/heads/master 6e9d355b1 -> 43f99def6


HBASE-15136 Explore different queuing behaviors while busy


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/43f99def
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/43f99def
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/43f99def

Branch: refs/heads/master
Commit: 43f99def670551cfe314c44181c0cb9570cdaaa3
Parents: 6e9d355
Author: Mikhail Antonov 
Authored: Wed Feb 24 20:40:44 2016 -0800
Committer: Mikhail Antonov 
Committed: Wed Feb 24 20:41:30 2016 -0800

--
 .../hadoop/hbase/util/ReflectionUtils.java  |   1 +
 .../hbase/ipc/MetricsHBaseServerSource.java |   6 +
 .../hbase/ipc/MetricsHBaseServerWrapper.java|   2 +
 .../hbase/ipc/MetricsHBaseServerSourceImpl.java |   6 +-
 .../hbase/ipc/AdaptiveLifoCoDelCallQueue.java   | 329 +++
 .../hadoop/hbase/ipc/FifoRpcScheduler.java  |  10 +
 .../ipc/MetricsHBaseServerWrapperImpl.java  |  16 +
 .../hadoop/hbase/ipc/RWQueueRpcExecutor.java|  10 +
 .../apache/hadoop/hbase/ipc/RpcScheduler.java   |  13 +
 .../hadoop/hbase/ipc/SimpleRpcScheduler.java|  70 +++-
 .../ipc/MetricsHBaseServerWrapperStub.java  |  10 +
 .../hbase/ipc/TestSimpleRpcScheduler.java   |  63 
 12 files changed, 534 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/43f99def/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
index 650c544..15b3930 100644
--- 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
+++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
@@ -85,6 +85,7 @@ public class ReflectionUtils {
 match = (!ctorParamTypes[i].isPrimitive()) ? 
ctorParamTypes[i].isAssignableFrom(paramType) :
   ((int.class.equals(ctorParamTypes[i]) && 
Integer.class.equals(paramType)) ||
(long.class.equals(ctorParamTypes[i]) && 
Long.class.equals(paramType)) ||
+   (double.class.equals(ctorParamTypes[i]) && 
Double.class.equals(paramType)) ||
(char.class.equals(ctorParamTypes[i]) && 
Character.class.equals(paramType)) ||
(short.class.equals(ctorParamTypes[i]) && 
Short.class.equals(paramType)) ||
(boolean.class.equals(ctorParamTypes[i]) && 
Boolean.class.equals(paramType)) ||

http://git-wip-us.apache.org/repos/asf/hbase/blob/43f99def/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
index 061a672..bb89789 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
@@ -64,6 +64,12 @@ public interface MetricsHBaseServerSource extends BaseSource 
{
   String NUM_OPEN_CONNECTIONS_DESC = "Number of open connections.";
   String NUM_ACTIVE_HANDLER_NAME = "numActiveHandler";
   String NUM_ACTIVE_HANDLER_DESC = "Number of active rpc handlers.";
+  String NUM_GENERAL_CALLS_DROPPED_NAME = "numGeneralCallsDropped";
+  String NUM_GENERAL_CALLS_DROPPED_DESC = "Total number of calls in general 
queue which " +
+"were dropped by CoDel RPC executor";
+  String NUM_LIFO_MODE_SWITCHES_NAME = "numLifoModeSwitches";
+  String NUM_LIFO_MODE_SWITCHES_DESC = "Total number of calls in general queue 
which " +
+"were served from the tail of the queue";
 
   String EXCEPTIONS_NAME="exceptions";
   String EXCEPTIONS_DESC="Exceptions caused by requests";

http://git-wip-us.apache.org/repos/asf/hbase/blob/43f99def/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
index 1885264..8f30205 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
@@ -26,4 

hbase git commit: HBASE-15264 Implement a fan out HDFS OutputStream

2016-02-24 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master a3b4575f7 -> 6e9d355b1


HBASE-15264 Implement a fan out HDFS OutputStream


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6e9d355b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6e9d355b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6e9d355b

Branch: refs/heads/master
Commit: 6e9d355b12a1e666f4d05be02775a01b6754d063
Parents: a3b4575
Author: zhangduo 
Authored: Wed Feb 24 20:47:38 2016 +0800
Committer: zhangduo 
Committed: Thu Feb 25 10:07:27 2016 +0800

--
 .../util/FanOutOneBlockAsyncDFSOutput.java  | 533 +++
 .../FanOutOneBlockAsyncDFSOutputHelper.java | 672 +++
 ...anOutOneBlockAsyncDFSOutputFlushHandler.java |  61 ++
 .../util/TestFanOutOneBlockAsyncDFSOutput.java  | 190 ++
 4 files changed, 1456 insertions(+)
--
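
Per the diffstat, the core of this change is FanOutOneBlockAsyncDFSOutput plus its helper. As a conceptual sketch only (plain NIO instead of the Netty and DFS data-transfer protocol machinery the real classes use, and without the per-datanode ack tracking suggested by the PipelineAck imports), "fan out" means the client pushes each packet to every replica connection itself rather than relying on the datanode pipeline, and a write completes only when every per-replica send has finished:

    import java.nio.ByteBuffer;
    import java.nio.channels.AsynchronousSocketChannel;
    import java.nio.channels.CompletionHandler;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.CompletableFuture;

    /** Illustrative fan-out writer; not the HBase implementation. */
    public class FanOutSketch {
      private final List<AsynchronousSocketChannel> replicas; // one connection per datanode

      public FanOutSketch(List<AsynchronousSocketChannel> replicas) {
        this.replicas = replicas;
      }

      /** Completes once the packet has been written to every replica connection. */
      public CompletableFuture<Void> write(byte[] packet) {
        List<CompletableFuture<Void>> writes = new ArrayList<>();
        for (AsynchronousSocketChannel ch : replicas) {
          writes.add(writeFully(ch, ByteBuffer.wrap(packet)));
        }
        return CompletableFuture.allOf(writes.toArray(new CompletableFuture[0]));
      }

      private static CompletableFuture<Void> writeFully(AsynchronousSocketChannel ch, ByteBuffer buf) {
        CompletableFuture<Void> done = new CompletableFuture<>();
        ch.write(buf, buf, new CompletionHandler<Integer, ByteBuffer>() {
          @Override public void completed(Integer n, ByteBuffer b) {
            // Keep writing until the whole packet has been flushed to this replica.
            if (b.hasRemaining()) { ch.write(b, b, this); } else { done.complete(null); }
          }
          @Override public void failed(Throwable t, ByteBuffer b) { done.completeExceptionally(t); }
        });
        return done;
      }
    }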


http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9d355b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.java
new file mode 100644
index 000..b10f180
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.java
@@ -0,0 +1,533 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import static 
org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.HEART_BEAT_SEQNO;
+import static 
org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.completeFile;
+import static 
org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.endFileLease;
+import static 
org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.getStatus;
+import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.nio.channels.CompletionHandler;
+import java.util.ArrayDeque;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Deque;
+import java.util.IdentityHashMap;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import 
org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose;
+import org.apache.hadoop.hdfs.DFSClient;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.protocol.ClientProtocol;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
+import org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
+import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
+import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
+import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
+import org.apache.hadoop.util.DataChecksum;
+
+import com.google.common.base.Supplier;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.EventLoop;
+import io.netty.channel.SimpleChannelInboundHandler;
+import io.netty.handler.codec.protobuf.ProtobufDecoder;
+import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
+import io.netty.handler.timeout.IdleState;
+import io.netty.handler.timeout.IdleStateEvent;
+import io.netty.handler.timeout.IdleStateHandler;
+import io.netty.util.concurrent.Future;
+import io.netty.util.concurrent.FutureListener;
+import io.netty.util.concurrent.Promise;
+
+/**
+ * An 

hbase git commit: HBASE-15319 clearJmxCache does not take effect actually

2016-02-24 Thread eclark
Repository: hbase
Updated Branches:
  refs/heads/master 630a65825 -> a3b4575f7


HBASE-15319 clearJmxCache does not take effect actually


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a3b4575f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a3b4575f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a3b4575f

Branch: refs/heads/master
Commit: a3b4575f700c20167a6ab0b774d8a2c9cd3916af
Parents: 630a658
Author: Elliott Clark 
Authored: Wed Feb 24 09:02:06 2016 -0800
Committer: Elliott Clark 
Committed: Wed Feb 24 16:29:05 2016 -0800

--
 .../main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a3b4575f/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
index 95734ba..8fcf623 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
@@ -52,7 +52,7 @@ public class JmxCacheBuster {
   public static void clearJmxCache() {
 //If there are more then 100 ms before the executor will run then 
everything should be merged.
 ScheduledFuture future = fut.get();
-if ((future == null || (!future.isDone() && 
future.getDelay(TimeUnit.MILLISECONDS) > 100))) {
+if ((future != null && (!future.isDone() && 
future.getDelay(TimeUnit.MILLISECONDS) > 100))) {
   // BAIL OUT
   return;
 }
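
The one-line change above is the whole fix: with the old condition, future == null made the method bail out, so when nothing had been scheduled yet a JMX cache clear was never actually queued; the corrected condition only bails out when a clear task is already pending more than 100 ms away, into which the new request can be merged. A minimal sketch of that coalescing pattern, with hypothetical names rather than JmxCacheBuster's actual fields:

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.ScheduledFuture;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.atomic.AtomicReference;

    /** Illustrative coalescing scheduler; names are hypothetical, not JmxCacheBuster's. */
    public class CoalescingScheduler {
      private final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
      private final AtomicReference<ScheduledFuture<?>> fut = new AtomicReference<>();

      public void request(Runnable work, long delayMs) {
        ScheduledFuture<?> future = fut.get();
        // Merge into an already-pending run only if one exists and is far enough away.
        // Bailing out on future == null (the old behaviour) means the very first
        // request never schedules anything at all.
        if (future != null && !future.isDone() && future.getDelay(TimeUnit.MILLISECONDS) > 100) {
          return;
        }
        fut.set(executor.schedule(work, delayMs, TimeUnit.MILLISECONDS));
      }
    }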



hbase git commit: HBASE-15319 clearJmxCache does not take effect actually

2016-02-24 Thread eclark
Repository: hbase
Updated Branches:
  refs/heads/branch-1 3352173ec -> 13773254c


HBASE-15319 clearJmxCache does not take effect actually


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/13773254
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/13773254
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/13773254

Branch: refs/heads/branch-1
Commit: 13773254c87994c6cffdb3fbc48521d51c86d8c5
Parents: 3352173
Author: Elliott Clark 
Authored: Wed Feb 24 09:02:06 2016 -0800
Committer: Elliott Clark 
Committed: Wed Feb 24 16:29:12 2016 -0800

--
 .../main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/13773254/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
index 95734ba..8fcf623 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
@@ -52,7 +52,7 @@ public class JmxCacheBuster {
   public static void clearJmxCache() {
 //If there are more then 100 ms before the executor will run then 
everything should be merged.
 ScheduledFuture future = fut.get();
-if ((future == null || (!future.isDone() && 
future.getDelay(TimeUnit.MILLISECONDS) > 100))) {
+if ((future != null && (!future.isDone() && 
future.getDelay(TimeUnit.MILLISECONDS) > 100))) {
   // BAIL OUT
   return;
 }



hbase git commit: HBASE-15319 clearJmxCache does not take effect actually

2016-02-24 Thread eclark
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 f502f4ac9 -> 4710463d2


HBASE-15319 clearJmxCache does not take effect actually


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4710463d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4710463d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4710463d

Branch: refs/heads/branch-1.2
Commit: 4710463d2d7191425cc54ebe4d62e3f4ea6672dd
Parents: f502f4a
Author: Elliott Clark 
Authored: Wed Feb 24 09:02:06 2016 -0800
Committer: Elliott Clark 
Committed: Wed Feb 24 16:29:22 2016 -0800

--
 .../main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4710463d/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
index 95734ba..8fcf623 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
@@ -52,7 +52,7 @@ public class JmxCacheBuster {
   public static void clearJmxCache() {
 //If there are more then 100 ms before the executor will run then 
everything should be merged.
 ScheduledFuture future = fut.get();
-if ((future == null || (!future.isDone() && 
future.getDelay(TimeUnit.MILLISECONDS) > 100))) {
+if ((future != null && (!future.isDone() && 
future.getDelay(TimeUnit.MILLISECONDS) > 100))) {
   // BAIL OUT
   return;
 }



[3/3] hbase git commit: HBASE-15222 Use less contended classes for metrics

2016-02-24 Thread eclark
HBASE-15222 Use less contended classes for metrics

Summary:
Use less contended things for metrics.
For histogram which was the largest culprit we use FastLongHistogram
For atomic long where possible we now use counter.

Test Plan: unit tests

Reviewers:

Subscribers:

Differential Revision: https://reviews.facebook.net/D54381


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3352173e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3352173e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3352173e

Branch: refs/heads/branch-1
Commit: 3352173ec87debdfe115755768c3f9c0ca82bb0e
Parents: b4f747f
Author: Elliott Clark 
Authored: Thu Feb 18 09:54:05 2016 -0800
Committer: Elliott Clark 
Committed: Wed Feb 24 14:47:00 2016 -0800

--
 .../hadoop/hbase/util/FastLongHistogram.java| 162 ++-
 .../hbase/util/TestFastLongHistogram.java   |  37 -
 .../apache/hadoop/hbase/metrics/BaseSource.java |   8 -
 .../apache/hadoop/metrics2/MetricHistogram.java |   3 +
 hbase-hadoop2-compat/pom.xml|   8 +-
 .../hbase/ipc/MetricsHBaseServerSourceImpl.java |  50 +++---
 .../MetricsAssignmentManagerSourceImpl.java |  10 +-
 .../MetricsMasterFilesystemSourceImpl.java  |  14 +-
 .../hbase/master/MetricsMasterSourceImpl.java   |   4 +-
 .../hbase/master/MetricsSnapshotSourceImpl.java |   8 +-
 .../balancer/MetricsBalancerSourceImpl.java |   8 +-
 .../hadoop/hbase/metrics/BaseSourceImpl.java|  17 +-
 .../MetricsRegionServerSourceImpl.java  |  16 +-
 .../regionserver/MetricsRegionSourceImpl.java   |  24 +--
 .../regionserver/wal/MetricsWALSourceImpl.java  |  10 +-
 .../MetricsReplicationGlobalSourceSource.java   |  36 ++---
 .../MetricsReplicationSinkSourceImpl.java   |  16 +-
 .../MetricsReplicationSourceSourceImpl.java |  36 ++---
 .../hbase/rest/MetricsRESTSourceImpl.java   |  38 ++---
 .../thrift/MetricsThriftServerSourceImpl.java   |  13 +-
 .../metrics2/lib/DynamicMetricsRegistry.java| 103 ++--
 .../metrics2/lib/MetricMutableQuantiles.java| 154 --
 .../metrics2/lib/MetricsExecutorImpl.java   |   2 +-
 .../hadoop/metrics2/lib/MutableFastCounter.java |  60 +++
 .../hadoop/metrics2/lib/MutableHistogram.java   | 135 ++--
 .../metrics2/lib/MutableRangeHistogram.java |  75 -
 .../metrics2/lib/MutableSizeHistogram.java  |  25 ++-
 .../metrics2/lib/MutableTimeHistogram.java  |  23 ++-
 .../hbase/metrics/TestBaseSourceImpl.java   |   5 +-
 .../tmpl/regionserver/BlockCacheTmpl.jamon  |   8 -
 .../tmpl/regionserver/BlockCacheViewTmpl.jamon  |   1 -
 .../tmpl/regionserver/ServerMetricsTmpl.jamon   |   1 -
 .../hadoop/hbase/io/hfile/AgeSnapshot.java  |  38 ++---
 .../hadoop/hbase/io/hfile/BlockCacheUtil.java   |  32 ++--
 .../hadoop/hbase/io/hfile/CacheStats.java   |  53 +++---
 .../org/apache/hadoop/hbase/io/hfile/HFile.java |  10 +-
 .../hadoop/hbase/io/hfile/HFileBlock.java   |   2 +-
 .../hadoop/hbase/io/hfile/HFileReaderV2.java|   4 +-
 .../hbase/io/hfile/bucket/BucketCacheStats.java |  11 +-
 39 files changed, 563 insertions(+), 697 deletions(-)
--
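
The summary's two substitutions are about write contention on hot metrics: handler threads updating a single AtomicLong all compare-and-swap the same memory word, while a striped counter (the Counter/MutableFastCounter classes touched in this patch) spreads increments across cells and only pays the cost at read time. A stand-in sketch using the JDK's LongAdder to show the same trade-off, not the classes this patch introduces:

    import java.util.concurrent.atomic.AtomicLong;
    import java.util.concurrent.atomic.LongAdder;

    /** Contention trade-off sketch; LongAdder stands in for the striped counters used here. */
    public class CounterContentionSketch {
      private final AtomicLong contended = new AtomicLong(); // every increment CASes one shared word
      private final LongAdder striped = new LongAdder();     // increments land on per-thread cells

      public void onEvent() {
        contended.incrementAndGet();
        striped.increment();
      }

      public long contendedValue() { return contended.get(); }
      public long stripedValue()   { return striped.sum(); } // reads sum the cells, slightly costlier
    }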


http://git-wip-us.apache.org/repos/asf/hbase/blob/3352173e/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
index 623cbdb..78b2bf0 100644
--- 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
+++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.util;
 
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.atomic.AtomicLongArray;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
@@ -31,11 +30,20 @@ import 
org.apache.hadoop.hbase.classification.InterfaceStability;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class FastLongHistogram {
+
+  /**
+   * Default number of bins.
+   */
+  public static final int DEFAULT_NBINS = 255;
+
+  public static final double[] DEFAULT_QUANTILES =
+  new double[]{0.25, 0.5, 0.75, 0.90, 0.95, 0.98, 0.99, 0.999};
+
   /**
* Bins is a class containing a list of buckets(or bins) for estimation 
histogram of some data.
*/
   private static class Bins {
-private final AtomicLongArray counts;
+private final Counter[] counts;
 // inclusive
 private final 

[2/2] hbase git commit: HBASE-15222 Use less contended classes for metrics

2016-02-24 Thread eclark
HBASE-15222 Use less contended classes for metrics

Summary:
Use less contended things for metrics.
For histogram which was the largest culprit we use FastLongHistogram
For atomic long where possible we now use counter.

Test Plan: unit tests

Reviewers:

Subscribers:

Differential Revision: https://reviews.facebook.net/D54381


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/630a6582
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/630a6582
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/630a6582

Branch: refs/heads/master
Commit: 630a65825ed9a9c00f72bbfcac0588e1ab0cdd72
Parents: 20e14f4
Author: Elliott Clark 
Authored: Thu Feb 18 09:54:05 2016 -0800
Committer: Elliott Clark 
Committed: Wed Feb 24 14:34:05 2016 -0800

--
 .../hadoop/hbase/util/FastLongHistogram.java| 162 ++-
 .../hbase/util/TestFastLongHistogram.java   |  32 
 .../apache/hadoop/hbase/metrics/BaseSource.java |   8 -
 .../apache/hadoop/metrics2/MetricHistogram.java |   3 +
 hbase-hadoop2-compat/pom.xml|   8 +-
 .../hbase/ipc/MetricsHBaseServerSourceImpl.java |  50 +++---
 .../MetricsAssignmentManagerSourceImpl.java |  10 +-
 .../MetricsMasterFilesystemSourceImpl.java  |  14 +-
 .../hbase/master/MetricsMasterSourceImpl.java   |   4 +-
 .../hbase/master/MetricsSnapshotSourceImpl.java |   8 +-
 .../balancer/MetricsBalancerSourceImpl.java |   8 +-
 .../hadoop/hbase/metrics/BaseSourceImpl.java|  17 +-
 .../MetricsRegionServerSourceImpl.java  |  16 +-
 .../regionserver/MetricsRegionSourceImpl.java   |  24 +--
 .../regionserver/wal/MetricsWALSourceImpl.java  |  10 +-
 .../MetricsReplicationGlobalSourceSource.java   |  36 ++---
 .../MetricsReplicationSinkSourceImpl.java   |  16 +-
 .../MetricsReplicationSourceSourceImpl.java |  36 ++---
 .../hbase/rest/MetricsRESTSourceImpl.java   |  38 ++---
 .../thrift/MetricsThriftServerSourceImpl.java   |  13 +-
 .../metrics2/lib/DynamicMetricsRegistry.java| 103 ++--
 .../metrics2/lib/MetricMutableQuantiles.java| 154 --
 .../metrics2/lib/MetricsExecutorImpl.java   |   2 +-
 .../hadoop/metrics2/lib/MutableFastCounter.java |  60 +++
 .../hadoop/metrics2/lib/MutableHistogram.java   | 133 +--
 .../metrics2/lib/MutableRangeHistogram.java |  75 -
 .../metrics2/lib/MutableSizeHistogram.java  |  25 ++-
 .../metrics2/lib/MutableTimeHistogram.java  |  23 ++-
 .../hbase/metrics/TestBaseSourceImpl.java   |   5 +-
 .../tmpl/regionserver/BlockCacheTmpl.jamon  |   8 -
 .../tmpl/regionserver/BlockCacheViewTmpl.jamon  |   1 -
 .../tmpl/regionserver/ServerMetricsTmpl.jamon   |   1 -
 .../hadoop/hbase/io/hfile/AgeSnapshot.java  |  38 +++--
 .../hadoop/hbase/io/hfile/BlockCacheUtil.java   |  31 ++--
 .../hadoop/hbase/io/hfile/CacheStats.java   |  54 +++
 .../org/apache/hadoop/hbase/io/hfile/HFile.java |  10 +-
 .../hadoop/hbase/io/hfile/HFileBlock.java   |   2 +-
 .../hadoop/hbase/io/hfile/HFileReaderImpl.java  |   4 +-
 .../hbase/io/hfile/bucket/BucketCacheStats.java |  11 +-
 .../hbase/regionserver/StoreFileScanner.java|  14 +-
 40 files changed, 565 insertions(+), 702 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/630a6582/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
index 623cbdb..78b2bf0 100644
--- 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
+++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.util;
 
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.atomic.AtomicLongArray;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
@@ -31,11 +30,20 @@ import 
org.apache.hadoop.hbase.classification.InterfaceStability;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class FastLongHistogram {
+
+  /**
+   * Default number of bins.
+   */
+  public static final int DEFAULT_NBINS = 255;
+
+  public static final double[] DEFAULT_QUANTILES =
+  new double[]{0.25, 0.5, 0.75, 0.90, 0.95, 0.98, 0.99, 0.999};
+
   /**
* Bins is a class containing a list of buckets(or bins) for estimation 
histogram of some data.
*/
   private static class Bins {
-private final AtomicLongArray counts;
+private final 

[1/3] hbase git commit: HBASE-12133 Add FastLongHistogram for metric computation (Yi Deng)

2016-02-24 Thread eclark
Repository: hbase
Updated Branches:
  refs/heads/branch-1 bcbe174a2 -> 3352173ec


HBASE-12133 Add FastLongHistogram for metric computation (Yi Deng)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b4f747f5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b4f747f5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b4f747f5

Branch: refs/heads/branch-1
Commit: b4f747f52e24afcf539a313a3bd2243ccf89b6b1
Parents: bcbe174
Author: stack 
Authored: Thu Oct 2 10:38:56 2014 -0700
Committer: Elliott Clark 
Committed: Wed Feb 24 14:35:22 2016 -0800

--
 .../apache/hadoop/hbase/util/AtomicUtils.java   |  63 +
 .../hadoop/hbase/util/FastLongHistogram.java| 233 +++
 .../hbase/util/TestFastLongHistogram.java   |  99 
 3 files changed, 395 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b4f747f5/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AtomicUtils.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AtomicUtils.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AtomicUtils.java
new file mode 100644
index 000..35391ee
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AtomicUtils.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+/**
+ * Utilities related to atomic operations.
+ */
+@InterfaceAudience.Private
+public class AtomicUtils {
+  /**
+   * Updates a AtomicLong which is supposed to maintain the minimum values. 
This method is not
+   * synchronized but is thread-safe.
+   */
+  public static void updateMin(AtomicLong min, long value) {
+while (true) {
+  long cur = min.get();
+  if (value >= cur) {
+break;
+  }
+
+  if (min.compareAndSet(cur, value)) {
+break;
+  }
+}
+  }
+
+  /**
+   * Updates a AtomicLong which is supposed to maintain the maximum values. 
This method is not
+   * synchronized but is thread-safe.
+   */
+  public static void updateMax(AtomicLong max, long value) {
+while (true) {
+  long cur = max.get();
+  if (value <= cur) {
+break;
+  }
+
+  if (max.compareAndSet(cur, value)) {
+break;
+  }
+}
+  }
+
+}
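
A quick usage sketch for the two CAS-retry helpers above (the AtomicUtils class and method names come from this diff; the surrounding tracker is made up): they let many threads maintain a running minimum and maximum without locking, retrying the compare-and-set only when another thread slipped in between the read and the update.

    import java.util.concurrent.atomic.AtomicLong;

    import org.apache.hadoop.hbase.util.AtomicUtils;

    public class LatencyTracker {
      private final AtomicLong minNanos = new AtomicLong(Long.MAX_VALUE);
      private final AtomicLong maxNanos = new AtomicLong(Long.MIN_VALUE);

      public void record(long nanos) {
        AtomicUtils.updateMin(minNanos, nanos); // retries until minNanos <= nanos or the CAS wins
        AtomicUtils.updateMax(maxNanos, nanos);
      }

      public long minNanos() { return minNanos.get(); }
      public long maxNanos() { return maxNanos.get(); }
    }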

http://git-wip-us.apache.org/repos/asf/hbase/blob/b4f747f5/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
new file mode 100644
index 000..623cbdb
--- /dev/null
+++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
@@ -0,0 +1,233 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+import 

[1/2] hbase git commit: HBASE-15222 Use less contended classes for metrics

2016-02-24 Thread eclark
Repository: hbase
Updated Branches:
  refs/heads/master 20e14f449 -> 630a65825


http://git-wip-us.apache.org/repos/asf/hbase/blob/630a6582/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableTimeHistogram.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableTimeHistogram.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableTimeHistogram.java
index 32d4fae..aaf4359 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableTimeHistogram.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableTimeHistogram.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.metrics2.lib;
 
-import java.util.concurrent.atomic.AtomicLongArray;
-
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.MetricsInfo;
 
@@ -28,31 +26,30 @@ import org.apache.hadoop.metrics2.MetricsInfo;
  */
 @InterfaceAudience.Private
 public class MutableTimeHistogram extends MutableRangeHistogram {
-  private final String rangeType = "TimeRangeCount";
-  private final long[] ranges =
+  private final static String RANGE_TYPE = "TimeRangeCount";
+  private final static long[] RANGES =
      { 1, 3, 10, 30, 100, 300, 1000, 3000, 10000, 30000, 60000, 120000, 300000, 600000 };
-  private final AtomicLongArray rangeVals = new 
AtomicLongArray(ranges.length+1);
 
   public MutableTimeHistogram(MetricsInfo info) {
 this(info.name(), info.description());
   }
 
   public MutableTimeHistogram(String name, String description) {
-super(name, description);
+this(name, description, RANGES[RANGES.length - 2]);
+  }
+
+  public MutableTimeHistogram(String name, String description, long 
expectedMax) {
+super(name, description, expectedMax);
   }
 
   @Override
   public String getRangeType() {
-return rangeType;
+return RANGE_TYPE;
   }
 
   @Override
-  public long[] getRange() {
-return ranges;
+  public long[] getRanges() {
+return RANGES;
   }
 
-  @Override
-  public AtomicLongArray getRangeVals() {
-return rangeVals;
-  } 
 }
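
For reference, the RANGES values above are time bucket boundaries (apparently milliseconds); a range histogram simply counts how many samples fall at or below each boundary, with a final overflow slot. A minimal, single-threaded sketch of that bucketing (illustrative only, not MutableRangeHistogram, which uses atomic counters):

    public class RangeBucketSketch {
      private static final long[] RANGES =
          { 1, 3, 10, 30, 100, 300, 1000, 3000, 10000, 30000, 60000, 120000, 300000, 600000 };
      private final long[] counts = new long[RANGES.length + 1]; // last slot catches overflow

      public void add(long millis) {
        int i = 0;
        while (i < RANGES.length && millis > RANGES[i]) {
          i++;
        }
        counts[i]++; // sample lands in the first bucket whose boundary it does not exceed
      }

      public long count(int bucket) {
        return counts[bucket];
      }
    }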

http://git-wip-us.apache.org/repos/asf/hbase/blob/630a6582/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/metrics/TestBaseSourceImpl.java
--
diff --git 
a/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/metrics/TestBaseSourceImpl.java
 
b/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/metrics/TestBaseSourceImpl.java
index 7381fb9..2e374f7 100644
--- 
a/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/metrics/TestBaseSourceImpl.java
+++ 
b/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/metrics/TestBaseSourceImpl.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.metrics;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.testclassification.MetricsTests;
 import org.apache.hadoop.metrics2.lib.MutableCounterLong;
+import org.apache.hadoop.metrics2.lib.MutableFastCounter;
 import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -72,9 +73,9 @@ public class TestBaseSourceImpl {
   @Test
   public void testIncCounters() throws Exception {
 bmsi.incCounters("testinccounter", 100);
-assertEquals(100, ((MutableCounterLong) 
bmsi.metricsRegistry.get("testinccounter")).value());
+assertEquals(100, ((MutableFastCounter) 
bmsi.metricsRegistry.get("testinccounter")).value());
 bmsi.incCounters("testinccounter", 100);
-assertEquals(200, ((MutableCounterLong) 
bmsi.metricsRegistry.get("testinccounter")).value());
+assertEquals(200, ((MutableFastCounter) 
bmsi.metricsRegistry.get("testinccounter")).value());
 
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/630a6582/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
index 6986f12..3dcd5e2 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
@@ -178,7 +178,6 @@ org.apache.hadoop.util.StringUtils;
   AgeSnapshot ageAtEvictionSnapshot = bc.getStats().getAgeAtEvictionSnapshot();
   // Only show if non-zero mean and stddev as is the case in combinedblockcache
   double mean = ageAtEvictionSnapshot.getMean();
-  double stddev = ageAtEvictionSnapshot.getStdDev();
 
 
(The rest of this hunk and the following one, @@ -197,13 +196,6 @@, drop the conditional StdDev row from the "Evicted" / "Mean age of Blocks at eviction time (seconds)" table; the template's HTML table markup was stripped by the archive.)

[2/3] hbase git commit: HBASE-15222 Use less contended classes for metrics

2016-02-24 Thread eclark
http://git-wip-us.apache.org/repos/asf/hbase/blob/3352173e/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableTimeHistogram.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableTimeHistogram.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableTimeHistogram.java
index 32d4fae..aaf4359 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableTimeHistogram.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableTimeHistogram.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.metrics2.lib;
 
-import java.util.concurrent.atomic.AtomicLongArray;
-
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.MetricsInfo;
 
@@ -28,31 +26,30 @@ import org.apache.hadoop.metrics2.MetricsInfo;
  */
 @InterfaceAudience.Private
 public class MutableTimeHistogram extends MutableRangeHistogram {
-  private final String rangeType = "TimeRangeCount";
-  private final long[] ranges =
+  private final static String RANGE_TYPE = "TimeRangeCount";
+  private final static long[] RANGES =
      { 1, 3, 10, 30, 100, 300, 1000, 3000, 10000, 30000, 60000, 120000, 300000, 600000 };
-  private final AtomicLongArray rangeVals = new 
AtomicLongArray(ranges.length+1);
 
   public MutableTimeHistogram(MetricsInfo info) {
 this(info.name(), info.description());
   }
 
   public MutableTimeHistogram(String name, String description) {
-super(name, description);
+this(name, description, RANGES[RANGES.length - 2]);
+  }
+
+  public MutableTimeHistogram(String name, String description, long 
expectedMax) {
+super(name, description, expectedMax);
   }
 
   @Override
   public String getRangeType() {
-return rangeType;
+return RANGE_TYPE;
   }
 
   @Override
-  public long[] getRange() {
-return ranges;
+  public long[] getRanges() {
+return RANGES;
   }
 
-  @Override
-  public AtomicLongArray getRangeVals() {
-return rangeVals;
-  } 
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/3352173e/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/metrics/TestBaseSourceImpl.java
--
diff --git 
a/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/metrics/TestBaseSourceImpl.java
 
b/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/metrics/TestBaseSourceImpl.java
index 7381fb9..2e374f7 100644
--- 
a/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/metrics/TestBaseSourceImpl.java
+++ 
b/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/metrics/TestBaseSourceImpl.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.metrics;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.testclassification.MetricsTests;
 import org.apache.hadoop.metrics2.lib.MutableCounterLong;
+import org.apache.hadoop.metrics2.lib.MutableFastCounter;
 import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -72,9 +73,9 @@ public class TestBaseSourceImpl {
   @Test
   public void testIncCounters() throws Exception {
 bmsi.incCounters("testinccounter", 100);
-assertEquals(100, ((MutableCounterLong) 
bmsi.metricsRegistry.get("testinccounter")).value());
+assertEquals(100, ((MutableFastCounter) 
bmsi.metricsRegistry.get("testinccounter")).value());
 bmsi.incCounters("testinccounter", 100);
-assertEquals(200, ((MutableCounterLong) 
bmsi.metricsRegistry.get("testinccounter")).value());
+assertEquals(200, ((MutableFastCounter) 
bmsi.metricsRegistry.get("testinccounter")).value());
 
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/3352173e/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
index 6986f12..3dcd5e2 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
@@ -178,7 +178,6 @@ org.apache.hadoop.util.StringUtils;
   AgeSnapshot ageAtEvictionSnapshot = bc.getStats().getAgeAtEvictionSnapshot();
   // Only show if non-zero mean and stddev as is the case in combinedblockcache
   double mean = ageAtEvictionSnapshot.getMean();
-  double stddev = ageAtEvictionSnapshot.getStdDev();
 
 
(The rest of this hunk and the following one, @@ -197,13 +196,6 @@, drop the conditional StdDev row from the "Evicted" / "Mean age of Blocks at eviction time (seconds)" table; the template's HTML table markup was stripped by the archive.)

hbase git commit: HBASE-15310 hbase-spark module has compilation failures with clover profile

2016-02-24 Thread jmhsieh
Repository: hbase
Updated Branches:
  refs/heads/master 2a306437a -> 20e14f449


HBASE-15310 hbase-spark module has compilation failures with clover profile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/20e14f44
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/20e14f44
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/20e14f44

Branch: refs/heads/master
Commit: 20e14f449a9d5ba052ef6250c08ee1e4c558ccf2
Parents: 2a30643
Author: Jonathan M Hsieh 
Authored: Wed Feb 24 10:09:21 2016 -0800
Committer: Jonathan M Hsieh 
Committed: Wed Feb 24 11:54:43 2016 -0800

--
 hbase-spark/pom.xml | 35 +++
 1 file changed, 35 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/20e14f44/hbase-spark/pom.xml
--
diff --git a/hbase-spark/pom.xml b/hbase-spark/pom.xml
index 7c7590e..7767440 100644
--- a/hbase-spark/pom.xml
+++ b/hbase-spark/pom.xml
@@ -604,6 +604,41 @@
(The added block configures org.codehaus.mojo:build-helper-maven-plugin with add-source and add-test-source executions bound to the validate phase, registering src/main/scala and src/test/scala as additional source roots; the XML element tags were stripped by the archive's text rendering.)



[21/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Listener.Reader.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Listener.Reader.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Listener.Reader.html
index e5d9af6..ea9ea4d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Listener.Reader.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Listener.Reader.html
@@ -2107,556 +2107,559 @@
(Regenerated HTML source view of RpcServer. The legible change shows that RpcServer.onConfigurationChange(Configuration) now also forwards the new configuration to the scheduler when it implements ConfigurationObserver, i.e. ((ConfigurationObserver) scheduler).onConfigurationChange(newConf); the rest of the hunk is unchanged rendered source shifted by the three added lines. The HTML markup and entities were stripped by the archive's text conversion.)

[30/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
Published site at 28cd48b673ca743d193874b2951bc995699e8e89.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/89b638a4
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/89b638a4
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/89b638a4

Branch: refs/heads/asf-site
Commit: 89b638a4306ea07a8e76f8500d50086bf42e08d7
Parents: d02dd5d
Author: jenkins 
Authored: Wed Feb 24 15:24:17 2016 +
Committer: Misty Stanley-Jones 
Committed: Wed Feb 24 09:43:51 2016 -0800

--
 acid-semantics.html |4 +-
 apache_hbase_reference_guide.pdf|4 +-
 apache_hbase_reference_guide.pdfmarks   |4 +-
 book.html   |2 +-
 bulk-loads.html |4 +-
 checkstyle-aggregate.html   |   54 +-
 coc.html|4 +-
 cygwin.html |4 +-
 dependencies.html   |4 +-
 dependency-convergence.html |4 +-
 dependency-info.html|4 +-
 dependency-management.html  |4 +-
 devapidocs/constant-values.html |   19 +
 devapidocs/index-all.html   |   12 +
 .../hbase/classification/package-tree.html  |4 +-
 .../hadoop/hbase/client/package-tree.html   |6 +-
 .../hbase/conf/ConfigurationObserver.html   |2 +-
 .../conf/class-use/ConfigurationObserver.html   |7 +
 .../hadoop/hbase/filter/package-tree.html   |8 +-
 .../hadoop/hbase/io/hfile/package-tree.html |6 +-
 .../hbase/ipc/BalancedQueueRpcExecutor.html |   15 +-
 .../hadoop/hbase/ipc/RWQueueRpcExecutor.html|   25 +-
 .../hbase/ipc/RpcExecutor.QueueBalancer.html|6 +-
 .../ipc/RpcExecutor.RandomQueueBalancer.html|8 +-
 .../apache/hadoop/hbase/ipc/RpcExecutor.html|   94 +-
 .../org/apache/hadoop/hbase/ipc/RpcServer.html  |   76 +-
 ...mpleRpcScheduler.CallPriorityComparator.html |   12 +-
 .../hadoop/hbase/ipc/SimpleRpcScheduler.html|   81 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   |2 +-
 .../hadoop/hbase/mapreduce/package-tree.html|4 +-
 .../hbase/master/balancer/package-tree.html |2 +-
 .../hadoop/hbase/master/package-tree.html   |4 +-
 .../org/apache/hadoop/hbase/package-tree.html   |   10 +-
 .../hadoop/hbase/procedure2/package-tree.html   |2 +-
 .../hadoop/hbase/quotas/package-tree.html   |2 +-
 .../hadoop/hbase/regionserver/package-tree.html |   24 +-
 .../hbase/security/access/package-tree.html |2 +-
 .../hadoop/hbase/security/package-tree.html |4 +-
 .../tmpl/master/MasterStatusTmpl.ImplData.html  |  270 ++---
 .../hbase/tmpl/master/MasterStatusTmpl.html |  108 +-
 .../hbase/tmpl/master/MasterStatusTmplImpl.html |   54 +-
 .../regionserver/RSStatusTmpl.ImplData.html |  120 +-
 .../hbase/tmpl/regionserver/RSStatusTmpl.html   |   48 +-
 .../tmpl/regionserver/RSStatusTmplImpl.html |   24 +-
 .../apache/hadoop/hbase/util/package-tree.html  |4 +-
 .../apache/hadoop/hbase/wal/package-tree.html   |2 +-
 devapidocs/overview-tree.html   |2 +-
 .../hbase/ipc/BalancedQueueRpcExecutor.html |   51 +-
 .../hadoop/hbase/ipc/RWQueueRpcExecutor.html|  211 ++--
 .../hbase/ipc/RpcExecutor.QueueBalancer.html|  341 +++---
 .../ipc/RpcExecutor.RandomQueueBalancer.html|  341 +++---
 .../apache/hadoop/hbase/ipc/RpcExecutor.html|  341 +++---
 .../RpcServer.BlockingServiceAndInterface.html  | 1099 +-
 .../apache/hadoop/hbase/ipc/RpcServer.Call.html | 1099 +-
 .../hadoop/hbase/ipc/RpcServer.Connection.html  | 1099 +-
 .../hbase/ipc/RpcServer.Listener.Reader.html| 1099 +-
 .../hadoop/hbase/ipc/RpcServer.Listener.html| 1099 +-
 .../hadoop/hbase/ipc/RpcServer.Responder.html   | 1099 +-
 .../org/apache/hadoop/hbase/ipc/RpcServer.html  | 1099 +-
 ...mpleRpcScheduler.CallPriorityComparator.html |  404 +++
 .../hadoop/hbase/ipc/SimpleRpcScheduler.html|  404 +++
 .../hadoop/hbase/master/TableStateManager.html  |2 +-
 .../tmpl/master/MasterStatusTmpl.ImplData.html  |  270 ++---
 .../tmpl/master/MasterStatusTmpl.Intf.html  |  270 ++---
 .../hbase/tmpl/master/MasterStatusTmpl.html |  270 ++---
 .../hbase/tmpl/master/MasterStatusTmplImpl.html |   72 +-
 .../regionserver/RSStatusTmpl.ImplData.html |  120 +-
 .../tmpl/regionserver/RSStatusTmpl.Intf.html|  120 +-
 .../hbase/tmpl/regionserver/RSStatusTmpl.html   |  120 +-
 .../tmpl/regionserver/RSStatusTmplImpl.html |   32 +-
 distribution-management.html 

[06/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/xref-test/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.html
--
diff --git 
a/xref-test/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.html
 
b/xref-test/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.html
index fb03c1b..ad3a0be 100644
--- 
a/xref-test/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.html
+++ 
b/xref-test/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.html
@@ -88,478 +88,483 @@
 78  import org.junit.Rule;
 79  import org.junit.Test;
 80  import org.junit.experimental.categories.Category;
-81  import org.junit.rules.TestRule;
-82  
-83  import com.google.common.base.Joiner;
-84  import com.google.protobuf.RpcController;
-85  import com.google.protobuf.ServiceException;
-86  
-87  /**
-88   * Like {@link TestRegionMergeTransaction} in that 
we're testing
-89   * {@link RegionMergeTransactionImpl} only the below 
tests are against a running
-90   * cluster where {@link TestRegionMergeTransaction} 
is tests against bare
-91   * {@link HRegion}.
-92   */
-93  
@Category({RegionServerTests.class, 
LargeTests.class})
-94  public class 
TestRegionMergeTransactionOnCluster
 {
-95private static final 
Log LOG = LogFactory
-96
.getLog(TestRegionMergeTransactionOnCluster.class);
-97@Rule public final 
TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-98
withLookingForStuckThread(true).build();
-99private static final 
int NB_SERVERS = 3;
-100 
-101   private static final 
byte[] FAMILYNAME = Bytes.toBytes("fam");
-102   private static final 
byte[] QUALIFIER = Bytes.toBytes("q");
-103 
-104   private static byte[] ROW = Bytes.toBytes("testRow");
-105   private static final 
int INITIAL_REGION_NUM = 10;
-106   private static final 
int ROWSIZE = 200;
-107   private static byte[][] ROWS = makeN(ROW, ROWSIZE);
-108 
-109   private static int 
waitTime = 60 * 1000;
+81  import org.junit.rules.TestName;
+82  import org.junit.rules.TestRule;
+83  
+84  import com.google.common.base.Joiner;
+85  import com.google.protobuf.RpcController;
+86  import com.google.protobuf.ServiceException;
+87  
+88  /**
+89   * Like {@link TestRegionMergeTransaction} in that 
we're testing
+90   * {@link RegionMergeTransactionImpl} only the below 
tests are against a running
+91   * cluster where {@link TestRegionMergeTransaction} 
is tests against bare
+92   * {@link HRegion}.
+93   */
+94  
@Category({RegionServerTests.class, 
LargeTests.class})
+95  public class 
TestRegionMergeTransactionOnCluster
 {
+96private static final 
Log LOG = LogFactory
+97
.getLog(TestRegionMergeTransactionOnCluster.class);
+98@Rule public TestName name = new TestName();
+99@Rule public final 
TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
+100   
withLookingForStuckThread(true).build();
+101   private static final 
int NB_SERVERS = 3;
+102 
+103   private static final 
byte[] FAMILYNAME = Bytes.toBytes("fam");
+104   private static final 
byte[] QUALIFIER = Bytes.toBytes("q");
+105 
+106   private static byte[] ROW = Bytes.toBytes("testRow");
+107   private static final 
int INITIAL_REGION_NUM = 10;
+108   private static final 
int ROWSIZE = 200;
+109   private static byte[][] ROWS = makeN(ROW, ROWSIZE);
 110 
-111   static final 
HBaseTestingUtility
 TEST_UTIL = new HBaseTestingUtility();
+111   private static int 
waitTime = 60 * 1000;
 112 
-113   private static HMaster MASTER;
-114   private static Admin ADMIN;
-115 
-116   @BeforeClass
-117   public static 
void beforeAllTests() throws Exception {
-118 // Start a cluster
-119 
TEST_UTIL.startMiniCluster(1, NB_SERVERS, null, MyMaster.class, null);
-120 MiniHBaseCluster
 cluster = TEST_UTIL.getHBaseCluster();
-121 MASTER = 
cluster.getMaster();
-122 
MASTER.balanceSwitch(false);
-123 ADMIN = 
TEST_UTIL.getConnection().getAdmin();
-124   }
-125 
-126   @AfterClass
-127   public static 
void afterAllTests() throws Exception {
-128 
TEST_UTIL.shutdownMiniCluster();
-129 if (ADMIN != null) ADMIN.close();
-130   }
-131 
-132   @Test
-133   public void 
testWholesomeMerge() throws Exception {
-134 LOG.info("Starting testWholesomeMerge");
-135 final TableName tableName =
-136 
TableName.valueOf("testWholesomeMerge");
-137 
-138 // Create table and load data.
-139 Table table = 
createTableAndLoadData(MASTER, tableName);
-140 // Merge 1st and 2nd region
-141 
mergeRegionsAndVerifyRegionNum(MASTER, tableName, 0, 1,
-142 
INITIAL_REGION_NUM - 1);
-143 
-144 // Merge 2nd and 3th region
-145 
PairOfSameType<HRegionInfo> mergedRegions =
-146   
mergeRegionsAndVerifyRegionNum(MASTER, tableName, 1, 2,
-147 
INITIAL_REGION_NUM - 2);
-148 
-149 
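The visible part of this diff is mostly line renumbering; the functional addition is the new JUnit TestName rule. A small sketch of what that rule is typically used for, deriving a per-test identifier instead of hard-coding strings such as "testWholesomeMerge" (standard JUnit 4 API, independent of HBase):

    import static org.junit.Assert.assertEquals;

    import org.junit.Rule;
    import org.junit.Test;
    import org.junit.rules.TestName;

    // The TestName rule exposes the currently running test method's name, which tests
    // commonly reuse as a table name so each test gets its own table.
    public class TestNameRuleSketch {
      @Rule public TestName name = new TestName();

      @Test
      public void testWholesomeMerge() {
        String tableName = name.getMethodName(); // "testWholesomeMerge"
        assertEquals("testWholesomeMerge", tableName);
      }
    }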

[15/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
index 52d47c0..b8ef76a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
@@ -67,15 +67,15 @@
 059  requiredArguments = {
 060    @org.jamon.annotations.Argument(name = "master", type = "HMaster")},
 061  optionalArguments = {
-062    @org.jamon.annotations.Argument(name = "frags", type = "Map<String,Integer>"),
-063    @org.jamon.annotations.Argument(name = "catalogJanitorEnabled", type = "boolean"),
-064    @org.jamon.annotations.Argument(name = "filter", type = "String"),
-065    @org.jamon.annotations.Argument(name = "deadServers", type = "Set<ServerName>"),
-066    @org.jamon.annotations.Argument(name = "format", type = "String"),
-067    @org.jamon.annotations.Argument(name = "servers", type = "List<ServerName>"),
-068    @org.jamon.annotations.Argument(name = "metaLocation", type = "ServerName"),
-069    @org.jamon.annotations.Argument(name = "assignmentManager", type = "AssignmentManager"),
-070    @org.jamon.annotations.Argument(name = "serverManager", type = "ServerManager")})
+062    @org.jamon.annotations.Argument(name = "serverManager", type = "ServerManager"),
+063    @org.jamon.annotations.Argument(name = "metaLocation", type = "ServerName"),
+064    @org.jamon.annotations.Argument(name = "frags", type = "Map<String,Integer>"),
+065    @org.jamon.annotations.Argument(name = "filter", type = "String"),
+066    @org.jamon.annotations.Argument(name = "deadServers", type = "Set<ServerName>"),
+067    @org.jamon.annotations.Argument(name = "format", type = "String"),
+068    @org.jamon.annotations.Argument(name = "assignmentManager", type = "AssignmentManager"),
+069    @org.jamon.annotations.Argument(name = "servers", type = "List<ServerName>"),
+070    @org.jamon.annotations.Argument(name = "catalogJanitorEnabled", type = "boolean")})
 071public class MasterStatusTmpl
 072  extends org.jamon.AbstractTemplateProxy
 073{
@@ -116,159 +116,159 @@
 108  return m_master;
 109}
 110private HMaster m_master;
-111// 21, 1
-112public void 
setFrags(Map<String,Integer> frags)
+111// 28, 1
+112public void 
setServerManager(ServerManager serverManager)
 113{
-114  // 21, 1
-115  m_frags = frags;
-116  m_frags__IsNotDefault = true;
+114  // 28, 1
+115  m_serverManager = serverManager;
+116  m_serverManager__IsNotDefault = 
true;
 117}
-118    public Map<String,Integer>
getFrags()
+118public ServerManager 
getServerManager()
 119{
-120  return m_frags;
+120  return m_serverManager;
 121}
-122    private Map<String,Integer>
m_frags;
-123public boolean 
getFrags__IsNotDefault()
+122private ServerManager 
m_serverManager;
+123public boolean 
getServerManager__IsNotDefault()
 124{
-125  return m_frags__IsNotDefault;
+125  return 
m_serverManager__IsNotDefault;
 126}
-127private boolean 
m_frags__IsNotDefault;
-128// 25, 1
-129public void 
setCatalogJanitorEnabled(boolean catalogJanitorEnabled)
+127private boolean 
m_serverManager__IsNotDefault;
+128// 22, 1
+129public void 
setMetaLocation(ServerName metaLocation)
 130{
-131  // 25, 1
-132  m_catalogJanitorEnabled = 
catalogJanitorEnabled;
-133  
m_catalogJanitorEnabled__IsNotDefault = true;
+131  // 22, 1
+132  m_metaLocation = metaLocation;
+133  m_metaLocation__IsNotDefault = 
true;
 134}
-135public boolean 
getCatalogJanitorEnabled()
+135public ServerName getMetaLocation()
 136{
-137  return m_catalogJanitorEnabled;
+137  return m_metaLocation;
 138}
-139private boolean 
m_catalogJanitorEnabled;
-140public boolean 
getCatalogJanitorEnabled__IsNotDefault()
+139private ServerName m_metaLocation;
+140public boolean 
getMetaLocation__IsNotDefault()
 141{
-142  return 
m_catalogJanitorEnabled__IsNotDefault;
+142  return 
m_metaLocation__IsNotDefault;
 143}
-144private boolean 
m_catalogJanitorEnabled__IsNotDefault;
-145// 26, 1
-146public void setFilter(String 
filter)
+144private boolean 
m_metaLocation__IsNotDefault;
+145// 21, 1
+146public void 
setFrags(Map<String,Integer> frags)
 147{
-148  // 26, 1
-149  m_filter = filter;
-150  m_filter__IsNotDefault = true;
+148  // 21, 1
+149  m_frags = frags;
+150  m_frags__IsNotDefault = true;
 151}
-152public String getFilter()
+152    public Map<String,Integer>
getFrags()
 153{
-154  return m_filter;
+154  return m_frags;
 155  

[03/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/xref/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.html
--
diff --git a/xref/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.html 
b/xref/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.html
index e90a138..0549d41 100644
--- a/xref/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.html
+++ b/xref/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.html
@@ -38,203 +38,219 @@
 28  import org.apache.hadoop.hbase.HConstants;
 29  import org.apache.hadoop.hbase.classification.InterfaceAudience;
 30  import org.apache.hadoop.hbase.classification.InterfaceStability;
-31  import org.apache.hadoop.hbase.util.BoundedPriorityBlockingQueue;
-32  
-33  /**
-34   * A scheduler that maintains isolated handler pools for general,
-35   * high-priority, and replication requests.
-36   */
-37  @InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX})
-38  @InterfaceStability.Evolving
-39  public class SimpleRpcScheduler extends RpcScheduler {
-40    private static final Log LOG = LogFactory.getLog(SimpleRpcScheduler.class);
-41  
-42    public static final String CALL_QUEUE_READ_SHARE_CONF_KEY =
-43        "hbase.ipc.server.callqueue.read.ratio";
-44    public static final String CALL_QUEUE_SCAN_SHARE_CONF_KEY =
-45        "hbase.ipc.server.callqueue.scan.ratio";
-46    public static final String CALL_QUEUE_HANDLER_FACTOR_CONF_KEY =
-47        "hbase.ipc.server.callqueue.handler.factor";
-48  
-49    /** If set to 'deadline', uses a priority queue and deprioritize long-running scans */
-50    public static final String CALL_QUEUE_TYPE_CONF_KEY = "hbase.ipc.server.callqueue.type";
-51    public static final String CALL_QUEUE_TYPE_DEADLINE_CONF_VALUE = "deadline";
-52    public static final String CALL_QUEUE_TYPE_FIFO_CONF_VALUE = "fifo";
-53  
-54    /** max delay in msec used to bound the deprioritized requests */
-55    public static final String QUEUE_MAX_CALL_DELAY_CONF_KEY
-56        = "hbase.ipc.server.queue.max.call.delay";
-57  
-58    /**
-59     * Comparator used by the "normal callQueue" if DEADLINE_CALL_QUEUE_CONF_KEY is set to true.
-60     * It uses the calculated "deadline" e.g. to deprioritize long-running job
-61     *
-62     * If multiple requests have the same deadline BoundedPriorityBlockingQueue will order them in
-63     * FIFO (first-in-first-out) manner.
-64     */
-65    private static class CallPriorityComparator implements Comparator<CallRunner> {
-66      private final static int DEFAULT_MAX_CALL_DELAY = 5000;
-67  
-68      private final PriorityFunction priority;
-69      private final int maxDelay;
-70  
-71      public CallPriorityComparator(final Configuration conf, final PriorityFunction priority) {
-72        this.priority = priority;
-73        this.maxDelay = conf.getInt(QUEUE_MAX_CALL_DELAY_CONF_KEY, DEFAULT_MAX_CALL_DELAY);
-74      }
-75  
-76      @Override
-77      public int compare(CallRunner a, CallRunner b) {
-78        RpcServer.Call callA = a.getCall();
-79        RpcServer.Call callB = b.getCall();
-80        long deadlineA = priority.getDeadline(callA.getHeader(), callA.param);
-81        long deadlineB = priority.getDeadline(callB.getHeader(), callB.param);
-82        deadlineA = callA.timestamp + Math.min(deadlineA, maxDelay);
-83        deadlineB = callB.timestamp + Math.min(deadlineB, maxDelay);
-84        return (int)(deadlineA - deadlineB);
-85      }
-86    }
-87  
-88    private int port;
-89    private final PriorityFunction priority;
-90    private final RpcExecutor callExecutor;
-91    private final RpcExecutor priorityExecutor;
-92    private final RpcExecutor replicationExecutor;
-93  
-94    /** What level a high priority call is at. */
-95    private final int highPriorityLevel;
-96  
-97    private Abortable abortable = null;
-98  
-99    /**
-100    * @param conf
-101    * @param handlerCount the number of handler threads that will be used to process calls
-102    * @param priorityHandlerCount How many threads for priority handling.
-103    * @param replicationHandlerCount How many threads for replication handling.
-104    * @param highPriorityLevel
-105    * @param priority Function to extract request priority.
-106    */
-107   public SimpleRpcScheduler(
-108       Configuration conf,
-109       int handlerCount,
-110       int priorityHandlerCount,
-111       int replicationHandlerCount,
-112       PriorityFunction priority,
-113       Abortable server,
-114       int highPriorityLevel) {
-115     int maxQueueLength = conf.getInt("hbase.ipc.server.max.callqueue.length",
-116         handlerCount * RpcServer.DEFAULT_MAX_CALLQUEUE_LENGTH_PER_HANDLER);
-117     this.priority = priority;
-118     this.highPriorityLevel = highPriorityLevel;
-119     this.abortable = server;
-120 
-121     String callQueueType =
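As a reading aid for the hunk above: CallPriorityComparator orders calls by an effective deadline, which is the call's arrival timestamp plus its priority-function hint capped at the configured max call delay. A self-contained sketch of that rule, with invented class and field names rather than the HBase types:

    import java.util.Comparator;
    import java.util.PriorityQueue;

    // Standalone model of the deadline ordering above: arrival time plus a capped
    // deadline hint decides drain order, so long scans are pushed back but the cap
    // keeps them from being deferred forever. Names are illustrative, not HBase API.
    public class DeadlineOrderingSketch {
      static final long MAX_DELAY_MS = 5000; // mirrors DEFAULT_MAX_CALL_DELAY in the hunk

      static class Call {
        final String name;
        final long timestampMs;    // arrival time
        final long deadlineHintMs; // hint from the priority function
        Call(String name, long timestampMs, long deadlineHintMs) {
          this.name = name;
          this.timestampMs = timestampMs;
          this.deadlineHintMs = deadlineHintMs;
        }
      }

      static long effectiveDeadline(Call c) {
        return c.timestampMs + Math.min(c.deadlineHintMs, MAX_DELAY_MS);
      }

      public static void main(String[] args) {
        PriorityQueue<Call> queue = new PriorityQueue<Call>(3, new Comparator<Call>() {
          @Override public int compare(Call a, Call b) {
            return Long.compare(effectiveDeadline(a), effectiveDeadline(b));
          }
        });
        queue.add(new Call("long-running-scan", 0, 60000)); // hint capped at 5000
        queue.add(new Call("short-get", 10, 0));
        queue.add(new Call("medium-scan", 5, 2000));
        while (!queue.isEmpty()) {
          System.out.println(queue.poll().name); // short-get, medium-scan, long-running-scan
        }
      }
    }

Per the class comment in the hunk, the production comparator feeds a BoundedPriorityBlockingQueue rather than a plain PriorityQueue, but the ordering rule is the same.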

[14/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html
index 46c00c7..049b2e1 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html
@@ -33,10 +33,10 @@
 025  requiredArguments = {
 026    @org.jamon.annotations.Argument(name = "regionServer", type = "HRegionServer")},
 027  optionalArguments = {
-028    @org.jamon.annotations.Argument(name = "format", type = "String"),
-029    @org.jamon.annotations.Argument(name = "filter", type = "String"),
-030    @org.jamon.annotations.Argument(name = "bcv", type = "String"),
-031    @org.jamon.annotations.Argument(name = "bcn", type = "String")})
+028    @org.jamon.annotations.Argument(name = "bcv", type = "String"),
+029    @org.jamon.annotations.Argument(name = "bcn", type = "String"),
+030    @org.jamon.annotations.Argument(name = "format", type = "String"),
+031    @org.jamon.annotations.Argument(name = "filter", type = "String")})
 032public class RSStatusTmpl
 033  extends org.jamon.AbstractTemplateProxy
 034{
@@ -77,74 +77,74 @@
 069  return m_regionServer;
 070}
 071private HRegionServer 
m_regionServer;
-072// 22, 1
-073public void setFormat(String 
format)
+072// 24, 1
+073public void setBcv(String bcv)
 074{
-075  // 22, 1
-076  m_format = format;
-077  m_format__IsNotDefault = true;
+075  // 24, 1
+076  m_bcv = bcv;
+077  m_bcv__IsNotDefault = true;
 078}
-079public String getFormat()
+079public String getBcv()
 080{
-081  return m_format;
+081  return m_bcv;
 082}
-083private String m_format;
-084public boolean 
getFormat__IsNotDefault()
+083private String m_bcv;
+084public boolean 
getBcv__IsNotDefault()
 085{
-086  return m_format__IsNotDefault;
+086  return m_bcv__IsNotDefault;
 087}
-088private boolean 
m_format__IsNotDefault;
-089// 21, 1
-090public void setFilter(String 
filter)
+088private boolean 
m_bcv__IsNotDefault;
+089// 23, 1
+090public void setBcn(String bcn)
 091{
-092  // 21, 1
-093  m_filter = filter;
-094  m_filter__IsNotDefault = true;
+092  // 23, 1
+093  m_bcn = bcn;
+094  m_bcn__IsNotDefault = true;
 095}
-096public String getFilter()
+096public String getBcn()
 097{
-098  return m_filter;
+098  return m_bcn;
 099}
-100private String m_filter;
-101public boolean 
getFilter__IsNotDefault()
+100private String m_bcn;
+101public boolean 
getBcn__IsNotDefault()
 102{
-103  return m_filter__IsNotDefault;
+103  return m_bcn__IsNotDefault;
 104}
-105private boolean 
m_filter__IsNotDefault;
-106// 24, 1
-107public void setBcv(String bcv)
+105private boolean 
m_bcn__IsNotDefault;
+106// 22, 1
+107public void setFormat(String 
format)
 108{
-109  // 24, 1
-110  m_bcv = bcv;
-111  m_bcv__IsNotDefault = true;
+109  // 22, 1
+110  m_format = format;
+111  m_format__IsNotDefault = true;
 112}
-113public String getBcv()
+113public String getFormat()
 114{
-115  return m_bcv;
+115  return m_format;
 116}
-117private String m_bcv;
-118public boolean 
getBcv__IsNotDefault()
+117private String m_format;
+118public boolean 
getFormat__IsNotDefault()
 119{
-120  return m_bcv__IsNotDefault;
+120  return m_format__IsNotDefault;
 121}
-122private boolean 
m_bcv__IsNotDefault;
-123// 23, 1
-124public void setBcn(String bcn)
+122private boolean 
m_format__IsNotDefault;
+123// 21, 1
+124public void setFilter(String 
filter)
 125{
-126  // 23, 1
-127  m_bcn = bcn;
-128  m_bcn__IsNotDefault = true;
+126  // 21, 1
+127  m_filter = filter;
+128  m_filter__IsNotDefault = true;
 129}
-130public String getBcn()
+130public String getFilter()
 131{
-132  return m_bcn;
+132  return m_filter;
 133}
-134private String m_bcn;
-135public boolean 
getBcn__IsNotDefault()
+134private String m_filter;
+135public boolean 
getFilter__IsNotDefault()
 136{
-137  return m_bcn__IsNotDefault;
+137  return m_filter__IsNotDefault;
 138}
-139private boolean 
m_bcn__IsNotDefault;
+139private boolean 
m_filter__IsNotDefault;
 140  }
 141  @Override
 142  protected 
org.jamon.AbstractTemplateProxy.ImplData makeImplData()
@@ -156,31 +156,31 @@
 148return (ImplData) 
super.getImplData();
 149  }
 150  
-151  protected String format;
-152  public final 

[08/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.html
index 1bdbf69..f83e138 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.html
@@ -86,478 +86,483 @@
 078import org.junit.Rule;
 079import org.junit.Test;
 080import 
org.junit.experimental.categories.Category;
-081import org.junit.rules.TestRule;
-082
-083import com.google.common.base.Joiner;
-084import 
com.google.protobuf.RpcController;
-085import 
com.google.protobuf.ServiceException;
-086
-087/**
-088 * Like {@link 
TestRegionMergeTransaction} in that we're testing
-089 * {@link RegionMergeTransactionImpl} 
only the below tests are against a running
-090 * cluster where {@link 
TestRegionMergeTransaction} is tests against bare
-091 * {@link HRegion}.
-092 */
-093@Category({RegionServerTests.class, 
LargeTests.class})
-094public class 
TestRegionMergeTransactionOnCluster {
-095  private static final Log LOG = 
LogFactory
-096  
.getLog(TestRegionMergeTransactionOnCluster.class);
-097  @Rule public final TestRule timeout = 
CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-098  
withLookingForStuckThread(true).build();
-099  private static final int NB_SERVERS = 
3;
-100
-101  private static final byte[] FAMILYNAME 
= Bytes.toBytes("fam");
-102  private static final byte[] QUALIFIER = 
Bytes.toBytes("q");
-103
-104  private static byte[] ROW = 
Bytes.toBytes("testRow");
-105  private static final int 
INITIAL_REGION_NUM = 10;
-106  private static final int ROWSIZE = 
200;
-107  private static byte[][] ROWS = 
makeN(ROW, ROWSIZE);
-108
-109  private static int waitTime = 60 * 
1000;
+081import org.junit.rules.TestName;
+082import org.junit.rules.TestRule;
+083
+084import com.google.common.base.Joiner;
+085import 
com.google.protobuf.RpcController;
+086import 
com.google.protobuf.ServiceException;
+087
+088/**
+089 * Like {@link 
TestRegionMergeTransaction} in that we're testing
+090 * {@link RegionMergeTransactionImpl} 
only the below tests are against a running
+091 * cluster where {@link 
TestRegionMergeTransaction} is tests against bare
+092 * {@link HRegion}.
+093 */
+094@Category({RegionServerTests.class, 
LargeTests.class})
+095public class 
TestRegionMergeTransactionOnCluster {
+096  private static final Log LOG = 
LogFactory
+097  
.getLog(TestRegionMergeTransactionOnCluster.class);
+098  @Rule public TestName name = new 
TestName();
+099  @Rule public final TestRule timeout = 
CategoryBasedTimeout.builder().withTimeout(this.getClass()).
+100  
withLookingForStuckThread(true).build();
+101  private static final int NB_SERVERS = 
3;
+102
+103  private static final byte[] FAMILYNAME 
= Bytes.toBytes("fam");
+104  private static final byte[] QUALIFIER = 
Bytes.toBytes("q");
+105
+106  private static byte[] ROW = 
Bytes.toBytes("testRow");
+107  private static final int 
INITIAL_REGION_NUM = 10;
+108  private static final int ROWSIZE = 
200;
+109  private static byte[][] ROWS = 
makeN(ROW, ROWSIZE);
 110
-111  static final HBaseTestingUtility 
TEST_UTIL = new HBaseTestingUtility();
+111  private static int waitTime = 60 * 
1000;
 112
-113  private static HMaster MASTER;
-114  private static Admin ADMIN;
-115
-116  @BeforeClass
-117  public static void beforeAllTests() 
throws Exception {
-118// Start a cluster
-119TEST_UTIL.startMiniCluster(1, 
NB_SERVERS, null, MyMaster.class, null);
-120MiniHBaseCluster cluster = 
TEST_UTIL.getHBaseCluster();
-121MASTER = cluster.getMaster();
-122MASTER.balanceSwitch(false);
-123ADMIN = 
TEST_UTIL.getConnection().getAdmin();
-124  }
-125
-126  @AfterClass
-127  public static void afterAllTests() 
throws Exception {
-128TEST_UTIL.shutdownMiniCluster();
-129if (ADMIN != null) ADMIN.close();
-130  }
-131
-132  @Test
-133  public void testWholesomeMerge() throws 
Exception {
-134LOG.info("Starting 
testWholesomeMerge");
-135final TableName tableName =
-136
TableName.valueOf("testWholesomeMerge");
-137
-138// Create table and load data.
-139Table table = 
createTableAndLoadData(MASTER, tableName);
-140// Merge 1st and 2nd region
-141
mergeRegionsAndVerifyRegionNum(MASTER, tableName, 0, 1,
-142INITIAL_REGION_NUM - 1);
-143
-144// Merge 2nd and 3th region
-145    PairOfSameType<HRegionInfo>
mergedRegions =
-146  
mergeRegionsAndVerifyRegionNum(MASTER, tableName, 1, 2,
-147INITIAL_REGION_NUM - 2);
-148
-149verifyRowCount(table, ROWSIZE);

[10/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.MyMaster.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.MyMaster.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.MyMaster.html
index 1bdbf69..f83e138 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.MyMaster.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.MyMaster.html
@@ -86,478 +86,483 @@
 078import org.junit.Rule;
 079import org.junit.Test;
 080import 
org.junit.experimental.categories.Category;
-081import org.junit.rules.TestRule;
-082
-083import com.google.common.base.Joiner;
-084import 
com.google.protobuf.RpcController;
-085import 
com.google.protobuf.ServiceException;
-086
-087/**
-088 * Like {@link 
TestRegionMergeTransaction} in that we're testing
-089 * {@link RegionMergeTransactionImpl} 
only the below tests are against a running
-090 * cluster where {@link 
TestRegionMergeTransaction} is tests against bare
-091 * {@link HRegion}.
-092 */
-093@Category({RegionServerTests.class, 
LargeTests.class})
-094public class 
TestRegionMergeTransactionOnCluster {
-095  private static final Log LOG = 
LogFactory
-096  
.getLog(TestRegionMergeTransactionOnCluster.class);
-097  @Rule public final TestRule timeout = 
CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-098  
withLookingForStuckThread(true).build();
-099  private static final int NB_SERVERS = 
3;
-100
-101  private static final byte[] FAMILYNAME 
= Bytes.toBytes("fam");
-102  private static final byte[] QUALIFIER = 
Bytes.toBytes("q");
-103
-104  private static byte[] ROW = 
Bytes.toBytes("testRow");
-105  private static final int 
INITIAL_REGION_NUM = 10;
-106  private static final int ROWSIZE = 
200;
-107  private static byte[][] ROWS = 
makeN(ROW, ROWSIZE);
-108
-109  private static int waitTime = 60 * 
1000;
+081import org.junit.rules.TestName;
+082import org.junit.rules.TestRule;
+083
+084import com.google.common.base.Joiner;
+085import 
com.google.protobuf.RpcController;
+086import 
com.google.protobuf.ServiceException;
+087
+088/**
+089 * Like {@link 
TestRegionMergeTransaction} in that we're testing
+090 * {@link RegionMergeTransactionImpl} 
only the below tests are against a running
+091 * cluster where {@link 
TestRegionMergeTransaction} is tests against bare
+092 * {@link HRegion}.
+093 */
+094@Category({RegionServerTests.class, 
LargeTests.class})
+095public class 
TestRegionMergeTransactionOnCluster {
+096  private static final Log LOG = 
LogFactory
+097  
.getLog(TestRegionMergeTransactionOnCluster.class);
+098  @Rule public TestName name = new 
TestName();
+099  @Rule public final TestRule timeout = 
CategoryBasedTimeout.builder().withTimeout(this.getClass()).
+100  
withLookingForStuckThread(true).build();
+101  private static final int NB_SERVERS = 
3;
+102
+103  private static final byte[] FAMILYNAME 
= Bytes.toBytes("fam");
+104  private static final byte[] QUALIFIER = 
Bytes.toBytes("q");
+105
+106  private static byte[] ROW = 
Bytes.toBytes("testRow");
+107  private static final int 
INITIAL_REGION_NUM = 10;
+108  private static final int ROWSIZE = 
200;
+109  private static byte[][] ROWS = 
makeN(ROW, ROWSIZE);
 110
-111  static final HBaseTestingUtility 
TEST_UTIL = new HBaseTestingUtility();
+111  private static int waitTime = 60 * 
1000;
 112
-113  private static HMaster MASTER;
-114  private static Admin ADMIN;
-115
-116  @BeforeClass
-117  public static void beforeAllTests() 
throws Exception {
-118// Start a cluster
-119TEST_UTIL.startMiniCluster(1, 
NB_SERVERS, null, MyMaster.class, null);
-120MiniHBaseCluster cluster = 
TEST_UTIL.getHBaseCluster();
-121MASTER = cluster.getMaster();
-122MASTER.balanceSwitch(false);
-123ADMIN = 
TEST_UTIL.getConnection().getAdmin();
-124  }
-125
-126  @AfterClass
-127  public static void afterAllTests() 
throws Exception {
-128TEST_UTIL.shutdownMiniCluster();
-129if (ADMIN != null) ADMIN.close();
-130  }
-131
-132  @Test
-133  public void testWholesomeMerge() throws 
Exception {
-134LOG.info("Starting 
testWholesomeMerge");
-135final TableName tableName =
-136
TableName.valueOf("testWholesomeMerge");
-137
-138// Create table and load data.
-139Table table = 
createTableAndLoadData(MASTER, tableName);
-140// Merge 1st and 2nd region
-141
mergeRegionsAndVerifyRegionNum(MASTER, tableName, 0, 1,
-142INITIAL_REGION_NUM - 1);
-143
-144// Merge 2nd and 3th region
-145    PairOfSameType<HRegionInfo>
mergedRegions =
-146  
mergeRegionsAndVerifyRegionNum(MASTER, tableName, 1, 2,
-147INITIAL_REGION_NUM - 2);

[26/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html 
b/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
index e27531a..c56cb96 100644
--- a/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
@@ -224,40 +224,40 @@ implements HRegionServer regionServer
 
 
-
+
 
 
 
 
-format
-private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String format
+bcv
+private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String bcv
 
 
-
+
 
 
 
 
-filter
-private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String filter
+bcn
+private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String bcn
 
 
-
+
 
 
 
 
-bcv
-private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String bcv
+format
+private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String format
 
 
-
+
 
 
 
 
-bcn
-private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String bcn
+filter
+private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String filter
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index 98cd86f..52b8280 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -463,12 +463,12 @@
 java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
 org.apache.hadoop.hbase.util.Bytes.LexicographicalComparerHolder.UnsafeComparer 
(implements org.apache.hadoop.hbase.util.Bytes.ComparerT)
+org.apache.hadoop.hbase.util.PrettyPrinter.Unit
 org.apache.hadoop.hbase.util.Order
-org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE
 org.apache.hadoop.hbase.util.Bytes.LexicographicalComparerHolder.PureJavaComparer 
(implements org.apache.hadoop.hbase.util.Bytes.ComparerT)
-org.apache.hadoop.hbase.util.PrettyPrinter.Unit
 org.apache.hadoop.hbase.util.PoolMap.PoolType
 org.apache.hadoop.hbase.util.ChecksumType
+org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
index 1f810f3..8260133 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
@@ -148,9 +148,9 @@
 
 java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.wal.WALKey.Version
 org.apache.hadoop.hbase.wal.WALFactory.Providers
 org.apache.hadoop.hbase.wal.RegionGroupingProvider.Strategies
+org.apache.hadoop.hbase.wal.WALKey.Version
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/overview-tree.html
--
diff --git a/devapidocs/overview-tree.html b/devapidocs/overview-tree.html
index be4ca28..2a614f4 100644
--- a/devapidocs/overview-tree.html
+++ 

[17/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.CallPriorityComparator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.CallPriorityComparator.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.CallPriorityComparator.html
index 856be7b..6beef12 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.CallPriorityComparator.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.CallPriorityComparator.html
@@ -36,203 +36,219 @@
 028import 
org.apache.hadoop.hbase.HConstants;
 029import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 030import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-031import 
org.apache.hadoop.hbase.util.BoundedPriorityBlockingQueue;
-032
-033/**
-034 * A scheduler that maintains isolated 
handler pools for general,
-035 * high-priority, and replication 
requests.
-036 */
-037@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
 HBaseInterfaceAudience.PHOENIX})
-038@InterfaceStability.Evolving
-039public class SimpleRpcScheduler extends 
RpcScheduler {
-040  private static final Log LOG = 
LogFactory.getLog(SimpleRpcScheduler.class);
-041
-042  public static final String 
CALL_QUEUE_READ_SHARE_CONF_KEY =
-043  
"hbase.ipc.server.callqueue.read.ratio";
-044  public static final String 
CALL_QUEUE_SCAN_SHARE_CONF_KEY =
-045  
"hbase.ipc.server.callqueue.scan.ratio";
-046  public static final String 
CALL_QUEUE_HANDLER_FACTOR_CONF_KEY =
-047  
"hbase.ipc.server.callqueue.handler.factor";
-048
-049  /** If set to 'deadline', uses a 
priority queue and deprioritize long-running scans */
-050  public static final String 
CALL_QUEUE_TYPE_CONF_KEY = "hbase.ipc.server.callqueue.type";
-051  public static final String 
CALL_QUEUE_TYPE_DEADLINE_CONF_VALUE = "deadline";
-052  public static final String 
CALL_QUEUE_TYPE_FIFO_CONF_VALUE = "fifo";
-053
-054  /** max delay in msec used to bound the 
deprioritized requests */
-055  public static final String 
QUEUE_MAX_CALL_DELAY_CONF_KEY
-056  = 
"hbase.ipc.server.queue.max.call.delay";
-057
-058  /**
-059   * Comparator used by the "normal 
callQueue" if DEADLINE_CALL_QUEUE_CONF_KEY is set to true.
-060   * It uses the calculated "deadline" 
e.g. to deprioritize long-running job
-061   *
-062   * If multiple requests have the same 
deadline BoundedPriorityBlockingQueue will order them in
-063   * FIFO (first-in-first-out) manner.
-064   */
-065  private static class 
CallPriorityComparator implements Comparator<CallRunner> {
-066private final static int 
DEFAULT_MAX_CALL_DELAY = 5000;
-067
-068private final PriorityFunction 
priority;
-069private final int maxDelay;
-070
-071public CallPriorityComparator(final 
Configuration conf, final PriorityFunction priority) {
-072  this.priority = priority;
-073  this.maxDelay = 
conf.getInt(QUEUE_MAX_CALL_DELAY_CONF_KEY, DEFAULT_MAX_CALL_DELAY);
-074}
-075
-076@Override
-077public int compare(CallRunner a, 
CallRunner b) {
-078  RpcServer.Call callA = 
a.getCall();
-079  RpcServer.Call callB = 
b.getCall();
-080  long deadlineA = 
priority.getDeadline(callA.getHeader(), callA.param);
-081  long deadlineB = 
priority.getDeadline(callB.getHeader(), callB.param);
-082  deadlineA = callA.timestamp + 
Math.min(deadlineA, maxDelay);
-083  deadlineB = callB.timestamp + 
Math.min(deadlineB, maxDelay);
-084  return (int)(deadlineA - 
deadlineB);
-085}
-086  }
-087
-088  private int port;
-089  private final PriorityFunction 
priority;
-090  private final RpcExecutor 
callExecutor;
-091  private final RpcExecutor 
priorityExecutor;
-092  private final RpcExecutor 
replicationExecutor;
-093
-094  /** What level a high priority call is 
at. */
-095  private final int highPriorityLevel;
-096
-097  private Abortable abortable = null;
-098
-099  /**
-100   * @param conf
-101   * @param handlerCount the number of 
handler threads that will be used to process calls
-102   * @param priorityHandlerCount How many 
threads for priority handling.
-103   * @param replicationHandlerCount How 
many threads for replication handling.
-104   * @param highPriorityLevel
-105   * @param priority Function to extract 
request priority.
-106   */
-107  public SimpleRpcScheduler(
-108  Configuration conf,
-109  int handlerCount,
-110  int priorityHandlerCount,
-111  int replicationHandlerCount,
-112  PriorityFunction priority,
-113  Abortable server,
-114  int highPriorityLevel) {
-115int maxQueueLength = 
conf.getInt("hbase.ipc.server.max.callqueue.length",
-116handlerCount * 
RpcServer.DEFAULT_MAX_CALLQUEUE_LENGTH_PER_HANDLER);
-117this.priority = priority;
-118this.highPriorityLevel = 
highPriorityLevel;
-119

[05/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/xref/org/apache/hadoop/hbase/ipc/BalancedQueueRpcExecutor.html
--
diff --git a/xref/org/apache/hadoop/hbase/ipc/BalancedQueueRpcExecutor.html 
b/xref/org/apache/hadoop/hbase/ipc/BalancedQueueRpcExecutor.html
index 738c35c..af5539b 100644
--- a/xref/org/apache/hadoop/hbase/ipc/BalancedQueueRpcExecutor.html
+++ b/xref/org/apache/hadoop/hbase/ipc/BalancedQueueRpcExecutor.html
@@ -76,31 +76,40 @@
 66  
 67    protected void initializeQueues(final int numQueues,
 68        final Class<? extends BlockingQueue> queueClass, Object... initargs) {
-69      for (int i = 0; i < numQueues; ++i) {
-70        queues.add((BlockingQueue<CallRunner>) ReflectionUtils.newInstance(queueClass, initargs));
-71      }
-72    }
-73  
-74    @Override
-75    public boolean dispatch(final CallRunner callTask) throws InterruptedException {
-76      int queueIndex = balancer.getNextQueue();
-77      return queues.get(queueIndex).offer(callTask);
-78    }
-79  
-80    @Override
-81    public int getQueueLength() {
-82      int length = 0;
-83      for (final BlockingQueue<CallRunner> queue : queues) {
-84        length += queue.size();
+69      if (initargs.length > 0) {
+70        currentQueueLimit = (int) initargs[0];
+71        initargs[0] = Math.max((int) initargs[0], DEFAULT_CALL_QUEUE_SIZE_HARD_LIMIT);
+72      }
+73      for (int i = 0; i < numQueues; ++i) {
+74        queues.add((BlockingQueue<CallRunner>) ReflectionUtils.newInstance(queueClass, initargs));
+75      }
+76    }
+77  
+78    @Override
+79    public boolean dispatch(final CallRunner callTask) throws InterruptedException {
+80      int queueIndex = balancer.getNextQueue();
+81      BlockingQueue<CallRunner> queue = queues.get(queueIndex);
+82      // that means we can overflow by at most num reader size (5), that's ok
+83      if (queue.size() >= currentQueueLimit) {
+84        return false;
 85      }
-86      return length;
+86      return queue.offer(callTask);
 87    }
 88  
 89    @Override
-90    public List<BlockingQueue<CallRunner>> getQueues() {
-91      return queues;
-92    }
-93  }
+90    public int getQueueLength() {
+91      int length = 0;
+92      for (final BlockingQueue<CallRunner> queue : queues) {
+93        length += queue.size();
+94      }
+95      return length;
+96    }
+97  
+98    @Override
+99    public List<BlockingQueue<CallRunner>> getQueues() {
+100     return queues;
+101   }
+102 }

This page was automatically generated by Maven (http://maven.apache.org/)
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/xref/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.html
--
diff --git a/xref/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.html 
b/xref/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.html
index 5498e19..05441c7 100644
--- a/xref/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.html
+++ b/xref/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.html
@@ -149,112 +149,127 @@
 139       " readQueues=" + numReadQueues + " readHandlers=" + readHandlersCount +
 140       ((numScanQueues == 0) ? "" : " scanQueues=" + numScanQueues +
 141         " scanHandlers=" + scanHandlersCount));
-142 
-143     for (int i = 0; i < numWriteQueues; ++i) {
-144       queues.add((BlockingQueue<CallRunner>)
-145         ReflectionUtils.newInstance(writeQueueClass, writeQueueInitArgs));
+142     if (writeQueueInitArgs.length > 0) {
+143       currentQueueLimit = (int) writeQueueInitArgs[0];
+144       writeQueueInitArgs[0] = Math.max((int) writeQueueInitArgs[0],
+145         DEFAULT_CALL_QUEUE_SIZE_HARD_LIMIT);
 146     }
-147 
-148     for (int i = 0; i < (numReadQueues + numScanQueues); ++i) {
+147     for (int i = 0; i < numWriteQueues; ++i) {
+148 
 149       queues.add((BlockingQueue<CallRunner>)
-150         ReflectionUtils.newInstance(readQueueClass, readQueueInitArgs));
+150         ReflectionUtils.newInstance(writeQueueClass, writeQueueInitArgs));
 151     }
-152   }
-153 
-154   @Override
-155   protected void startHandlers(final int port) {
-156     startHandlers(".write", writeHandlersCount, queues, 0, numWriteQueues, port);
-157     startHandlers(".read", readHandlersCount, queues, numWriteQueues, numReadQueues, port);
-158     startHandlers(".scan", scanHandlersCount, queues,
-159       numWriteQueues + numReadQueues, numScanQueues, port);
-160   }
-161 
-162   @Override
-163   public boolean dispatch(final CallRunner callTask) throws InterruptedException {
-164     RpcServer.Call call = callTask.getCall();
-165     int queueIndex;
-166     if (isWriteRequest(call.getHeader(), call.param)) {
-167       queueIndex = writeBalancer.getNextQueue();
-168     } else if (numScanQueues > 0 && isScanRequest(call.getHeader(), call.param)) {
-169       queueIndex = numWriteQueues +
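Both executors above now apply the same pattern: the first queue init argument becomes a soft limit (currentQueueLimit), the backing queue is sized to at least DEFAULT_CALL_QUEUE_SIZE_HARD_LIMIT so the soft limit can later be raised without rebuilding queues, and dispatch() rejects work once the soft limit is reached instead of letting the backlog grow. A rough standalone sketch of that pattern, with invented names rather than the HBase classes:

    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.LinkedBlockingQueue;

    // Sketch of the soft-limit dispatch above: the queue's capacity is the hard limit,
    // rejection happens at the smaller, runtime-tunable soft limit.
    public class SoftLimitDispatchSketch {
      static final int HARD_LIMIT = 250;      // mirrors DEFAULT_CALL_QUEUE_SIZE_HARD_LIMIT
      private final BlockingQueue<Runnable> queue = new LinkedBlockingQueue<Runnable>(HARD_LIMIT);
      private volatile int currentQueueLimit = 100; // soft limit

      boolean dispatch(Runnable task) {
        if (queue.size() >= currentQueueLimit) {
          return false;                       // caller gets an immediate rejection
        }
        return queue.offer(task);
      }

      void resize(int newSoftLimit) {         // what a resizeQueues(conf)-style hook would do
        currentQueueLimit = Math.min(newSoftLimit, HARD_LIMIT);
      }

      public static void main(String[] args) {
        SoftLimitDispatchSketch d = new SoftLimitDispatchSketch();
        for (int i = 0; i < 150; i++) {
          if (!d.dispatch(new Runnable() { @Override public void run() { } })) {
            System.out.println("rejected at call #" + i); // prints 100
            break;
          }
        }
      }
    }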

[25/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcExecutor.RandomQueueBalancer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcExecutor.RandomQueueBalancer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcExecutor.RandomQueueBalancer.html
index 88ac539..5754645 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcExecutor.RandomQueueBalancer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcExecutor.RandomQueueBalancer.html
@@ -50,175 +50,186 @@
 042public abstract class RpcExecutor {
 043  private static final Log LOG = 
LogFactory.getLog(RpcExecutor.class);
 044
-045  private final AtomicInteger 
activeHandlerCount = new AtomicInteger(0);
-046  private final ListThread 
handlers;
-047  private final int handlerCount;
-048  private final String name;
-049  private final AtomicInteger 
failedHandlerCount = new AtomicInteger(0);
-050
-051  private boolean running;
-052
-053  private Configuration conf = null;
-054  private Abortable abortable = null;
+045  protected static final int 
DEFAULT_CALL_QUEUE_SIZE_HARD_LIMIT = 250;
+046  protected volatile int 
currentQueueLimit;
+047
+048  private final AtomicInteger 
activeHandlerCount = new AtomicInteger(0);
+049  private final ListThread 
handlers;
+050  private final int handlerCount;
+051  private final String name;
+052  private final AtomicInteger 
failedHandlerCount = new AtomicInteger(0);
+053
+054  private boolean running;
 055
-056  public RpcExecutor(final String name, 
final int handlerCount) {
-057this.handlers = new 
ArrayListThread(handlerCount);
-058this.handlerCount = handlerCount;
-059this.name = 
Strings.nullToEmpty(name);
-060  }
-061
-062  public RpcExecutor(final String name, 
final int handlerCount, final Configuration conf,
-063  final Abortable abortable) {
-064this(name, handlerCount);
-065this.conf = conf;
-066this.abortable = abortable;
-067  }
-068
-069  public void start(final int port) {
-070running = true;
-071startHandlers(port);
-072  }
-073
-074  public void stop() {
-075running = false;
-076for (Thread handler : handlers) {
-077  handler.interrupt();
-078}
-079  }
-080
-081  public int getActiveHandlerCount() {
-082return activeHandlerCount.get();
-083  }
-084
-085  /** Returns the length of the pending 
queue */
-086  public abstract int getQueueLength();
+056  private Configuration conf = null;
+057  private Abortable abortable = null;
+058
+059  public RpcExecutor(final String name, 
final int handlerCount) {
+060this.handlers = new 
ArrayListThread(handlerCount);
+061this.handlerCount = handlerCount;
+062this.name = 
Strings.nullToEmpty(name);
+063  }
+064
+065  public RpcExecutor(final String name, 
final int handlerCount, final Configuration conf,
+066  final Abortable abortable) {
+067this(name, handlerCount);
+068this.conf = conf;
+069this.abortable = abortable;
+070  }
+071
+072  public void start(final int port) {
+073running = true;
+074startHandlers(port);
+075  }
+076
+077  public void stop() {
+078running = false;
+079for (Thread handler : handlers) {
+080  handler.interrupt();
+081}
+082  }
+083
+084  public int getActiveHandlerCount() {
+085return activeHandlerCount.get();
+086  }
 087
-088  /** Add the request to the executor 
queue */
-089  public abstract boolean dispatch(final 
CallRunner callTask) throws InterruptedException;
+088  /** Returns the length of the pending 
queue */
+089  public abstract int getQueueLength();
 090
-091  /** Returns the list of request queues 
*/
-092  protected abstract 
ListBlockingQueueCallRunner getQueues();
+091  /** Add the request to the executor 
queue */
+092  public abstract boolean dispatch(final 
CallRunner callTask) throws InterruptedException;
 093
-094  protected void startHandlers(final int 
port) {
-095
ListBlockingQueueCallRunner callQueues = getQueues();
-096startHandlers(null, handlerCount, 
callQueues, 0, callQueues.size(), port);
-097  }
-098
-099  protected void startHandlers(final 
String nameSuffix, final int numHandlers,
-100  final 
ListBlockingQueueCallRunner callQueues,
-101  final int qindex, final int qsize, 
final int port) {
-102final String threadPrefix = name + 
Strings.nullToEmpty(nameSuffix);
-103for (int i = 0; i  numHandlers; 
i++) {
-104  final int index = qindex + (i % 
qsize);
-105  Thread t = new Thread(new 
Runnable() {
-106@Override
-107public void run() {
-108  
consumerLoop(callQueues.get(index));
-109}
-110  });
-111  t.setDaemon(true);
-112  t.setName(threadPrefix + 
"RpcServer.handler=" + handlers.size() +
-113",queue=" + index + ",port=" + 
port);
-114  t.start();
-115  LOG.debug(threadPrefix + " Start 
Handler index=" + handlers.size() 
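The handler-startup part of the listing is the usual fixed-pool pattern: handler threads are assigned round-robin to the call queues and each daemon thread drains exactly one queue until stop() interrupts it. A compressed, self-contained sketch of that pattern, with invented names:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.LinkedBlockingQueue;

    // Condensed sketch of startHandlers() above: handlers are spread over the queues
    // with index = qindex + (i % qsize), and each daemon thread drains one queue.
    public class HandlerStartupSketch {
      public static void main(String[] args) throws Exception {
        final List<BlockingQueue<Runnable>> queues = new ArrayList<BlockingQueue<Runnable>>();
        for (int q = 0; q < 2; q++) {
          queues.add(new LinkedBlockingQueue<Runnable>());
        }
        int numHandlers = 4;
        for (int i = 0; i < numHandlers; i++) {
          final int index = i % queues.size();   // round-robin queue assignment
          Thread t = new Thread(new Runnable() {
            @Override public void run() {
              try {
                while (true) {
                  queues.get(index).take().run(); // stands in for consumerLoop(queue)
                }
              } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // stop() interrupts handlers the same way
              }
            }
          });
          t.setDaemon(true);
          t.setName("handler=" + i + ",queue=" + index);
          t.start();
        }
        queues.get(0).put(new Runnable() {
          @Override public void run() {
            System.out.println("handled on " + Thread.currentThread().getName());
          }
        });
        Thread.sleep(200); // give a daemon handler time to pick the task up
      }
    }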

[28/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.html
index d513b63..270312a 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.html
@@ -98,12 +98,17 @@
 
 
 
+
+All Implemented Interfaces:
+ConfigurationObserver
+
 
 
 @InterfaceAudience.LimitedPrivate(value={"Coprocesssor","Phoenix"})
 @InterfaceStability.Evolving
-public class SimpleRpcScheduler
-extends RpcScheduler
+public class SimpleRpcScheduler
+extends RpcScheduler
+implements ConfigurationObserver
 A scheduler that maintains isolated handler pools for 
general,
  high-priority, and replication requests.
 
@@ -302,11 +307,17 @@ extends 
 
 void
+onConfigurationChange(org.apache.hadoop.conf.Configurationconf)
+Resize call queues;
+
+
+
+void
 start()
 Prepares for request serving.
 
 
-
+
 void
 stop()
 Stops serving new requests.
@@ -340,7 +351,7 @@ extends 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.apache.commons.logging.Log LOG
 
 
 
@@ -349,7 +360,7 @@ extends 
 
 CALL_QUEUE_READ_SHARE_CONF_KEY
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CALL_QUEUE_READ_SHARE_CONF_KEY
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CALL_QUEUE_READ_SHARE_CONF_KEY
 See Also:Constant
 Field Values
 
 
@@ -359,7 +370,7 @@ extends 
 
 CALL_QUEUE_SCAN_SHARE_CONF_KEY
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CALL_QUEUE_SCAN_SHARE_CONF_KEY
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CALL_QUEUE_SCAN_SHARE_CONF_KEY
 See Also:Constant
 Field Values
 
 
@@ -369,7 +380,7 @@ extends 
 
 CALL_QUEUE_HANDLER_FACTOR_CONF_KEY
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CALL_QUEUE_HANDLER_FACTOR_CONF_KEY
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CALL_QUEUE_HANDLER_FACTOR_CONF_KEY
 See Also:Constant
 Field Values
 
 
@@ -379,7 +390,7 @@ extends 
 
 CALL_QUEUE_TYPE_CONF_KEY
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CALL_QUEUE_TYPE_CONF_KEY
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CALL_QUEUE_TYPE_CONF_KEY
 If set to 'deadline', uses a priority queue and 
deprioritize long-running scans
 See Also:Constant
 Field Values
 
@@ -390,7 +401,7 @@ extends 
 
 CALL_QUEUE_TYPE_DEADLINE_CONF_VALUE
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CALL_QUEUE_TYPE_DEADLINE_CONF_VALUE
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CALL_QUEUE_TYPE_DEADLINE_CONF_VALUE
 See Also:Constant
 Field Values
 
 
@@ -400,7 +411,7 @@ extends 
 
 CALL_QUEUE_TYPE_FIFO_CONF_VALUE
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CALL_QUEUE_TYPE_FIFO_CONF_VALUE
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CALL_QUEUE_TYPE_FIFO_CONF_VALUE
 See Also:Constant
 Field Values
 
 
@@ -410,7 +421,7 @@ extends 
 
 QUEUE_MAX_CALL_DELAY_CONF_KEY
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String QUEUE_MAX_CALL_DELAY_CONF_KEY
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String QUEUE_MAX_CALL_DELAY_CONF_KEY
 max delay in msec used to bound the deprioritized 
requests
 See Also:Constant
 Field Values
 
@@ -421,7 +432,7 @@ extends 
 
 port
-privateint port
+privateint port
 
 
 
@@ -430,7 +441,7 @@ extends 
 
 priority
-private finalPriorityFunction priority
+private finalPriorityFunction priority
 
 
 
@@ -439,7 +450,7 @@ extends 
 
 callExecutor
-private finalRpcExecutor callExecutor
+private 
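This generated page records the two scheduler-facing pieces of the change: SimpleRpcScheduler now implements ConfigurationObserver (so its call queues can be resized on a configuration reload), and the call-queue type constants ("deadline", "fifo") remain the switch that decides which queue implementation backs the general queue. Purely as an illustration, not the HBase factory code, a scheduler could select the queue like this:

    import java.util.Comparator;
    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.PriorityBlockingQueue;

    // Illustrative queue selection keyed off "hbase.ipc.server.callqueue.type";
    // the real scheduler uses its own bounded queue classes.
    public class CallQueueChoiceSketch {
      static BlockingQueue<Runnable> newCallQueue(String type, int maxLength,
          Comparator<Runnable> deadlineComparator) {
        if ("deadline".equals(type)) {
          return new PriorityBlockingQueue<Runnable>(maxLength, deadlineComparator);
        }
        return new LinkedBlockingQueue<Runnable>(maxLength); // "fifo" and the default
      }

      public static void main(String[] args) {
        BlockingQueue<Runnable> q = newCallQueue("fifo", 100, null);
        System.out.println(q.getClass().getSimpleName()); // LinkedBlockingQueue
      }
    }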

[18/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.html
index e5d9af6..ea9ea4d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.html
@@ -2107,556 +2107,559 @@
 2099  @Override
 2100  public void onConfigurationChange(Configuration newConf) {
 2101    initReconfigurable(newConf);
-2102  }
-2103
-2104  private void initReconfigurable(Configuration confToLoad) {
-2105    this.allowFallbackToSimpleAuth = confToLoad.getBoolean(FALLBACK_TO_INSECURE_CLIENT_AUTH, false);
-2106    if (isSecurityEnabled && allowFallbackToSimpleAuth) {
-2107      LOG.warn("* WARNING! *");
-2108      LOG.warn("This server is configured to allow connections from INSECURE clients");
-2109      LOG.warn("(" + FALLBACK_TO_INSECURE_CLIENT_AUTH + " = true).");
-2110      LOG.warn("While this option is enabled, client identities cannot be secured, and user");
-2111      LOG.warn("impersonation is possible!");
-2112      LOG.warn("For secure operation, please disable SIMPLE authentication as soon as possible,");
-2113      LOG.warn("by setting " + FALLBACK_TO_INSECURE_CLIENT_AUTH + " = false in hbase-site.xml");
-2114      LOG.warn("");
-2115    }
-2116  }
-2117
-2118  /**
-2119   * Subclasses of HBaseServer can override this to provide their own
-2120   * Connection implementations.
-2121   */
-2122  protected Connection getConnection(SocketChannel channel, long time) {
-2123    return new Connection(channel, time);
-2124  }
-2125
-2126  /**
-2127   * Setup response for the RPC Call.
-2128   *
-2129   * @param response buffer to serialize the response into
-2130   * @param call {@link Call} to which we are setting up the response
-2131   * @param error error message, if the call failed
-2132   * @throws IOException
-2133   */
-2134  private void setupResponse(ByteArrayOutputStream response, Call call, Throwable t, String error)
-2135  throws IOException {
-2136    if (response != null) response.reset();
-2137    call.setResponse(null, null, t, error);
-2138  }
-2139
-2140  protected void closeConnection(Connection connection) {
-2141    synchronized (connectionList) {
-2142      if (connectionList.remove(connection)) {
-2143        numConnections--;
-2144      }
-2145    }
-2146    connection.close();
-2147  }
-2148
-2149  Configuration getConf() {
-2150    return conf;
-2151  }
-2152
-2153  /** Sets the socket buffer size used for responding to RPCs.
-2154   * @param size send size
-2155   */
-2156  @Override
-2157  public void setSocketSendBufSize(int size) { this.socketSendBufferSize = size; }
-2158
+2102    if (scheduler instanceof ConfigurationObserver) {
+2103      ((ConfigurationObserver)scheduler).onConfigurationChange(newConf);
+2104    }
+2105  }
+2106
+2107  private void initReconfigurable(Configuration confToLoad) {
+2108    this.allowFallbackToSimpleAuth = confToLoad.getBoolean(FALLBACK_TO_INSECURE_CLIENT_AUTH, false);
+2109    if (isSecurityEnabled && allowFallbackToSimpleAuth) {
+2110      LOG.warn("* WARNING! *");
+2111      LOG.warn("This server is configured to allow connections from INSECURE clients");
+2112      LOG.warn("(" + FALLBACK_TO_INSECURE_CLIENT_AUTH + " = true).");
+2113      LOG.warn("While this option is enabled, client identities cannot be secured, and user");
+2114      LOG.warn("impersonation is possible!");
+2115      LOG.warn("For secure operation, please disable SIMPLE authentication as soon as possible,");
+2116      LOG.warn("by setting " + FALLBACK_TO_INSECURE_CLIENT_AUTH + " = false in hbase-site.xml");
+2117      LOG.warn("");
+2118    }
+2119  }
+2120
+2121  /**
+2122   * Subclasses of HBaseServer can override this to provide their own
+2123   * Connection implementations.
+2124   */
+2125  protected Connection getConnection(SocketChannel channel, long time) {
+2126    return new Connection(channel, time);
+2127  }
+2128
+2129  /**
+2130   * Setup response for the RPC Call.
+2131   *
+2132   * @param response buffer to serialize the response into
+2133   * @param call {@link Call} to which we are setting up the response
+2134   * @param error error message, if the call failed
+2135   * @throws IOException
+2136   */
+2137  private void setupResponse(ByteArrayOutputStream response, Call call, Throwable t, String error)
+2138  throws IOException {
+2139    if (response != null) response.reset();
+2140    call.setResponse(null, null, t, error);
+2141  }
+2142
+2143  protected void closeConnection(Connection connection) {
+2144    synchronized (connectionList) {
+2145      if
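The two added lines at the top of this hunk are the wiring that makes the scheduler's queue limits live-tunable: when RpcServer is notified of a configuration change it forwards the new Configuration to the scheduler, but only if the scheduler implements ConfigurationObserver. A minimal sketch of that hand-off, using invented names and a plain map standing in for Configuration:

    import java.util.HashMap;
    import java.util.Map;

    // Sketch of the observer hand-off above; interface and class names are invented,
    // and a Map stands in for org.apache.hadoop.conf.Configuration.
    public class ConfigChangeSketch {
      interface ConfigObserver {
        void onConfigurationChange(Map<String, String> newConf);
      }

      static class Scheduler implements ConfigObserver {
        volatile int maxQueueLength = 100;
        @Override public void onConfigurationChange(Map<String, String> newConf) {
          String v = newConf.get("hbase.ipc.server.max.callqueue.length");
          if (v != null) {
            maxQueueLength = Integer.parseInt(v); // pick up the new limit without a restart
          }
        }
      }

      static class Server {
        private final Object scheduler = new Scheduler(); // scheduler type is opaque here
        void onConfigurationChange(Map<String, String> newConf) {
          if (scheduler instanceof ConfigObserver) {       // same guard as in the hunk
            ((ConfigObserver) scheduler).onConfigurationChange(newConf);
          }
        }
      }

      public static void main(String[] args) {
        Server server = new Server();
        Map<String, String> conf = new HashMap<String, String>();
        conf.put("hbase.ipc.server.max.callqueue.length", "300");
        server.onConfigurationChange(conf); // scheduler now sees maxQueueLength = 300
      }
    }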

[01/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site d02dd5db7 -> 89b638a43


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/xref/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
--
diff --git 
a/xref/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html 
b/xref/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
index eed4399..65d184b 100644
--- a/xref/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
+++ b/xref/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
@@ -36,27 +36,27 @@
 26  
 27  {
 28private final 
HRegionServer
 regionServer;
-29private final 
String format;
-30private final 
String filter;
-31private final 
String bcv;
-32private final 
String bcn;
+29private final 
String bcv;
+30private final 
String bcn;
+31private final 
String format;
+32private final 
String filter;
 33protected static 
org.apache.hadoop.hbase.tmpl.regionserver.RSStatusTmpl.ImplData 
__jamon_setOptionalArguments(org.apache.hadoop.hbase.tmpl.regionserver.RSStatusTmpl.ImplData
 p_implData)
 34{
-35  if(! p_implData.getFormat__IsNotDefault())
+35  if(! p_implData.getBcv__IsNotDefault())
 36  {
-37
p_implData.setFormat("html");
+37
p_implData.setBcv("");
 38  }
-39  if(! p_implData.getFilter__IsNotDefault())
+39  if(! p_implData.getBcn__IsNotDefault())
 40  {
-41
p_implData.setFilter("general");
+41
p_implData.setBcn("");
 42  }
-43  if(! p_implData.getBcv__IsNotDefault())
+43  if(! p_implData.getFormat__IsNotDefault())
 44  {
-45
p_implData.setBcv("");
+45
p_implData.setFormat("html");
 46  }
-47  if(! p_implData.getBcn__IsNotDefault())
+47  if(! p_implData.getFilter__IsNotDefault())
 48  {
-49
p_implData.setBcn("");
+49
p_implData.setFilter("general");
 50  }
 51  return p_implData;
 52}
@@ -64,10 +64,10 @@
 54{
 55  super(p_templateManager, 
__jamon_setOptionalArguments(p_implData));
 56  regionServer = 
p_implData.getRegionServer();
-57  format = 
p_implData.getFormat();
-58  filter = 
p_implData.getFilter();
-59  bcv = 
p_implData.getBcv();
-60  bcn = 
p_implData.getBcn();
+57  bcv = 
p_implData.getBcv();
+58  bcn = 
p_implData.getBcn();
+59  format = 
p_implData.getFormat();
+60  filter = 
p_implData.getFilter();
 61}
 62
 63@Override public void 
renderNoFlush(final java.io.Writer 
jamonWriter)
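The reordered Jamon setters above all follow one defaulting protocol: each optional template argument carries an __IsNotDefault flag, and __jamon_setOptionalArguments only assigns the default ("html", "general", "") when the flag was never set. A tiny sketch of that protocol with an invented class name:

    // Sketch of the Jamon optional-argument defaulting seen above:
    // a default is applied only if the setter was never called.
    public class OptionalArgSketch {
      private String format;
      private boolean format__IsNotDefault;

      public void setFormat(String format) {
        this.format = format;
        this.format__IsNotDefault = true;
      }

      public String getFormat() {
        return format;
      }

      public boolean getFormat__IsNotDefault() {
        return format__IsNotDefault;
      }

      static OptionalArgSketch applyDefaults(OptionalArgSketch data) {
        if (!data.getFormat__IsNotDefault()) {
          data.setFormat("html"); // same default the generated code assigns to "format"
        }
        return data;
      }

      public static void main(String[] args) {
        OptionalArgSketch data = applyDefaults(new OptionalArgSketch());
        System.out.println(data.getFormat()); // html
      }
    }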



[29/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/org/apache/hadoop/hbase/ipc/RpcExecutor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/RpcExecutor.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/RpcExecutor.html
index aad6bcd..a7adf6e 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/RpcExecutor.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/RpcExecutor.html
@@ -159,6 +159,14 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 conf
 
 
+protected int
+currentQueueLimit
+
+
+protected static int
+DEFAULT_CALL_QUEUE_SIZE_HARD_LIMIT
+
+
 private http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicInteger
 failedHandlerCount
 
@@ -259,13 +267,19 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 void
-start(intport)
+resizeQueues(org.apache.hadoop.conf.Configuration conf)
+Update current soft limit for executor's call queues
+
 
 
+void
+start(intport)
+
+
 protected void
 startHandlers(intport)
 
-
+
 protected void
 startHandlers(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringnameSuffix,
   intnumHandlers,
@@ -274,7 +288,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
   intqsize,
   intport)
 
-
+
 void
 stop()
 
@@ -309,13 +323,32 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 private static finalorg.apache.commons.logging.Log LOG
 
 
+
+
+
+
+
+DEFAULT_CALL_QUEUE_SIZE_HARD_LIMIT
+protected static final int DEFAULT_CALL_QUEUE_SIZE_HARD_LIMIT
+See Also:Constant
 Field Values
+
+
+
+
+
+
+
+currentQueueLimit
+protected volatile int currentQueueLimit
+
+
 
 
 
 
 
 activeHandlerCount
-private finalhttp://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger activeHandlerCount
+private finalhttp://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger activeHandlerCount
 
 
 
@@ -324,7 +357,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 handlers
-private finalhttp://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/7/docs/api/java/lang/Thread.html?is-external=true;
 title="class or interface in java.lang">Thread handlers
+private finalhttp://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/7/docs/api/java/lang/Thread.html?is-external=true;
 title="class or interface in java.lang">Thread handlers
 
 
 
@@ -333,7 +366,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 handlerCount
-private finalint handlerCount
+private finalint handlerCount
 
 
 
@@ -342,7 +375,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 name
-private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String name
+private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String name
 
 
 
@@ -351,7 +384,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 failedHandlerCount
-private finalhttp://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger failedHandlerCount
+private finalhttp://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger failedHandlerCount
 
 
 
@@ -360,7 +393,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 running
-privateboolean running
+privateboolean running
 
 
 
@@ -369,7 +402,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 conf
-privateorg.apache.hadoop.conf.Configuration conf
+privateorg.apache.hadoop.conf.Configuration conf
 
 
 
@@ -378,7 +411,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 abortable
-privateAbortable abortable
+privateAbortable abortable
 
 
 
@@ -387,7 +420,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 ONE_QUEUE
-private staticRpcExecutor.QueueBalancer ONE_QUEUE
+private staticRpcExecutor.QueueBalancer ONE_QUEUE
 All requests go to the first 

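The new RpcExecutor members summarized above (resizeQueues(Configuration), the volatile currentQueueLimit field, and DEFAULT_CALL_QUEUE_SIZE_HARD_LIMIT) suggest a split between a fixed hard queue capacity and a soft limit that can be re-read from configuration at runtime. A minimal sketch of that idea, using a placeholder config key and no HBase types:

    // Sketch only: a queue with a fixed hard capacity and a runtime-adjustable
    // soft limit. The config key and class name are placeholders, not HBase's.
    import java.util.Properties;
    import java.util.concurrent.LinkedBlockingQueue;

    public class SoftLimitedQueueSketch {
      private static final int HARD_LIMIT = 2_000_000;                 // never exceeded
      private final LinkedBlockingQueue<Runnable> queue =
          new LinkedBlockingQueue<>(HARD_LIMIT);
      private volatile int currentQueueLimit = 100;                    // soft, reloadable

      // Analogue of resizeQueues(conf): re-read the soft limit on a config reload.
      public void resizeQueues(Properties conf) {
        currentQueueLimit =
            Integer.parseInt(conf.getProperty("example.callqueue.length", "100"));
      }

      // Reject new work once the soft limit is reached, well before the hard limit.
      public boolean dispatch(Runnable task) {
        if (queue.size() >= currentQueueLimit) {
          return false;                                                // caller sees "busy"
        }
        return queue.offer(task);
      }
    }
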
[04/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/xref/org/apache/hadoop/hbase/ipc/RpcServer.html
--
diff --git a/xref/org/apache/hadoop/hbase/ipc/RpcServer.html 
b/xref/org/apache/hadoop/hbase/ipc/RpcServer.html
index 9d9e774..4d97d71 100644
--- a/xref/org/apache/hadoop/hbase/ipc/RpcServer.html
+++ b/xref/org/apache/hadoop/hbase/ipc/RpcServer.html
@@ -2109,556 +2109,559 @@
 2099   @Override
 2100   public void 
onConfigurationChange(Configuration newConf) {
 2101 
initReconfigurable(newConf);
-2102   }
-2103 
-2104   private void 
initReconfigurable(Configuration confToLoad) {
-2105 this.allowFallbackToSimpleAuth = 
confToLoad.getBoolean(FALLBACK_TO_INSECURE_CLIENT_AUTH, false);
-2106 if (isSecurityEnabled && allowFallbackToSimpleAuth) {
-2107   
LOG.warn("* WARNING! *");
-2108   
LOG.warn("This server is configured to allow 
connections from INSECURE clients");
-2109   
LOG.warn("(" + FALLBACK_TO_INSECURE_CLIENT_AUTH 
+ " = true).");
-2110   
LOG.warn("While this option is enabled, client 
identities cannot be secured, and user");
-2111   
LOG.warn("impersonation is possible!");
-2112   
LOG.warn("For secure operation, please disable SIMPLE 
authentication as soon as possible,");
-2113   
LOG.warn("by setting " + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = false in 
hbase-site.xml");
-2114   
LOG.warn("");
-2115 }
-2116   }
-2117 
-2118   /**
-2119* Subclasses of HBaseServer can override this to 
provide their own
-2120* Connection implementations.
-2121*/
-2122   protected Connection
 getConnection(SocketChannel channel, long 
time) {
-2123 return new Connection(channel,
 time);
-2124   }
-2125 
-2126   /**
-2127* Setup response for the RPC Call.
-2128*
-2129* @param response buffer to serialize the 
response into
-2130* @param call {@link Call} to which we are 
setting up the response
-2131* @param error error message, if the call 
failed
-2132* @throws IOException
-2133*/
-2134   private void 
setupResponse(ByteArrayOutputStream
 response, Call call, 
Throwable t, String error)
-2135   throws IOException {
-2136 if (response != null) response.reset();
-2137 
call.setResponse(null, null, t, error);
-2138   }
-2139 
-2140   protected void closeConnection(Connection
 connection) {
-2141 synchronized (connectionList) {
-2142   if (connectionList.remove(connection)) {
-2143 
numConnections--;
-2144   }
-2145 }
-2146 
connection.close();
-2147   }
-2148 
-2149   Configuration 
getConf() {
-2150 return conf;
-2151   }
-2152 
-2153   /** Sets the 
socket buffer size used for responding to RPCs.
-2154* @param size send size
-2155*/
-2156   @Override
-2157   public void 
setSocketSendBufSize(int size) { this.socketSendBufferSize = size; }
-2158 
+2102 if (scheduler instanceof ConfigurationObserver) {
+2103   ((ConfigurationObserver)scheduler).onConfigurationChange(newConf);
+2104 }
+2105   }
+2106 
+2107   private void 
initReconfigurable(Configuration confToLoad) {
+2108 this.allowFallbackToSimpleAuth = 
confToLoad.getBoolean(FALLBACK_TO_INSECURE_CLIENT_AUTH, false);
+2109 if (isSecurityEnabled && allowFallbackToSimpleAuth) {
+2110   
LOG.warn("* WARNING! *");
+2111   
LOG.warn("This server is configured to allow 
connections from INSECURE clients");
+2112   
LOG.warn("(" + FALLBACK_TO_INSECURE_CLIENT_AUTH 
+ " = true).");
+2113   
LOG.warn("While this option is enabled, client 
identities cannot be secured, and user");
+2114   
LOG.warn("impersonation is possible!");
+2115   
LOG.warn("For secure operation, please disable SIMPLE 
authentication as soon as possible,");
+2116   
LOG.warn("by setting " + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = false in 
hbase-site.xml");
+2117   
LOG.warn("");
+2118 }
+2119   }
+2120 
+2121   /**
+2122* Subclasses of HBaseServer can override this to 
provide their own
+2123* Connection implementations.
+2124*/
+2125   protected Connection
 getConnection(SocketChannel channel, long 
time) {
+2126 return new Connection(channel,
 time);
+2127   }
+2128 
+2129   /**
+2130* Setup response for the RPC Call.
+2131*
+2132* @param response buffer to serialize the 
response into
+2133* @param call {@link Call} to which we are 
setting up the response
+2134* @param error error message, if the call 
failed
+2135* @throws IOException
+2136*/
+2137   private void 
setupResponse(ByteArrayOutputStream
 response, Call call, 
Throwable t, String error)
+2138   throws IOException {
+2139 if (response != null) response.reset();
+2140 
call.setResponse(null, null, t, error);
+2141   }
+2142 
+2143   protected void closeConnection(Connection
 connection) {
+2144 synchronized (connectionList) 

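The only functional change in the RpcServer hunk above is in onConfigurationChange: after re-reading its own settings via initReconfigurable, the server now also forwards the new Configuration to its scheduler when the scheduler implements ConfigurationObserver, which is what allows call-queue settings to be picked up at runtime. A stripped-down sketch of that delegation (interface and class names are simplified stand-ins, not the actual org.apache.hadoop.hbase types):

    // Sketch of the observer delegation shown in the diff above.
    import java.util.Properties;

    interface ConfigObserver {
      void onConfigurationChange(Properties newConf);
    }

    class SchedulerSketch implements ConfigObserver {
      @Override
      public void onConfigurationChange(Properties newConf) {
        // e.g. re-read queue lengths, handler counts, queue type ...
      }
    }

    class ServerSketch implements ConfigObserver {
      // The declared type is not an observer, so the forwarding is guarded.
      private final Object scheduler = new SchedulerSketch();

      @Override
      public void onConfigurationChange(Properties newConf) {
        // server-local settings would be refreshed here first
        if (scheduler instanceof ConfigObserver) {
          ((ConfigObserver) scheduler).onConfigurationChange(newConf);
        }
      }
    }
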
[27/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html 
b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
index 257a2c6..515914b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
@@ -379,166 +379,166 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 privateHMaster m_master
 
 
-
+
 
 
 
 
-m_frags
-privatehttp://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer m_frags
+m_serverManager
+privateServerManager m_serverManager
 
 
-
+
 
 
 
 
-m_frags__IsNotDefault
-privateboolean m_frags__IsNotDefault
+m_serverManager__IsNotDefault
+privateboolean m_serverManager__IsNotDefault
 
 
-
+
 
 
 
 
-m_catalogJanitorEnabled
-privateboolean m_catalogJanitorEnabled
+m_metaLocation
+privateServerName m_metaLocation
 
 
-
+
 
 
 
 
-m_catalogJanitorEnabled__IsNotDefault
-privateboolean m_catalogJanitorEnabled__IsNotDefault
+m_metaLocation__IsNotDefault
+privateboolean m_metaLocation__IsNotDefault
 
 
-
+
 
 
 
 
-m_filter
-privatehttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String m_filter
+m_frags
+privatehttp://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer m_frags
 
 
-
+
 
 
 
 
-m_filter__IsNotDefault
-privateboolean m_filter__IsNotDefault
+m_frags__IsNotDefault
+privateboolean m_frags__IsNotDefault
 
 
-
+
 
 
 
 
-m_deadServers
-privatehttp://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">SetServerName m_deadServers
+m_filter
+privatehttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String m_filter
 
 
-
+
 
 
 
 
-m_deadServers__IsNotDefault
-privateboolean m_deadServers__IsNotDefault
+m_filter__IsNotDefault
+privateboolean m_filter__IsNotDefault
 
 
-
+
 
 
 
 
-m_format
-privatehttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String m_format
+m_deadServers
+privatehttp://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">SetServerName m_deadServers
 
 
-
+
 
 
 
 
-m_format__IsNotDefault
-privateboolean m_format__IsNotDefault
+m_deadServers__IsNotDefault
+privateboolean m_deadServers__IsNotDefault
 
 
-
+
 
 
 
 
-m_servers
-privatehttp://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName m_servers
+m_format
+privatehttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String m_format
 
 
-
+
 
 
 
 
-m_servers__IsNotDefault
-privateboolean m_servers__IsNotDefault
+m_format__IsNotDefault
+privateboolean m_format__IsNotDefault
 
 
-
+
 
 
 
 
-m_metaLocation
-privateServerName m_metaLocation
+m_assignmentManager
+privateAssignmentManager m_assignmentManager
 
 
-
+
 
 
 
 
-m_metaLocation__IsNotDefault
-privateboolean m_metaLocation__IsNotDefault
+m_assignmentManager__IsNotDefault
+privateboolean m_assignmentManager__IsNotDefault
 
 
-
+
 
 
 
 
-m_assignmentManager
-privateAssignmentManager m_assignmentManager
+m_servers
+privatehttp://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName m_servers
 
 
-
+
 
 
 
 
-m_assignmentManager__IsNotDefault
-privateboolean m_assignmentManager__IsNotDefault
+m_servers__IsNotDefault
+privateboolean m_servers__IsNotDefault
 
 
-
+
 
 
 
 
-m_serverManager
-privateServerManager m_serverManager
+m_catalogJanitorEnabled
+privateboolean m_catalogJanitorEnabled
 
 
-
+
 
 
 
 
-m_serverManager__IsNotDefault
-privateboolean m_serverManager__IsNotDefault
+m_catalogJanitorEnabled__IsNotDefault
+privateboolean m_catalogJanitorEnabled__IsNotDefault
 
 
 
@@ -584,247 +584,247 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 publicHMastergetMaster()
 
 
-
+
 
 
 
 
-setFrags

[23/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Call.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Call.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Call.html
index e5d9af6..ea9ea4d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Call.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Call.html
@@ -2107,556 +2107,559 @@
 2099  @Override
 2100  public void 
onConfigurationChange(Configuration newConf) {
 2101initReconfigurable(newConf);
-2102  }
-2103
-2104  private void 
initReconfigurable(Configuration confToLoad) {
-2105this.allowFallbackToSimpleAuth = 
confToLoad.getBoolean(FALLBACK_TO_INSECURE_CLIENT_AUTH, false);
-2106if (isSecurityEnabled && allowFallbackToSimpleAuth) {
-2107  LOG.warn("* WARNING! 
*");
-2108  LOG.warn("This server is 
configured to allow connections from INSECURE clients");
-2109  LOG.warn("(" + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = true).");
-2110  LOG.warn("While this option is 
enabled, client identities cannot be secured, and user");
-2111  LOG.warn("impersonation is 
possible!");
-2112  LOG.warn("For secure operation, 
please disable SIMPLE authentication as soon as possible,");
-2113  LOG.warn("by setting " + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = false in hbase-site.xml");
-2114  
LOG.warn("");
-2115}
-2116  }
-2117
-2118  /**
-2119   * Subclasses of HBaseServer can 
override this to provide their own
-2120   * Connection implementations.
-2121   */
-2122  protected Connection 
getConnection(SocketChannel channel, long time) {
-2123return new Connection(channel, 
time);
-2124  }
-2125
-2126  /**
-2127   * Setup response for the RPC Call.
-2128   *
-2129   * @param response buffer to serialize 
the response into
-2130   * @param call {@link Call} to which 
we are setting up the response
-2131   * @param error error message, if the 
call failed
-2132   * @throws IOException
-2133   */
-2134  private void 
setupResponse(ByteArrayOutputStream response, Call call, Throwable t, String 
error)
-2135  throws IOException {
-2136if (response != null) 
response.reset();
-2137call.setResponse(null, null, t, 
error);
-2138  }
-2139
-2140  protected void 
closeConnection(Connection connection) {
-2141synchronized (connectionList) {
-2142  if 
(connectionList.remove(connection)) {
-2143numConnections--;
-2144  }
-2145}
-2146connection.close();
-2147  }
-2148
-2149  Configuration getConf() {
-2150return conf;
-2151  }
-2152
-2153  /** Sets the socket buffer size used 
for responding to RPCs.
-2154   * @param size send size
-2155   */
-2156  @Override
-2157  public void setSocketSendBufSize(int 
size) { this.socketSendBufferSize = size; }
-2158
+2102if (scheduler instanceof 
ConfigurationObserver) {
+2103  
((ConfigurationObserver)scheduler).onConfigurationChange(newConf);
+2104}
+2105  }
+2106
+2107  private void 
initReconfigurable(Configuration confToLoad) {
+2108this.allowFallbackToSimpleAuth = 
confToLoad.getBoolean(FALLBACK_TO_INSECURE_CLIENT_AUTH, false);
+2109if (isSecurityEnabled && allowFallbackToSimpleAuth) {
+2110  LOG.warn("* WARNING! 
*");
+2111  LOG.warn("This server is 
configured to allow connections from INSECURE clients");
+2112  LOG.warn("(" + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = true).");
+2113  LOG.warn("While this option is 
enabled, client identities cannot be secured, and user");
+2114  LOG.warn("impersonation is 
possible!");
+2115  LOG.warn("For secure operation, 
please disable SIMPLE authentication as soon as possible,");
+2116  LOG.warn("by setting " + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = false in hbase-site.xml");
+2117  
LOG.warn("");
+2118}
+2119  }
+2120
+2121  /**
+2122   * Subclasses of HBaseServer can 
override this to provide their own
+2123   * Connection implementations.
+2124   */
+2125  protected Connection 
getConnection(SocketChannel channel, long time) {
+2126return new Connection(channel, 
time);
+2127  }
+2128
+2129  /**
+2130   * Setup response for the RPC Call.
+2131   *
+2132   * @param response buffer to serialize 
the response into
+2133   * @param call {@link Call} to which 
we are setting up the response
+2134   * @param error error message, if the 
call failed
+2135   * @throws IOException
+2136   */
+2137  private void 
setupResponse(ByteArrayOutputStream response, Call call, Throwable t, String 
error)
+2138  throws IOException {
+2139if (response != null) 
response.reset();
+2140call.setResponse(null, null, t, 
error);
+2141  }
+2142
+2143  protected void 
closeConnection(Connection connection) {
+2144synchronized (connectionList) {
+2145  if 

[02/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/xref/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
--
diff --git a/xref/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html 
b/xref/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
index 6cce0c7..810f579 100644
--- a/xref/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
+++ b/xref/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
@@ -69,15 +69,15 @@
 59requiredArguments = {
 60  
@org.jamon.annotations.Argument(name = "master", type = "HMaster")},
 61optionalArguments = {
-62  @org.jamon.annotations.Argument(name = "frags", type = "Map<String,Integer>"),
-63  @org.jamon.annotations.Argument(name = "catalogJanitorEnabled", type = "boolean"),
-64  @org.jamon.annotations.Argument(name = "filter", type = "String"),
-65  @org.jamon.annotations.Argument(name = "deadServers", type = "Set<ServerName>"),
-66  @org.jamon.annotations.Argument(name = "format", type = "String"),
-67  @org.jamon.annotations.Argument(name = "servers", type = "List<ServerName>"),
-68  @org.jamon.annotations.Argument(name = "metaLocation", type = "ServerName"),
-69  @org.jamon.annotations.Argument(name = "assignmentManager", type = "AssignmentManager"),
-70  @org.jamon.annotations.Argument(name = "serverManager", type = "ServerManager")})
+62  @org.jamon.annotations.Argument(name = "serverManager", type = "ServerManager"),
+63  @org.jamon.annotations.Argument(name = "metaLocation", type = "ServerName"),
+64  @org.jamon.annotations.Argument(name = "frags", type = "Map<String,Integer>"),
+65  @org.jamon.annotations.Argument(name = "filter", type = "String"),
+66  @org.jamon.annotations.Argument(name = "deadServers", type = "Set<ServerName>"),
+67  @org.jamon.annotations.Argument(name = "format", type = "String"),
+68  @org.jamon.annotations.Argument(name = "assignmentManager", type = "AssignmentManager"),
+69  @org.jamon.annotations.Argument(name = "servers", type = "List<ServerName>"),
+70  @org.jamon.annotations.Argument(name = "catalogJanitorEnabled", type = "boolean")})
 71  public class 
MasterStatusTmpl
 72extends org.jamon.AbstractTemplateProxy
 73  {
@@ -118,159 +118,159 @@
 108   return m_master;
 109 }
 110 private HMaster
 m_master;
-111 // 21, 1
-112 public void 
setFrags(Map<String,Integer> frags)
+111 // 28, 1
+112 public void 
setServerManager(ServerManager
 serverManager)
 113 {
-114   // 21, 1
-115   m_frags = frags;
-116   
m_frags__IsNotDefault = true;
+114   // 28, 1
+115   m_serverManager 
= serverManager;
+116   
m_serverManager__IsNotDefault = true;
 117 }
-118 public Map<String,Integer> getFrags()
+118 public ServerManager
 getServerManager()
 119 {
-120   return m_frags;
+120   return m_serverManager;
 121 }
-122 private Map<String,Integer> m_frags;
-123 public boolean getFrags__IsNotDefault()
+122 private ServerManager
 m_serverManager;
+123 public boolean getServerManager__IsNotDefault()
 124 {
-125   return m_frags__IsNotDefault;
+125   return m_serverManager__IsNotDefault;
 126 }
-127 private boolean m_frags__IsNotDefault;
-128 // 25, 1
-129 public void 
setCatalogJanitorEnabled(boolean 
catalogJanitorEnabled)
+127 private boolean m_serverManager__IsNotDefault;
+128 // 22, 1
+129 public void 
setMetaLocation(ServerName 
metaLocation)
 130 {
-131   // 25, 1
-132   
m_catalogJanitorEnabled = catalogJanitorEnabled;
-133   
m_catalogJanitorEnabled__IsNotDefault = true;
+131   // 22, 1
+132   m_metaLocation 
= metaLocation;
+133   
m_metaLocation__IsNotDefault = true;
 134 }
-135 public boolean getCatalogJanitorEnabled()
+135 public ServerName 
getMetaLocation()
 136 {
-137   return m_catalogJanitorEnabled;
+137   return m_metaLocation;
 138 }
-139 private boolean m_catalogJanitorEnabled;
-140 public boolean getCatalogJanitorEnabled__IsNotDefault()
+139 private ServerName 
m_metaLocation;
+140 public boolean getMetaLocation__IsNotDefault()
 141 {
-142   return m_catalogJanitorEnabled__IsNotDefault;
+142   return m_metaLocation__IsNotDefault;
 143 }
-144 private boolean m_catalogJanitorEnabled__IsNotDefault;
-145 // 26, 1
-146 public void 
setFilter(String filter)
+144 private boolean m_metaLocation__IsNotDefault;
+145 // 21, 1
+146 public void 
setFrags(Map<String,Integer> frags)
 147 {
-148   // 26, 1
-149   m_filter = 
filter;
-150   
m_filter__IsNotDefault = true;
+148   // 21, 1
+149   m_frags = frags;
+150   
m_frags__IsNotDefault = true;
 151 }
-152 public String getFilter()
+152 public Map<String,Integer> getFrags()
 153 {
-154   return m_filter;
+154   

[13/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/hbase-archetypes/hbase-archetype-builder/team-list.html
--
diff --git a/hbase-archetypes/hbase-archetype-builder/team-list.html 
b/hbase-archetypes/hbase-archetype-builder/team-list.html
index 4486d5a..ef42067 100644
--- a/hbase-archetypes/hbase-archetype-builder/team-list.html
+++ b/hbase-archetypes/hbase-archetype-builder/team-list.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2016-02-23
+Last Published: 2016-02-24
   | Version: 
2.0.0-SNAPSHOT
   
 Apache HBase - Archetype builder

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/hbase-archetypes/hbase-client-project/checkstyle.html
--
diff --git a/hbase-archetypes/hbase-client-project/checkstyle.html 
b/hbase-archetypes/hbase-client-project/checkstyle.html
index d28da95..050d257 100644
--- a/hbase-archetypes/hbase-client-project/checkstyle.html
+++ b/hbase-archetypes/hbase-client-project/checkstyle.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2016-02-23
+Last Published: 2016-02-24
   | Version: 
2.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/hbase-archetypes/hbase-client-project/dependencies.html
--
diff --git a/hbase-archetypes/hbase-client-project/dependencies.html 
b/hbase-archetypes/hbase-client-project/dependencies.html
index 7a758f8..542d700 100644
--- a/hbase-archetypes/hbase-client-project/dependencies.html
+++ b/hbase-archetypes/hbase-client-project/dependencies.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2016-02-23
+Last Published: 2016-02-24
   | Version: 
2.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/hbase-archetypes/hbase-client-project/dependency-convergence.html
--
diff --git a/hbase-archetypes/hbase-client-project/dependency-convergence.html 
b/hbase-archetypes/hbase-client-project/dependency-convergence.html
index 52a3054..6eee2f5 100644
--- a/hbase-archetypes/hbase-client-project/dependency-convergence.html
+++ b/hbase-archetypes/hbase-client-project/dependency-convergence.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2016-02-23
+Last Published: 2016-02-24
   | Version: 
2.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/hbase-archetypes/hbase-client-project/dependency-info.html
--
diff --git a/hbase-archetypes/hbase-client-project/dependency-info.html 
b/hbase-archetypes/hbase-client-project/dependency-info.html
index 7d27e42..3a2b4c0 100644
--- a/hbase-archetypes/hbase-client-project/dependency-info.html
+++ b/hbase-archetypes/hbase-client-project/dependency-info.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2016-02-23
+Last Published: 

[12/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/modules.html
--
diff --git a/modules.html b/modules.html
index 2ebaa2b..351b6b4 100644
--- a/modules.html
+++ b/modules.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Modules
 
@@ -375,7 +375,7 @@
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-02-23
+  Last Published: 
2016-02-24
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/old_news.html
--
diff --git a/old_news.html b/old_news.html
index 17837b0..b68f038 100644
--- a/old_news.html
+++ b/old_news.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Old Apache HBase (TM) News
@@ -413,7 +413,7 @@ under the License. -->
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-02-23
+  Last Published: 
2016-02-24
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/plugin-management.html
--
diff --git a/plugin-management.html b/plugin-management.html
index b40d920..a571e38 100644
--- a/plugin-management.html
+++ b/plugin-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Plugin Management
 
@@ -423,7 +423,7 @@
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-02-23
+  Last Published: 
2016-02-24
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/plugins.html
--
diff --git a/plugins.html b/plugins.html
index 89e1400..694f5c2 100644
--- a/plugins.html
+++ b/plugins.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Build Plugins
 
@@ -366,7 +366,7 @@
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-02-23
+  Last Published: 
2016-02-24
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/poweredbyhbase.html
--
diff --git a/poweredbyhbase.html b/poweredbyhbase.html
index ff14efc..558d0c4 100644
--- a/poweredbyhbase.html
+++ b/poweredbyhbase.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Powered By Apache HBase™
 
@@ -768,7 +768,7 @@ under the License. -->
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-02-23
+  Last Published: 
2016-02-24
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/project-info.html
--
diff --git a/project-info.html b/project-info.html
index 991de9a..1c5723c 100644
--- a/project-info.html
+++ b/project-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Information
 
@@ -340,7 +340,7 @@
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-02-23
+  Last Published: 
2016-02-24
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/project-reports.html
--
diff --git a/project-reports.html b/project-reports.html
index 22d0d3e..f4d92e8 100644
--- a/project-reports.html
+++ b/project-reports.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Generated Reports
 
@@ -310,7 +310,7 @@
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-02-23
+  Last Published: 
2016-02-24
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/project-summary.html
--
diff --git a/project-summary.html b/project-summary.html
index 271bee7..b98cf54 100644
--- 

[24/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
index e5d9af6..ea9ea4d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
@@ -2107,556 +2107,559 @@
 2099  @Override
 2100  public void 
onConfigurationChange(Configuration newConf) {
 2101initReconfigurable(newConf);
-2102  }
-2103
-2104  private void 
initReconfigurable(Configuration confToLoad) {
-2105this.allowFallbackToSimpleAuth = 
confToLoad.getBoolean(FALLBACK_TO_INSECURE_CLIENT_AUTH, false);
-2106if (isSecurityEnabled && allowFallbackToSimpleAuth) {
-2107  LOG.warn("* WARNING! 
*");
-2108  LOG.warn("This server is 
configured to allow connections from INSECURE clients");
-2109  LOG.warn("(" + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = true).");
-2110  LOG.warn("While this option is 
enabled, client identities cannot be secured, and user");
-2111  LOG.warn("impersonation is 
possible!");
-2112  LOG.warn("For secure operation, 
please disable SIMPLE authentication as soon as possible,");
-2113  LOG.warn("by setting " + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = false in hbase-site.xml");
-2114  
LOG.warn("");
-2115}
-2116  }
-2117
-2118  /**
-2119   * Subclasses of HBaseServer can 
override this to provide their own
-2120   * Connection implementations.
-2121   */
-2122  protected Connection 
getConnection(SocketChannel channel, long time) {
-2123return new Connection(channel, 
time);
-2124  }
-2125
-2126  /**
-2127   * Setup response for the RPC Call.
-2128   *
-2129   * @param response buffer to serialize 
the response into
-2130   * @param call {@link Call} to which 
we are setting up the response
-2131   * @param error error message, if the 
call failed
-2132   * @throws IOException
-2133   */
-2134  private void 
setupResponse(ByteArrayOutputStream response, Call call, Throwable t, String 
error)
-2135  throws IOException {
-2136if (response != null) 
response.reset();
-2137call.setResponse(null, null, t, 
error);
-2138  }
-2139
-2140  protected void 
closeConnection(Connection connection) {
-2141synchronized (connectionList) {
-2142  if 
(connectionList.remove(connection)) {
-2143numConnections--;
-2144  }
-2145}
-2146connection.close();
-2147  }
-2148
-2149  Configuration getConf() {
-2150return conf;
-2151  }
-2152
-2153  /** Sets the socket buffer size used 
for responding to RPCs.
-2154   * @param size send size
-2155   */
-2156  @Override
-2157  public void setSocketSendBufSize(int 
size) { this.socketSendBufferSize = size; }
-2158
+2102if (scheduler instanceof 
ConfigurationObserver) {
+2103  
((ConfigurationObserver)scheduler).onConfigurationChange(newConf);
+2104}
+2105  }
+2106
+2107  private void 
initReconfigurable(Configuration confToLoad) {
+2108this.allowFallbackToSimpleAuth = 
confToLoad.getBoolean(FALLBACK_TO_INSECURE_CLIENT_AUTH, false);
+2109if (isSecurityEnabled && allowFallbackToSimpleAuth) {
+2110  LOG.warn("* WARNING! 
*");
+2111  LOG.warn("This server is 
configured to allow connections from INSECURE clients");
+2112  LOG.warn("(" + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = true).");
+2113  LOG.warn("While this option is 
enabled, client identities cannot be secured, and user");
+2114  LOG.warn("impersonation is 
possible!");
+2115  LOG.warn("For secure operation, 
please disable SIMPLE authentication as soon as possible,");
+2116  LOG.warn("by setting " + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = false in hbase-site.xml");
+2117  
LOG.warn("");
+2118}
+2119  }
+2120
+2121  /**
+2122   * Subclasses of HBaseServer can 
override this to provide their own
+2123   * Connection implementations.
+2124   */
+2125  protected Connection 
getConnection(SocketChannel channel, long time) {
+2126return new Connection(channel, 
time);
+2127  }
+2128
+2129  /**
+2130   * Setup response for the RPC Call.
+2131   *
+2132   * @param response buffer to serialize 
the response into
+2133   * @param call {@link Call} to which 
we are setting up the response
+2134   * @param error error message, if the 
call failed
+2135   * @throws IOException
+2136   */
+2137  private void 
setupResponse(ByteArrayOutputStream response, Call call, Throwable t, String 
error)
+2138  throws IOException {
+2139if (response != null) 
response.reset();
+2140call.setResponse(null, null, t, 
error);
+2141  }
+2142

[20/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Listener.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Listener.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Listener.html
index e5d9af6..ea9ea4d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Listener.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Listener.html
@@ -2107,556 +2107,559 @@
 2099  @Override
 2100  public void 
onConfigurationChange(Configuration newConf) {
 2101initReconfigurable(newConf);
-2102  }
-2103
-2104  private void 
initReconfigurable(Configuration confToLoad) {
-2105this.allowFallbackToSimpleAuth = 
confToLoad.getBoolean(FALLBACK_TO_INSECURE_CLIENT_AUTH, false);
-2106if (isSecurityEnabled && allowFallbackToSimpleAuth) {
-2107  LOG.warn("* WARNING! 
*");
-2108  LOG.warn("This server is 
configured to allow connections from INSECURE clients");
-2109  LOG.warn("(" + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = true).");
-2110  LOG.warn("While this option is 
enabled, client identities cannot be secured, and user");
-2111  LOG.warn("impersonation is 
possible!");
-2112  LOG.warn("For secure operation, 
please disable SIMPLE authentication as soon as possible,");
-2113  LOG.warn("by setting " + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = false in hbase-site.xml");
-2114  
LOG.warn("");
-2115}
-2116  }
-2117
-2118  /**
-2119   * Subclasses of HBaseServer can 
override this to provide their own
-2120   * Connection implementations.
-2121   */
-2122  protected Connection 
getConnection(SocketChannel channel, long time) {
-2123return new Connection(channel, 
time);
-2124  }
-2125
-2126  /**
-2127   * Setup response for the RPC Call.
-2128   *
-2129   * @param response buffer to serialize 
the response into
-2130   * @param call {@link Call} to which 
we are setting up the response
-2131   * @param error error message, if the 
call failed
-2132   * @throws IOException
-2133   */
-2134  private void 
setupResponse(ByteArrayOutputStream response, Call call, Throwable t, String 
error)
-2135  throws IOException {
-2136if (response != null) 
response.reset();
-2137call.setResponse(null, null, t, 
error);
-2138  }
-2139
-2140  protected void 
closeConnection(Connection connection) {
-2141synchronized (connectionList) {
-2142  if 
(connectionList.remove(connection)) {
-2143numConnections--;
-2144  }
-2145}
-2146connection.close();
-2147  }
-2148
-2149  Configuration getConf() {
-2150return conf;
-2151  }
-2152
-2153  /** Sets the socket buffer size used 
for responding to RPCs.
-2154   * @param size send size
-2155   */
-2156  @Override
-2157  public void setSocketSendBufSize(int 
size) { this.socketSendBufferSize = size; }
-2158
+2102if (scheduler instanceof 
ConfigurationObserver) {
+2103  
((ConfigurationObserver)scheduler).onConfigurationChange(newConf);
+2104}
+2105  }
+2106
+2107  private void 
initReconfigurable(Configuration confToLoad) {
+2108this.allowFallbackToSimpleAuth = 
confToLoad.getBoolean(FALLBACK_TO_INSECURE_CLIENT_AUTH, false);
+2109if (isSecurityEnabled && allowFallbackToSimpleAuth) {
+2110  LOG.warn("* WARNING! 
*");
+2111  LOG.warn("This server is 
configured to allow connections from INSECURE clients");
+2112  LOG.warn("(" + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = true).");
+2113  LOG.warn("While this option is 
enabled, client identities cannot be secured, and user");
+2114  LOG.warn("impersonation is 
possible!");
+2115  LOG.warn("For secure operation, 
please disable SIMPLE authentication as soon as possible,");
+2116  LOG.warn("by setting " + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = false in hbase-site.xml");
+2117  
LOG.warn("");
+2118}
+2119  }
+2120
+2121  /**
+2122   * Subclasses of HBaseServer can 
override this to provide their own
+2123   * Connection implementations.
+2124   */
+2125  protected Connection 
getConnection(SocketChannel channel, long time) {
+2126return new Connection(channel, 
time);
+2127  }
+2128
+2129  /**
+2130   * Setup response for the RPC Call.
+2131   *
+2132   * @param response buffer to serialize 
the response into
+2133   * @param call {@link Call} to which 
we are setting up the response
+2134   * @param error error message, if the 
call failed
+2135   * @throws IOException
+2136   */
+2137  private void 
setupResponse(ByteArrayOutputStream response, Call call, Throwable t, String 
error)
+2138  throws IOException {
+2139if (response != null) 
response.reset();
+2140call.setResponse(null, null, t, 
error);
+2141  }
+2142
+2143  protected void 
closeConnection(Connection connection) {
+2144synchronized 

[19/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Responder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Responder.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Responder.html
index e5d9af6..ea9ea4d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Responder.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Responder.html
@@ -2107,556 +2107,559 @@
 2099  @Override
 2100  public void 
onConfigurationChange(Configuration newConf) {
 2101initReconfigurable(newConf);
-2102  }
-2103
-2104  private void 
initReconfigurable(Configuration confToLoad) {
-2105this.allowFallbackToSimpleAuth = 
confToLoad.getBoolean(FALLBACK_TO_INSECURE_CLIENT_AUTH, false);
-2106if (isSecurityEnabled && allowFallbackToSimpleAuth) {
-2107  LOG.warn("* WARNING! 
*");
-2108  LOG.warn("This server is 
configured to allow connections from INSECURE clients");
-2109  LOG.warn("(" + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = true).");
-2110  LOG.warn("While this option is 
enabled, client identities cannot be secured, and user");
-2111  LOG.warn("impersonation is 
possible!");
-2112  LOG.warn("For secure operation, 
please disable SIMPLE authentication as soon as possible,");
-2113  LOG.warn("by setting " + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = false in hbase-site.xml");
-2114  
LOG.warn("");
-2115}
-2116  }
-2117
-2118  /**
-2119   * Subclasses of HBaseServer can 
override this to provide their own
-2120   * Connection implementations.
-2121   */
-2122  protected Connection 
getConnection(SocketChannel channel, long time) {
-2123return new Connection(channel, 
time);
-2124  }
-2125
-2126  /**
-2127   * Setup response for the RPC Call.
-2128   *
-2129   * @param response buffer to serialize 
the response into
-2130   * @param call {@link Call} to which 
we are setting up the response
-2131   * @param error error message, if the 
call failed
-2132   * @throws IOException
-2133   */
-2134  private void 
setupResponse(ByteArrayOutputStream response, Call call, Throwable t, String 
error)
-2135  throws IOException {
-2136if (response != null) 
response.reset();
-2137call.setResponse(null, null, t, 
error);
-2138  }
-2139
-2140  protected void 
closeConnection(Connection connection) {
-2141synchronized (connectionList) {
-2142  if 
(connectionList.remove(connection)) {
-2143numConnections--;
-2144  }
-2145}
-2146connection.close();
-2147  }
-2148
-2149  Configuration getConf() {
-2150return conf;
-2151  }
-2152
-2153  /** Sets the socket buffer size used 
for responding to RPCs.
-2154   * @param size send size
-2155   */
-2156  @Override
-2157  public void setSocketSendBufSize(int 
size) { this.socketSendBufferSize = size; }
-2158
+2102if (scheduler instanceof 
ConfigurationObserver) {
+2103  
((ConfigurationObserver)scheduler).onConfigurationChange(newConf);
+2104}
+2105  }
+2106
+2107  private void 
initReconfigurable(Configuration confToLoad) {
+2108this.allowFallbackToSimpleAuth = 
confToLoad.getBoolean(FALLBACK_TO_INSECURE_CLIENT_AUTH, false);
+2109if (isSecurityEnabled && allowFallbackToSimpleAuth) {
+2110  LOG.warn("* WARNING! 
*");
+2111  LOG.warn("This server is 
configured to allow connections from INSECURE clients");
+2112  LOG.warn("(" + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = true).");
+2113  LOG.warn("While this option is 
enabled, client identities cannot be secured, and user");
+2114  LOG.warn("impersonation is 
possible!");
+2115  LOG.warn("For secure operation, 
please disable SIMPLE authentication as soon as possible,");
+2116  LOG.warn("by setting " + 
FALLBACK_TO_INSECURE_CLIENT_AUTH + " = false in hbase-site.xml");
+2117  
LOG.warn("");
+2118}
+2119  }
+2120
+2121  /**
+2122   * Subclasses of HBaseServer can 
override this to provide their own
+2123   * Connection implementations.
+2124   */
+2125  protected Connection 
getConnection(SocketChannel channel, long time) {
+2126return new Connection(channel, 
time);
+2127  }
+2128
+2129  /**
+2130   * Setup response for the RPC Call.
+2131   *
+2132   * @param response buffer to serialize 
the response into
+2133   * @param call {@link Call} to which 
we are setting up the response
+2134   * @param error error message, if the 
call failed
+2135   * @throws IOException
+2136   */
+2137  private void 
setupResponse(ByteArrayOutputStream response, Call call, Throwable t, String 
error)
+2138  throws IOException {
+2139if (response != null) 
response.reset();
+2140call.setResponse(null, null, t, 
error);
+2141  }
+2142
+2143  protected void 
closeConnection(Connection connection) {
+2144synchronized 

[11/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/testdevapidocs/src-html/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.html
index d9b56af..a654855 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.html
@@ -64,274 +64,313 @@
 056import 
java.util.concurrent.CountDownLatch;
 057
 058import static 
org.junit.Assert.assertEquals;
-059import static 
org.junit.Assert.assertNotEquals;
-060import static org.mockito.Matchers.any;
-061import static 
org.mockito.Matchers.anyObject;
-062import static org.mockito.Matchers.eq;
-063import static 
org.mockito.Mockito.doAnswer;
-064import static org.mockito.Mockito.mock;
-065import static 
org.mockito.Mockito.timeout;
-066import static 
org.mockito.Mockito.verify;
-067import static org.mockito.Mockito.when;
-068
-069@Category({RPCTests.class, 
SmallTests.class})
-070public class TestSimpleRpcScheduler {
-071  @Rule public final TestRule timeout = 
CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-072  
withLookingForStuckThread(true).build();
-073  private static final Log LOG = 
LogFactory.getLog(TestSimpleRpcScheduler.class);
-074
-075  private final RpcScheduler.Context 
CONTEXT = new RpcScheduler.Context() {
-076@Override
-077public InetSocketAddress 
getListenerAddress() {
-078  return 
InetSocketAddress.createUnresolved("127.0.0.1", 1000);
-079}
-080  };
-081  private Configuration conf;
-082
-083  @Before
-084  public void setUp() {
-085conf = HBaseConfiguration.create();
-086  }
-087
-088  @Test
-089  public void testBasic() throws 
IOException, InterruptedException {
-090PriorityFunction qosFunction = 
mock(PriorityFunction.class);
-091RpcScheduler scheduler = new 
SimpleRpcScheduler(
-092conf, 10, 0, 0, qosFunction, 
0);
-093scheduler.init(CONTEXT);
-094scheduler.start();
-095CallRunner task = createMockTask();
-096task.setStatus(new 
MonitoredRPCHandlerImpl());
-097scheduler.dispatch(task);
-098verify(task, timeout(1000)).run();
-099scheduler.stop();
-100  }
-101
-102  @Test
-103  public void testHandlerIsolation() 
throws IOException, InterruptedException {
-104CallRunner generalTask = 
createMockTask();
-105CallRunner priorityTask = 
createMockTask();
-106CallRunner replicationTask = 
createMockTask();
-107List<CallRunner> tasks = ImmutableList.of(
-108generalTask,
-109priorityTask,
-110replicationTask);
-111Map<CallRunner, Integer> qos = ImmutableMap.of(
-112generalTask, 0,
-113priorityTask, HConstants.HIGH_QOS 
+ 1,
-114replicationTask, 
HConstants.REPLICATION_QOS);
-115PriorityFunction qosFunction = 
mock(PriorityFunction.class);
-116final Map<CallRunner, Thread> handlerThreads = Maps.newHashMap();
-117final CountDownLatch countDownLatch = 
new CountDownLatch(tasks.size());
-118Answer<Void> answerToRun = new Answer<Void>() {
-119  @Override
-120  public Void answer(InvocationOnMock 
invocationOnMock) throws Throwable {
-121synchronized (handlerThreads) {
-122  handlerThreads.put(
-123  (CallRunner) 
invocationOnMock.getMock(),
-124  Thread.currentThread());
-125}
-126countDownLatch.countDown();
-127return null;
-128  }
-129};
-130for (CallRunner task : tasks) {
-131  task.setStatus(new 
MonitoredRPCHandlerImpl());
-132  
doAnswer(answerToRun).when(task).run();
-133}
-134
-135RpcScheduler scheduler = new 
SimpleRpcScheduler(
-136conf, 1, 1 ,1, qosFunction, 
HConstants.HIGH_QOS);
-137scheduler.init(CONTEXT);
-138scheduler.start();
-139for (CallRunner task : tasks) {
-140  
when(qosFunction.getPriority((RPCProtos.RequestHeader) anyObject(),
-141(Message) anyObject(), (User) 
anyObject()))
-142  .thenReturn(qos.get(task));
-143  scheduler.dispatch(task);
-144}
-145for (CallRunner task : tasks) {
-146  verify(task, 
timeout(1000)).run();
-147}
-148scheduler.stop();
-149
-150// Tests that these requests are 
handled by three distinct threads.
-151countDownLatch.await();
-152assertEquals(3, 
ImmutableSet.copyOf(handlerThreads.values()).size());
-153  }
-154
-155  private CallRunner createMockTask() {
-156Call call = mock(Call.class);
-157CallRunner task = 
mock(CallRunner.class);
-158
when(task.getCall()).thenReturn(call);
-159return task;
-160  }
-161
-162  @Test
-163  public void testRpcScheduler() throws 
Exception {
-164
testRpcScheduler(SimpleRpcScheduler.CALL_QUEUE_TYPE_DEADLINE_CONF_VALUE);
-165

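The rewritten TestSimpleRpcScheduler above keeps the same basic shape as the old testBasic visible in the diff: build a SimpleRpcScheduler around a mocked PriorityFunction, dispatch a mocked CallRunner, and verify with a Mockito timeout that a handler thread ran it. A condensed sketch of that pattern follows; it is based on the earlier version of the test shown above, and the real test carries extra setup and assertions, so treat this as illustrative rather than a drop-in replacement:

    // Condensed dispatch-and-verify sketch; constructor arguments follow the old
    // test shown above and may differ slightly in the updated test.
    package org.apache.hadoop.hbase.ipc;

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.timeout;
    import static org.mockito.Mockito.verify;
    import static org.mockito.Mockito.when;

    import java.net.InetSocketAddress;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandlerImpl;
    import org.junit.Test;

    public class SchedulerDispatchSketch {
      private final RpcScheduler.Context context = new RpcScheduler.Context() {
        @Override
        public InetSocketAddress getListenerAddress() {
          return InetSocketAddress.createUnresolved("127.0.0.1", 1000);
        }
      };

      @Test
      public void dispatchedTaskIsRunByAHandler() throws Exception {
        Configuration conf = HBaseConfiguration.create();
        PriorityFunction qosFunction = mock(PriorityFunction.class);
        RpcScheduler scheduler = new SimpleRpcScheduler(conf, 10, 0, 0, qosFunction, 0);
        scheduler.init(context);
        scheduler.start();

        CallRunner task = mock(CallRunner.class);
        when(task.getCall()).thenReturn(mock(RpcServer.Call.class));
        task.setStatus(new MonitoredRPCHandlerImpl());

        scheduler.dispatch(task);
        verify(task, timeout(1000)).run();   // a handler thread should pick the task up
        scheduler.stop();
      }
    }
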
[09/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.MyMasterRpcServices.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.MyMasterRpcServices.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.MyMasterRpcServices.html
index 1bdbf69..f83e138 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.MyMasterRpcServices.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.MyMasterRpcServices.html
@@ -86,478 +86,483 @@
 078import org.junit.Rule;
 079import org.junit.Test;
 080import 
org.junit.experimental.categories.Category;
-081import org.junit.rules.TestRule;
-082
-083import com.google.common.base.Joiner;
-084import 
com.google.protobuf.RpcController;
-085import 
com.google.protobuf.ServiceException;
-086
-087/**
-088 * Like {@link 
TestRegionMergeTransaction} in that we're testing
-089 * {@link RegionMergeTransactionImpl} 
only the below tests are against a running
-090 * cluster where {@link 
TestRegionMergeTransaction} is tests against bare
-091 * {@link HRegion}.
-092 */
-093@Category({RegionServerTests.class, 
LargeTests.class})
-094public class 
TestRegionMergeTransactionOnCluster {
-095  private static final Log LOG = 
LogFactory
-096  
.getLog(TestRegionMergeTransactionOnCluster.class);
-097  @Rule public final TestRule timeout = 
CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-098  
withLookingForStuckThread(true).build();
-099  private static final int NB_SERVERS = 
3;
-100
-101  private static final byte[] FAMILYNAME 
= Bytes.toBytes("fam");
-102  private static final byte[] QUALIFIER = 
Bytes.toBytes("q");
-103
-104  private static byte[] ROW = 
Bytes.toBytes("testRow");
-105  private static final int 
INITIAL_REGION_NUM = 10;
-106  private static final int ROWSIZE = 
200;
-107  private static byte[][] ROWS = 
makeN(ROW, ROWSIZE);
-108
-109  private static int waitTime = 60 * 
1000;
+081import org.junit.rules.TestName;
+082import org.junit.rules.TestRule;
+083
+084import com.google.common.base.Joiner;
+085import 
com.google.protobuf.RpcController;
+086import 
com.google.protobuf.ServiceException;
+087
+088/**
+089 * Like {@link 
TestRegionMergeTransaction} in that we're testing
+090 * {@link RegionMergeTransactionImpl} 
only the below tests are against a running
+091 * cluster where {@link 
TestRegionMergeTransaction} is tests against bare
+092 * {@link HRegion}.
+093 */
+094@Category({RegionServerTests.class, 
LargeTests.class})
+095public class 
TestRegionMergeTransactionOnCluster {
+096  private static final Log LOG = 
LogFactory
+097  
.getLog(TestRegionMergeTransactionOnCluster.class);
+098  @Rule public TestName name = new 
TestName();
+099  @Rule public final TestRule timeout = 
CategoryBasedTimeout.builder().withTimeout(this.getClass()).
+100  
withLookingForStuckThread(true).build();
+101  private static final int NB_SERVERS = 
3;
+102
+103  private static final byte[] FAMILYNAME 
= Bytes.toBytes("fam");
+104  private static final byte[] QUALIFIER = 
Bytes.toBytes("q");
+105
+106  private static byte[] ROW = 
Bytes.toBytes("testRow");
+107  private static final int 
INITIAL_REGION_NUM = 10;
+108  private static final int ROWSIZE = 
200;
+109  private static byte[][] ROWS = 
makeN(ROW, ROWSIZE);
 110
-111  static final HBaseTestingUtility 
TEST_UTIL = new HBaseTestingUtility();
+111  private static int waitTime = 60 * 
1000;
 112
-113  private static HMaster MASTER;
-114  private static Admin ADMIN;
-115
-116  @BeforeClass
-117  public static void beforeAllTests() 
throws Exception {
-118// Start a cluster
-119TEST_UTIL.startMiniCluster(1, 
NB_SERVERS, null, MyMaster.class, null);
-120MiniHBaseCluster cluster = 
TEST_UTIL.getHBaseCluster();
-121MASTER = cluster.getMaster();
-122MASTER.balanceSwitch(false);
-123ADMIN = 
TEST_UTIL.getConnection().getAdmin();
-124  }
-125
-126  @AfterClass
-127  public static void afterAllTests() 
throws Exception {
-128TEST_UTIL.shutdownMiniCluster();
-129if (ADMIN != null) ADMIN.close();
-130  }
-131
-132  @Test
-133  public void testWholesomeMerge() throws 
Exception {
-134LOG.info("Starting 
testWholesomeMerge");
-135final TableName tableName =
-136
TableName.valueOf("testWholesomeMerge");
-137
-138// Create table and load data.
-139Table table = 
createTableAndLoadData(MASTER, tableName);
-140// Merge 1st and 2nd region
-141
mergeRegionsAndVerifyRegionNum(MASTER, tableName, 0, 1,
-142INITIAL_REGION_NUM - 1);
-143
-144// Merge 2nd and 3th region
-145PairOfSameType<HRegionInfo> mergedRegions =
-146  
mergeRegionsAndVerifyRegionNum(MASTER, 

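One small but visible change in TestRegionMergeTransactionOnCluster above is the new JUnit TestName rule added next to the existing timeout rule. The usual reason for adding it is to derive table names from the running test method instead of hard-coding strings such as "testWholesomeMerge"; a generic sketch of that pattern (the actual test may use the rule differently):

    // Generic JUnit TestName sketch; not taken verbatim from the HBase test.
    import org.junit.Rule;
    import org.junit.Test;
    import org.junit.rules.TestName;

    public class TestNameSketch {
      @Rule
      public TestName name = new TestName();

      @Test
      public void testWholesomeMerge() {
        // getMethodName() yields "testWholesomeMerge", so each test can create
        // its own table without colliding with other tests.
        String tableName = name.getMethodName();
        System.out.println(tableName);
      }
    }
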
[16/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
index 52d47c0..b8ef76a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
@@ -67,15 +67,15 @@
 059  requiredArguments = {
 060@org.jamon.annotations.Argument(name 
= "master", type = "HMaster")},
 061  optionalArguments = {
-062@org.jamon.annotations.Argument(name = "frags", type = "Map<String,Integer>"),
-063@org.jamon.annotations.Argument(name = "catalogJanitorEnabled", type = "boolean"),
-064@org.jamon.annotations.Argument(name = "filter", type = "String"),
-065@org.jamon.annotations.Argument(name = "deadServers", type = "Set<ServerName>"),
-066@org.jamon.annotations.Argument(name = "format", type = "String"),
-067@org.jamon.annotations.Argument(name = "servers", type = "List<ServerName>"),
-068@org.jamon.annotations.Argument(name = "metaLocation", type = "ServerName"),
-069@org.jamon.annotations.Argument(name = "assignmentManager", type = "AssignmentManager"),
-070@org.jamon.annotations.Argument(name = "serverManager", type = "ServerManager")})
+062@org.jamon.annotations.Argument(name = "serverManager", type = "ServerManager"),
+063@org.jamon.annotations.Argument(name = "metaLocation", type = "ServerName"),
+064@org.jamon.annotations.Argument(name = "frags", type = "Map<String,Integer>"),
+065@org.jamon.annotations.Argument(name = "filter", type = "String"),
+066@org.jamon.annotations.Argument(name = "deadServers", type = "Set<ServerName>"),
+067@org.jamon.annotations.Argument(name = "format", type = "String"),
+068@org.jamon.annotations.Argument(name = "assignmentManager", type = "AssignmentManager"),
+069@org.jamon.annotations.Argument(name = "servers", type = "List<ServerName>"),
+070@org.jamon.annotations.Argument(name = "catalogJanitorEnabled", type = "boolean")})
 071public class MasterStatusTmpl
 072  extends 
org.jamon.AbstractTemplateProxy
 073{
@@ -116,159 +116,159 @@
 108  return m_master;
 109}
 110private HMaster m_master;
-111// 21, 1
-112public void 
setFrags(MapString,Integer frags)
+111// 28, 1
+112public void 
setServerManager(ServerManager serverManager)
 113{
-114  // 21, 1
-115  m_frags = frags;
-116  m_frags__IsNotDefault = true;
+114  // 28, 1
+115  m_serverManager = serverManager;
+116  m_serverManager__IsNotDefault = 
true;
 117}
-118public MapString,Integer 
getFrags()
+118public ServerManager 
getServerManager()
 119{
-120  return m_frags;
+120  return m_serverManager;
 121}
-122private MapString,Integer 
m_frags;
-123public boolean 
getFrags__IsNotDefault()
+122private ServerManager 
m_serverManager;
+123public boolean 
getServerManager__IsNotDefault()
 124{
-125  return m_frags__IsNotDefault;
+125  return 
m_serverManager__IsNotDefault;
 126}
-127private boolean 
m_frags__IsNotDefault;
-128// 25, 1
-129public void 
setCatalogJanitorEnabled(boolean catalogJanitorEnabled)
+127private boolean 
m_serverManager__IsNotDefault;
+128// 22, 1
+129public void 
setMetaLocation(ServerName metaLocation)
 130{
-131  // 25, 1
-132  m_catalogJanitorEnabled = 
catalogJanitorEnabled;
-133  
m_catalogJanitorEnabled__IsNotDefault = true;
+131  // 22, 1
+132  m_metaLocation = metaLocation;
+133  m_metaLocation__IsNotDefault = 
true;
 134}
-135public boolean 
getCatalogJanitorEnabled()
+135public ServerName getMetaLocation()
 136{
-137  return m_catalogJanitorEnabled;
+137  return m_metaLocation;
 138}
-139private boolean 
m_catalogJanitorEnabled;
-140public boolean 
getCatalogJanitorEnabled__IsNotDefault()
+139private ServerName m_metaLocation;
+140public boolean 
getMetaLocation__IsNotDefault()
 141{
-142  return 
m_catalogJanitorEnabled__IsNotDefault;
+142  return 
m_metaLocation__IsNotDefault;
 143}
-144private boolean 
m_catalogJanitorEnabled__IsNotDefault;
-145// 26, 1
-146public void setFilter(String 
filter)
+144private boolean 
m_metaLocation__IsNotDefault;
+145// 21, 1
+146public void 
setFrags(MapString,Integer frags)
 147{
-148  // 26, 1
-149  m_filter = filter;
-150  m_filter__IsNotDefault = true;
+148  // 21, 1
+149  m_frags = frags;
+150  m_frags__IsNotDefault = true;
 151}
-152public String getFilter()
+152public MapString,Integer 
getFrags()
 153{
-154  

[07/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/xref-test/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.html
--
diff --git a/xref-test/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.html b/xref-test/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.html
index 3a2e696..b6783ba 100644
--- a/xref-test/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.html
+++ b/xref-test/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.html
@@ -66,274 +66,313 @@
 56  import java.util.concurrent.CountDownLatch;
 57  
 58  import static org.junit.Assert.assertEquals;
-59  import static org.junit.Assert.assertNotEquals;
-60  import static org.mockito.Matchers.any;
-61  import static org.mockito.Matchers.anyObject;
-62  import static org.mockito.Matchers.eq;
-63  import static org.mockito.Mockito.doAnswer;
-64  import static org.mockito.Mockito.mock;
-65  import static org.mockito.Mockito.timeout;
-66  import static org.mockito.Mockito.verify;
-67  import static org.mockito.Mockito.when;
-68  
-69  @Category({RPCTests.class, SmallTests.class})
-70  public class TestSimpleRpcScheduler {
-71    @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-72        withLookingForStuckThread(true).build();
-73    private static final Log LOG = LogFactory.getLog(TestSimpleRpcScheduler.class);
-74  
-75    private final RpcScheduler.Context CONTEXT = new RpcScheduler.Context() {
-76      @Override
-77      public InetSocketAddress getListenerAddress() {
-78        return InetSocketAddress.createUnresolved("127.0.0.1", 1000);
-79      }
-80    };
-81    private Configuration conf;
-82  
-83    @Before
-84    public void setUp() {
-85      conf = HBaseConfiguration.create();
-86    }
-87  
-88    @Test
-89    public void testBasic() throws IOException, InterruptedException {
-90      PriorityFunction qosFunction = mock(PriorityFunction.class);
-91      RpcScheduler scheduler = new SimpleRpcScheduler(
-92          conf, 10, 0, 0, qosFunction, 0);
-93      scheduler.init(CONTEXT);
-94      scheduler.start();
-95      CallRunner task = createMockTask();
-96      task.setStatus(new MonitoredRPCHandlerImpl());
-97      scheduler.dispatch(task);
-98      verify(task, timeout(1000)).run();
-99      scheduler.stop();
-100   }
-101 
-102   @Test
-103   public void testHandlerIsolation() throws IOException, InterruptedException {
-104     CallRunner generalTask = createMockTask();
-105     CallRunner priorityTask = createMockTask();
-106     CallRunner replicationTask = createMockTask();
-107     List<CallRunner> tasks = ImmutableList.of(
-108         generalTask,
-109         priorityTask,
-110         replicationTask);
-111     Map<CallRunner, Integer> qos = ImmutableMap.of(
-112         generalTask, 0,
-113         priorityTask, HConstants.HIGH_QOS + 1,
-114         replicationTask, HConstants.REPLICATION_QOS);
-115     PriorityFunction qosFunction = mock(PriorityFunction.class);
-116     final Map<CallRunner, Thread> handlerThreads = Maps.newHashMap();
-117     final CountDownLatch countDownLatch = new CountDownLatch(tasks.size());
-118     Answer<Void> answerToRun = new Answer<Void>() {
-119       @Override
-120       public Void answer(InvocationOnMock invocationOnMock) throws Throwable {
-121         synchronized (handlerThreads) {
-122           handlerThreads.put(
-123               (CallRunner) invocationOnMock.getMock(),
-124               Thread.currentThread());
-125         }
-126         countDownLatch.countDown();
-127         return null;
-128       }
-129     };
-130     for (CallRunner task : tasks) {
-131       task.setStatus(new MonitoredRPCHandlerImpl());
-132       doAnswer(answerToRun).when(task).run();
-133     }
-134 
-135     RpcScheduler scheduler = new SimpleRpcScheduler(
-136         conf, 1, 1, 1, qosFunction, HConstants.HIGH_QOS);
-137     scheduler.init(CONTEXT);
-138     scheduler.start();
-139     for (CallRunner task : tasks) {
-140       when(qosFunction.getPriority((RPCProtos.RequestHeader) anyObject(),
-141           (Message) anyObject(), (User) anyObject()))
-142           .thenReturn(qos.get(task));
-143       scheduler.dispatch(task);
-144     }
-145     for (CallRunner task : tasks) {
-146       verify(task, timeout(1000)).run();
-147     }
-148     scheduler.stop();
-149 
-150     // Tests that these requests are handled by three distinct threads.
-151     countDownLatch.await();
-152     assertEquals(3, ImmutableSet.copyOf(handlerThreads.values()).size());
-153   }
-154 
-155   private CallRunner createMockTask() {
-156     Call call = mock(Call.class);
-157     CallRunner task = mock(CallRunner.class);
-158     when(task.getCall()).thenReturn(call);
-159     return task;
-160   }
-161 
-162   @Test
-163   public void testRpcScheduler() throws Exception {
-164 
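
The removed testHandlerIsolation body above leans on a reusable Mockito idiom: stub run() with an Answer that records Thread.currentThread() and counts down a latch, then assert on the set of recorded threads. Below is a minimal, hbase-free sketch of that idiom; the class and test names are hypothetical, and it blocks each task until all have started so the thread-distinctness assertion is deterministic (the real test instead relies on the scheduler's separate handler pools).

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import org.junit.Test;

public class HandlerThreadRecordingSketch {

  @Test
  public void tasksRunOnDistinctThreads() throws Exception {
    List<Runnable> tasks = Arrays.asList(
        mock(Runnable.class), mock(Runnable.class), mock(Runnable.class));
    Map<Runnable, Thread> handlerThreads = new ConcurrentHashMap<>();
    CountDownLatch allStarted = new CountDownLatch(tasks.size());

    for (Runnable task : tasks) {
      doAnswer(invocation -> {
        // Record which thread executed this particular mock ...
        handlerThreads.put((Runnable) invocation.getMock(), Thread.currentThread());
        allStarted.countDown();
        // ... and hold the thread until every task has started, so no thread
        // can be reused for a second task.
        allStarted.await(10, TimeUnit.SECONDS);
        return null;
      }).when(task).run();
    }

    ExecutorService pool = Executors.newFixedThreadPool(tasks.size());
    try {
      tasks.forEach(pool::submit);
      allStarted.await(10, TimeUnit.SECONDS);
      assertEquals(3, new HashSet<>(handlerThreads.values()).size());
    } finally {
      pool.shutdownNow();
    }
  }
}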

[22/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Connection.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Connection.html b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Connection.html
index e5d9af6..ea9ea4d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Connection.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.Connection.html
@@ -2107,556 +2107,559 @@
 2099  @Override
 2100  public void onConfigurationChange(Configuration newConf) {
 2101    initReconfigurable(newConf);
-2102  }
-2103
-2104  private void initReconfigurable(Configuration confToLoad) {
-2105    this.allowFallbackToSimpleAuth = confToLoad.getBoolean(FALLBACK_TO_INSECURE_CLIENT_AUTH, false);
-2106    if (isSecurityEnabled && allowFallbackToSimpleAuth) {
-2107      LOG.warn("* WARNING! *");
-2108      LOG.warn("This server is configured to allow connections from INSECURE clients");
-2109      LOG.warn("(" + FALLBACK_TO_INSECURE_CLIENT_AUTH + " = true).");
-2110      LOG.warn("While this option is enabled, client identities cannot be secured, and user");
-2111      LOG.warn("impersonation is possible!");
-2112      LOG.warn("For secure operation, please disable SIMPLE authentication as soon as possible,");
-2113      LOG.warn("by setting " + FALLBACK_TO_INSECURE_CLIENT_AUTH + " = false in hbase-site.xml");
-2114      LOG.warn("");
-2115    }
-2116  }
-2117
-2118  /**
-2119   * Subclasses of HBaseServer can override this to provide their own
-2120   * Connection implementations.
-2121   */
-2122  protected Connection getConnection(SocketChannel channel, long time) {
-2123    return new Connection(channel, time);
-2124  }
-2125
-2126  /**
-2127   * Setup response for the RPC Call.
-2128   *
-2129   * @param response buffer to serialize the response into
-2130   * @param call {@link Call} to which we are setting up the response
-2131   * @param error error message, if the call failed
-2132   * @throws IOException
-2133   */
-2134  private void setupResponse(ByteArrayOutputStream response, Call call, Throwable t, String error)
-2135  throws IOException {
-2136    if (response != null) response.reset();
-2137    call.setResponse(null, null, t, error);
-2138  }
-2139
-2140  protected void closeConnection(Connection connection) {
-2141    synchronized (connectionList) {
-2142      if (connectionList.remove(connection)) {
-2143        numConnections--;
-2144      }
-2145    }
-2146    connection.close();
-2147  }
-2148
-2149  Configuration getConf() {
-2150    return conf;
-2151  }
-2152
-2153  /** Sets the socket buffer size used for responding to RPCs.
-2154   * @param size send size
-2155   */
-2156  @Override
-2157  public void setSocketSendBufSize(int size) { this.socketSendBufferSize = size; }
-2158
+2102    if (scheduler instanceof ConfigurationObserver) {
+2103      ((ConfigurationObserver)scheduler).onConfigurationChange(newConf);
+2104    }
+2105  }
+2106
+2107  private void initReconfigurable(Configuration confToLoad) {
+2108    this.allowFallbackToSimpleAuth = confToLoad.getBoolean(FALLBACK_TO_INSECURE_CLIENT_AUTH, false);
+2109    if (isSecurityEnabled && allowFallbackToSimpleAuth) {
+2110      LOG.warn("* WARNING! *");
+2111      LOG.warn("This server is configured to allow connections from INSECURE clients");
+2112      LOG.warn("(" + FALLBACK_TO_INSECURE_CLIENT_AUTH + " = true).");
+2113      LOG.warn("While this option is enabled, client identities cannot be secured, and user");
+2114      LOG.warn("impersonation is possible!");
+2115      LOG.warn("For secure operation, please disable SIMPLE authentication as soon as possible,");
+2116      LOG.warn("by setting " + FALLBACK_TO_INSECURE_CLIENT_AUTH + " = false in hbase-site.xml");
+2117      LOG.warn("");
+2118    }
+2119  }
+2120
+2121  /**
+2122   * Subclasses of HBaseServer can override this to provide their own
+2123   * Connection implementations.
+2124   */
+2125  protected Connection getConnection(SocketChannel channel, long time) {
+2126    return new Connection(channel, time);
+2127  }
+2128
+2129  /**
+2130   * Setup response for the RPC Call.
+2131   *
+2132   * @param response buffer to serialize the response into
+2133   * @param call {@link Call} to which we are setting up the response
+2134   * @param error error message, if the call failed
+2135   * @throws IOException
+2136   */
+2137  private void setupResponse(ByteArrayOutputStream response, Call call, Throwable t, String error)
+2138  throws IOException {
+2139    if (response != null) response.reset();
+2140    call.setResponse(null, null, t, error);
+2141  }
+2142
+2143  protected void closeConnection(Connection connection) {
+2144    synchronized 
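
The net effect of this hunk is the added lines 2102-2104: onConfigurationChange now forwards the new Configuration to the scheduler when it implements ConfigurationObserver. Reduced to a standalone sketch, the idiom looks like the following; only the org.apache.hadoop.hbase.conf.ConfigurationObserver interface and Configuration type are taken from the code above, the class names, field, and config key are hypothetical.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.conf.ConfigurationObserver;

// Hypothetical collaborator that opts in to online reconfiguration.
class QueueTuningScheduler implements ConfigurationObserver {
  private volatile int maxQueueLength;

  @Override
  public void onConfigurationChange(Configuration conf) {
    // Re-read whatever tunables this component cares about; the key is illustrative.
    this.maxQueueLength = conf.getInt("example.scheduler.max.queue.length", 100);
  }
}

// Hypothetical owner, mirroring the instanceof-and-forward idiom added above.
class ReconfigurableServer {
  private final Object scheduler;  // in RpcServer this is the scheduler field

  ReconfigurableServer(Object scheduler) {
    this.scheduler = scheduler;
  }

  void onConfigurationChange(Configuration newConf) {
    // Forward only when the collaborator implements the observer interface.
    if (scheduler instanceof ConfigurationObserver) {
      ((ConfigurationObserver) scheduler).onConfigurationChange(newConf);
    }
  }
}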

hbase git commit: HBASE-15312 Update the dependences of pom for mini cluster in HBase Book (Liu Shaohui)

2016-02-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 30cec72f9 -> 2a306437a


HBASE-15312 Update the dependences of pom for mini cluster in HBase Book (Liu Shaohui)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2a306437
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2a306437
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2a306437

Branch: refs/heads/master
Commit: 2a306437aaccb99ff333ab41c7165333994eba48
Parents: 30cec72
Author: stack 
Authored: Wed Feb 24 09:31:10 2016 -0800
Committer: stack 
Committed: Wed Feb 24 09:31:10 2016 -0800

--
 src/main/asciidoc/_chapters/unit_testing.adoc | 60 ++
 1 file changed, 39 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2a306437/src/main/asciidoc/_chapters/unit_testing.adoc
--
diff --git a/src/main/asciidoc/_chapters/unit_testing.adoc b/src/main/asciidoc/_chapters/unit_testing.adoc
index e1bcf87..15b6cce 100644
--- a/src/main/asciidoc/_chapters/unit_testing.adoc
+++ b/src/main/asciidoc/_chapters/unit_testing.adoc
@@ -268,37 +268,55 @@ Check the versions to be sure they are appropriate.
 
 [source,xml]
 ----
+<properties>
+  <hbase.version>2.0.0-SNAPSHOT</hbase.version>
+  <hadoop.version>2.7.1</hadoop.version>
+</properties>
 
-<dependency>
-    <groupId>org.apache.hadoop</groupId>
-    <artifactId>hadoop-common</artifactId>
-    <version>2.0.0</version>
+<dependencies>
+  <dependency>
+    <groupId>org.apache.hbase</groupId>
+    <artifactId>hbase-server</artifactId>
+    <version>${hbase.version}</version>
+    <scope>test</scope>
+  </dependency>
+  <dependency>
+    <groupId>org.apache.hbase</groupId>
+    <artifactId>hbase-server</artifactId>
+    <version>${hbase.version}</version>
     <type>test-jar</type>
     <scope>test</scope>
-</dependency>
-
-<dependency>
+  </dependency>
+  <dependency>
     <groupId>org.apache.hbase</groupId>
-    <artifactId>hbase</artifactId>
-    <version>0.98.3</version>
+    <artifactId>hbase-hadoop-compat</artifactId>
+    <version>${hbase.version}</version>
     <type>test-jar</type>
     <scope>test</scope>
-</dependency>
+  </dependency>
 
-<dependency>
+  <dependency>
     <groupId>org.apache.hadoop</groupId>
-    <artifactId>hadoop-hdfs</artifactId>
-    <version>2.0.0</version>
+    <artifactId>hadoop-common</artifactId>
+    <version>${hadoop.version}</version>
     <type>test-jar</type>
     <scope>test</scope>
-</dependency>
-
-<dependency>
+  </dependency>
+  <dependency>
+    <groupId>org.apache.hbase</groupId>
+    <artifactId>hbase-hadoop2-compat</artifactId>
+    <version>${hbase.version}</version>
+    <type>test-jar</type>
+    <scope>test</scope>
+  </dependency>
+  <dependency>
     <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
-    <version>2.0.0</version>
+    <version>${hadoop.version}</version>
+    <type>test-jar</type>
     <scope>test</scope>
-</dependency>
+  </dependency>
+</dependencies>
 
 
 This code represents an integration test for the MyDAO insert shown in <>.
@@ -309,7 +327,8 @@ This code represents an integration test for the MyDAO insert shown in <
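
With the test-jar dependencies above on the classpath, a mini-cluster test follows the same HBaseTestingUtility start/stop lifecycle visible in the test diffs elsewhere in this thread. The following is a minimal illustrative sketch only; the table, family, and assertion are hypothetical and are not taken from the chapter being edited.

import static org.junit.Assert.assertArrayEquals;

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

public class TestMiniClusterRoundTrip {
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final byte[] FAMILY = Bytes.toBytes("f");

  @BeforeClass
  public static void setUpCluster() throws Exception {
    // Spins up an in-process HDFS, ZooKeeper, master and regionserver.
    TEST_UTIL.startMiniCluster();
  }

  @AfterClass
  public static void tearDownCluster() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  @Test
  public void putThenGet() throws Exception {
    Table table = TEST_UTIL.createTable(TableName.valueOf("sketch"), FAMILY);
    byte[] row = Bytes.toBytes("row1");
    byte[] qual = Bytes.toBytes("q");
    byte[] value = Bytes.toBytes("v");
    table.put(new Put(row).addColumn(FAMILY, qual, value));
    assertArrayEquals(value, table.get(new Get(row)).getValue(FAMILY, qual));
  }
}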

[3/3] hbase git commit: HBASE-15302 Reenable the other tests disabled by HBASE-14678

2016-02-24 Thread stack
HBASE-15302 Reenable the other tests disabled by HBASE-14678

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/30cec72f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/30cec72f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/30cec72f

Branch: refs/heads/master
Commit: 30cec72f9ade972d7e9ce4bba527b0e6074cae60
Parents: 876a6ab
Author: Phil Yang 
Authored: Mon Feb 22 14:17:24 2016 +0800
Committer: stack 
Committed: Wed Feb 24 07:14:01 2016 -0800

--
 .../apache/hadoop/hbase/wal/WALSplitter.java|   11 +-
 .../hbase/TestPartialResultsFromClientSide.java |  832 
 .../TestMobSnapshotCloneIndependence.java   |   69 +
 .../client/TestSnapshotCloneIndependence.java   |  481 +
 .../master/TestDistributedLogSplitting.java | 1799 ++
 .../balancer/TestStochasticLoadBalancer2.java   |   90 +
 .../TestMasterFailoverWithProcedures.java   |  514 +
 .../TestMobFlushSnapshotFromClient.java |   72 +
 .../apache/hadoop/hbase/wal/TestWALSplit.java   | 1320 +
 .../hbase/wal/TestWALSplitCompressed.java   |   36 +
 .../hbase/client/TestReplicationShell.java  |   37 +
 11 files changed, 5256 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/30cec72f/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
index 54b82b2..010fd37 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
@@ -515,13 +515,14 @@ public class WALSplitter {
    * @param fs
    * @param logEntry
    * @param rootDir HBase root dir.
-   * @param fileBeingSplit the file being split currently. Used to generate tmp file name.
+   * @param fileNameBeingSplit the file being split currently. Used to generate tmp file name.
    * @return Path to file into which to dump split log edits.
    * @throws IOException
    */
   @SuppressWarnings("deprecation")
-  private static Path getRegionSplitEditsPath(final FileSystem fs,
-      final Entry logEntry, final Path rootDir, FileStatus fileBeingSplit)
+  @VisibleForTesting
+  static Path getRegionSplitEditsPath(final FileSystem fs,
+      final Entry logEntry, final Path rootDir, String fileNameBeingSplit)
   throws IOException {
     Path tableDir = FSUtils.getTableDir(rootDir, logEntry.getKey().getTablename());
     String encodedRegionName = Bytes.toString(logEntry.getKey().getEncodedRegionName());
@@ -556,7 +557,7 @@ public class WALSplitter {
     // Append file name ends with RECOVERED_LOG_TMPFILE_SUFFIX to ensure
     // region's replayRecoveredEdits will not delete it
     String fileName = formatRecoveredEditsFileName(logEntry.getKey().getSequenceId());
-    fileName = getTmpRecoveredEditsFileName(fileName + "-" + fileBeingSplit.getPath().getName());
+    fileName = getTmpRecoveredEditsFileName(fileName + "-" + fileNameBeingSplit);
     return new Path(dir, fileName);
   }
 
@@ -1518,7 +1519,7 @@ public class WALSplitter {
      * @return a path with a write for that path. caller should close.
      */
     private WriterAndPath createWAP(byte[] region, Entry entry, Path rootdir) throws IOException {
-      Path regionedits = getRegionSplitEditsPath(fs, entry, rootdir, fileBeingSplit);
+      Path regionedits = getRegionSplitEditsPath(fs, entry, rootdir, fileBeingSplit.getPath().getName());
       if (regionedits == null) {
         return null;
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/30cec72f/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
new file mode 100644
index 000..a6f8373
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
@@ -0,0 +1,832 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License 

[1/3] hbase git commit: HBASE-15302 Reenable the other tests disabled by HBASE-14678

2016-02-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 876a6ab73 -> 30cec72f9


http://git-wip-us.apache.org/repos/asf/hbase/blob/30cec72f/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
new file mode 100644
index 000..125f5a1
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
@@ -0,0 +1,514 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.master.procedure;
+
+import java.io.IOException;
+import java.util.concurrent.CountDownLatch;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.master.HMaster;
+import org.apache.hadoop.hbase.procedure2.Procedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
+import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
+import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.TestProcedure;
+import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
+import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.CreateTableState;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteTableState;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DisableTableState;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.EnableTableState;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.TruncateTableState;
+import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.util.ModifyRegionUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.mockito.Mockito;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+@Category({MasterTests.class, LargeTests.class})
+public class TestMasterFailoverWithProcedures {
+  private static final Log LOG = LogFactory.getLog(TestMasterFailoverWithProcedures.class);
+
+  protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+
+  private static void setupConf(Configuration conf) {
+    // don't waste time retrying with the roll, the test is already slow enough.
+    conf.setInt("hbase.procedure.store.wal.max.retries.before.roll", 1);
+    conf.setInt("hbase.procedure.store.wal.wait.before.roll", 0);
+    conf.setInt("hbase.procedure.store.wal.max.roll.retries", 1);
+    conf.setInt("hbase.procedure.store.wal.sync.failure.roll.max", 1);
+  }
+
+  @Before
+  public void setup() throws Exception {
+    setupConf(UTIL.getConfiguration());
+    UTIL.startMiniCluster(2, 1);
+
+    final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();
+    ProcedureTestingUtility.setToggleKillBeforeStoreUpdate(procExec, false);
+    ProcedureTestingUtility.setKillBeforeStoreUpdate(procExec, false);
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    try {
+      UTIL.shutdownMiniCluster();
+    } catch (Exception e) {
+      LOG.warn("failure shutting down cluster", e);
+    }
+  }
+
+  @Test(timeout=6)
+  public void testWalRecoverLease() throws Exception {
+    final ProcedureStore masterStore =