[3/4] hbase git commit: HBASE-17356 Add replica get support

2019-01-02 Thread zhangduo
HBASE-17356 Add replica get support


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ff23c022
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ff23c022
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ff23c022

Branch: refs/heads/branch-2.0
Commit: ff23c022126ab0cb557bc55cd0b38043b1ed385d
Parents: 4caf2fb
Author: zhangduo 
Authored: Tue Jan 1 21:59:37 2019 +0800
Committer: Duo Zhang 
Committed: Thu Jan 3 10:55:06 2019 +0800

--
 .../apache/hadoop/hbase/RegionLocations.java|   30 +-
 .../client/AsyncBatchRpcRetryingCaller.java |  114 +-
 .../client/AsyncConnectionConfiguration.java|   12 +
 .../hbase/client/AsyncConnectionImpl.java   |1 -
 .../hbase/client/AsyncMetaRegionLocator.java|  125 +-
 .../hbase/client/AsyncNonMetaRegionLocator.java |  291 +--
 .../hadoop/hbase/client/AsyncRegionLocator.java |  129 +-
 .../hbase/client/AsyncRegionLocatorHelper.java  |  147 ++
 .../hbase/client/AsyncRpcRetryingCaller.java|   15 +-
 .../client/AsyncRpcRetryingCallerFactory.java   |   55 +-
 .../AsyncSingleRequestRpcRetryingCaller.java|   71 +-
 .../hbase/client/AsyncTableRegionLocator.java   |   28 +-
 .../client/AsyncTableRegionLocatorImpl.java |6 +-
 .../hbase/client/ConnectionConfiguration.java   |5 +-
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java | 1897 +-
 .../hadoop/hbase/client/RawAsyncTableImpl.java  |  208 +-
 .../apache/hadoop/hbase/util/FutureUtils.java   |   60 +
 .../hbase/client/RegionReplicaTestHelper.java   |  161 ++
 .../client/TestAsyncMetaRegionLocator.java  |   55 +-
 .../client/TestAsyncNonMetaRegionLocator.java   |  126 +-
 ...syncNonMetaRegionLocatorConcurrenyLimit.java |   20 +-
 ...TestAsyncSingleRequestRpcRetryingCaller.java |   56 +-
 .../client/TestAsyncTableLocatePrefetch.java|4 +-
 .../client/TestAsyncTableRegionReplicasGet.java |  204 ++
 .../hbase/client/TestZKAsyncRegistry.java   |   44 +-
 25 files changed, 2301 insertions(+), 1563 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ff23c022/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
index fd6f3c7..f98bf03 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
@@ -56,8 +56,8 @@ public class RegionLocations {
 int index = 0;
 for (HRegionLocation loc : locations) {
   if (loc != null) {
-if (loc.getRegionInfo().getReplicaId() >= maxReplicaId) {
-  maxReplicaId = loc.getRegionInfo().getReplicaId();
+if (loc.getRegion().getReplicaId() >= maxReplicaId) {
+  maxReplicaId = loc.getRegion().getReplicaId();
   maxReplicaIdIndex = index;
 }
   }
@@ -72,7 +72,7 @@ public class RegionLocations {
   this.locations = new HRegionLocation[maxReplicaId + 1];
   for (HRegionLocation loc : locations) {
 if (loc != null) {
-  this.locations[loc.getRegionInfo().getReplicaId()] = loc;
+  this.locations[loc.getRegion().getReplicaId()] = loc;
 }
   }
 }
@@ -146,7 +146,7 @@ public class RegionLocations {
   public RegionLocations remove(HRegionLocation location) {
 if (location == null) return this;
 if (location.getRegion() == null) return this;
-int replicaId = location.getRegionInfo().getReplicaId();
+int replicaId = location.getRegion().getReplicaId();
 if (replicaId >= locations.length) return this;
 
 // check whether something to remove. HRL.compareTo() compares ONLY the
@@ -203,14 +203,14 @@ public class RegionLocations {
 // in case of region replication going down, we might have a leak here.
 int max = other.locations.length;
 
-HRegionInfo regionInfo = null;
+RegionInfo regionInfo = null;
 for (int i = 0; i < max; i++) {
   HRegionLocation thisLoc = this.getRegionLocation(i);
   HRegionLocation otherLoc = other.getRegionLocation(i);
-  if (regionInfo == null && otherLoc != null && otherLoc.getRegionInfo() 
!= null) {
+  if (regionInfo == null && otherLoc != null && otherLoc.getRegion() != 
null) {
 // regionInfo is the first non-null HRI from other RegionLocations. We 
use it to ensure that
 // all replica region infos belong to the same region with same region 
id.
-regionInfo = otherLoc.getRegionInfo();
+regionInfo = otherLoc.getRegion();
   }
 
   HRegionLocation selectedLoc = selectRegionLocation(thisLoc,
@@ -232,7 +232,7 @@ public class RegionLocations {
   

[4/4] hbase git commit: HBASE-18569 Add prefetch support for async region locator

2019-01-02 Thread zhangduo
HBASE-18569 Add prefetch support for async region locator


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4caf2fb0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4caf2fb0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4caf2fb0

Branch: refs/heads/branch-2.0
Commit: 4caf2fb0d848821d96b98dc449589eae0124b2b7
Parents: 40d2787
Author: zhangduo 
Authored: Fri Jun 22 08:48:33 2018 +0800
Committer: Duo Zhang 
Committed: Thu Jan 3 10:55:06 2019 +0800

--
 .../hbase/client/AsyncNonMetaRegionLocator.java | 75 +++---
 .../client/TestAsyncTableLocatePrefetch.java| 82 
 2 files changed, 145 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4caf2fb0/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
index f6d74a5..7e3d56c 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
@@ -52,6 +52,8 @@ import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+
 /**
  * The asynchronous locator for regions other than meta.
  */
@@ -60,15 +62,23 @@ class AsyncNonMetaRegionLocator {
 
   private static final Logger LOG = 
LoggerFactory.getLogger(AsyncNonMetaRegionLocator.class);
 
+  @VisibleForTesting
   static final String MAX_CONCURRENT_LOCATE_REQUEST_PER_TABLE =
 "hbase.client.meta.max.concurrent.locate.per.table";
 
   private static final int DEFAULT_MAX_CONCURRENT_LOCATE_REQUEST_PER_TABLE = 8;
 
+  @VisibleForTesting
+  static String LOCATE_PREFETCH_LIMIT = "hbase.client.locate.prefetch.limit";
+
+  private static final int DEFAULT_LOCATE_PREFETCH_LIMIT = 10;
+
   private final AsyncConnectionImpl conn;
 
   private final int maxConcurrentLocateRequestPerTable;
 
+  private final int locatePrefetchLimit;
+
   private final ConcurrentMap cache = new 
ConcurrentHashMap<>();
 
   private static final class LocateRequest {
@@ -168,6 +178,8 @@ class AsyncNonMetaRegionLocator {
 this.conn = conn;
 this.maxConcurrentLocateRequestPerTable = conn.getConfiguration().getInt(
   MAX_CONCURRENT_LOCATE_REQUEST_PER_TABLE, 
DEFAULT_MAX_CONCURRENT_LOCATE_REQUEST_PER_TABLE);
+this.locatePrefetchLimit =
+  conn.getConfiguration().getInt(LOCATE_PREFETCH_LIMIT, 
DEFAULT_LOCATE_PREFETCH_LIMIT);
   }
 
   private TableCache getTableCache(TableName tableName) {
@@ -223,9 +235,7 @@ class AsyncNonMetaRegionLocator {
   justification = "Called by lambda expression")
   private void addToCache(HRegionLocation loc) {
 addToCache(getTableCache(loc.getRegion().getTable()), loc);
-if (LOG.isTraceEnabled()) {
-  LOG.trace("Try adding " + loc + " to cache");
-}
+LOG.trace("Try adding {} to cache", loc);
   }
 
   private void complete(TableName tableName, LocateRequest req, 
HRegionLocation loc,
@@ -271,8 +281,10 @@ class AsyncNonMetaRegionLocator {
   // return whether we should stop the scan
   private boolean onScanNext(TableName tableName, LocateRequest req, Result 
result) {
 RegionLocations locs = MetaTableAccessor.getRegionLocations(result);
-LOG.debug("The fetched location of '{}', row='{}', locateType={} is {}", 
tableName,
-  Bytes.toStringBinary(req.row), req.locateType, locs);
+if (LOG.isDebugEnabled()) {
+  LOG.debug("The fetched location of '{}', row='{}', locateType={} is {}", 
tableName,
+Bytes.toStringBinary(req.row), req.locateType, locs);
+}
 
 if (locs == null || locs.getDefaultRegionLocation() == null) {
   complete(tableName, req, null,
@@ -294,8 +306,8 @@ class AsyncNonMetaRegionLocator {
 if (loc.getServerName() == null) {
   complete(tableName, req, null,
 new IOException(
-String.format("No server address listed for region '%s', row='%s', 
locateType=%s",
-  info.getRegionNameAsString(), Bytes.toStringBinary(req.row), 
req.locateType)));
+  String.format("No server address listed for region '%s', row='%s', 
locateType=%s",
+info.getRegionNameAsString(), Bytes.toStringBinary(req.row), 
req.locateType)));
   return true;
 }
 complete(tableName, req, loc, null);
@@ -361,7 +373,7 @@ class AsyncNonMetaRegionLocator {
   RegionInfo.createRegionName(tableName, HConstants.EMPTY_START_ROW, "", 
false);
 

[2/4] hbase git commit: HBASE-17356 Add replica get support

2019-01-02 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/ff23c022/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
index a826f8c..579d547 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.client;
 
 import static org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
+import static org.apache.hadoop.hbase.util.FutureUtils.addListener;
 
 import com.google.protobuf.Message;
 import com.google.protobuf.RpcChannel;
@@ -484,23 +485,23 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   @Override
   public CompletableFuture getDescriptor(TableName tableName) 
{
 CompletableFuture future = new CompletableFuture<>();
-this.> newMasterCaller()
-.action(
-  (controller, stub) -> this
-  .> call(
-controller, stub, 
RequestConverter.buildGetTableDescriptorsRequest(tableName), (s,
-c, req, done) -> s.getTableDescriptors(c, req, done), 
(resp) -> resp
-.getTableSchemaList())).call().whenComplete((tableSchemas, 
error) -> {
-  if (error != null) {
-future.completeExceptionally(error);
-return;
-  }
-  if (!tableSchemas.isEmpty()) {
-
future.complete(ProtobufUtil.toTableDescriptor(tableSchemas.get(0)));
-  } else {
-future.completeExceptionally(new 
TableNotFoundException(tableName.getNameAsString()));
-  }
-});
+addListener(this.> newMasterCaller()
+  .action((controller, stub) -> this
+.> call(
+  controller, stub, 
RequestConverter.buildGetTableDescriptorsRequest(tableName),
+  (s, c, req, done) -> s.getTableDescriptors(c, req, done),
+  (resp) -> resp.getTableSchemaList()))
+  .call(), (tableSchemas, error) -> {
+if (error != null) {
+  future.completeExceptionally(error);
+  return;
+}
+if (!tableSchemas.isEmpty()) {
+  future.complete(ProtobufUtil.toTableDescriptor(tableSchemas.get(0)));
+} else {
+  future.completeExceptionally(new 
TableNotFoundException(tableName.getNameAsString()));
+}
+  });
 return future;
   }
 
@@ -583,7 +584,7 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   @Override
   public CompletableFuture isTableEnabled(TableName tableName) {
 CompletableFuture future = new CompletableFuture<>();
-AsyncMetaTableAccessor.getTableState(metaTable, 
tableName).whenComplete((state, error) -> {
+addListener(AsyncMetaTableAccessor.getTableState(metaTable, tableName), 
(state, error) -> {
   if (error != null) {
 future.completeExceptionally(error);
 return;
@@ -600,7 +601,7 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   @Override
   public CompletableFuture isTableDisabled(TableName tableName) {
 CompletableFuture future = new CompletableFuture<>();
-AsyncMetaTableAccessor.getTableState(metaTable, 
tableName).whenComplete((state, error) -> {
+addListener(AsyncMetaTableAccessor.getTableState(metaTable, tableName), 
(state, error) -> {
   if (error != null) {
 future.completeExceptionally(error);
 return;
@@ -629,40 +630,37 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   private CompletableFuture isTableAvailable(TableName tableName,
   Optional splitKeys) {
 CompletableFuture future = new CompletableFuture<>();
-isTableEnabled(tableName).whenComplete(
-  (enabled, error) -> {
-if (error != null) {
-  future.completeExceptionally(error);
-  return;
-}
-if (!enabled) {
-  future.complete(false);
-} else {
-  AsyncMetaTableAccessor.getTableHRegionLocations(metaTable, 
Optional.of(tableName))
-  .whenComplete(
-(locations, error1) -> {
-  if (error1 != null) {
-future.completeExceptionally(error1);
-return;
-  }
-  List notDeployedRegions =
-  locations.stream().filter(loc -> loc.getServerName() == 
null)
-  .collect(Collectors.toList());
-  if (notDeployedRegions.size() > 0) {
-if (LOG.isDebugEnabled()) {
-  LOG.debug("Table " + tableName + " has " + 
notDeployedRegions.size()
-  + " regions");
-}
-future.complete(false);
-return;
-  }
+

[1/4] hbase git commit: HBASE-17356 Add replica get support

2019-01-02 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 40d2787e2 -> ff23c0221


http://git-wip-us.apache.org/repos/asf/hbase/blob/ff23c022/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
index d705d7c..28db7e8 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.client;
 import static java.util.stream.Collectors.toList;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+import static org.apache.hadoop.hbase.util.FutureUtils.addListener;
 
 import com.google.protobuf.RpcChannel;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -32,11 +32,11 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Function;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.TableName;
 import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
@@ -45,9 +45,12 @@ import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
+
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
@@ -63,7 +66,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType
 
 /**
  * The implementation of RawAsyncTable.
- * 
+ * 
  * The word 'Raw' means that this is a low level class. The returned {@link 
CompletableFuture} will
  * be finished inside the rpc framework thread, which means that the callbacks 
registered to the
  * {@link CompletableFuture} will also be executed inside the rpc framework 
thread. So users who use
@@ -74,6 +77,8 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType
 @InterfaceAudience.Private
 class RawAsyncTableImpl implements AsyncTable {
 
+  private static final Logger LOG = 
LoggerFactory.getLogger(RawAsyncTableImpl.class);
+
   private final AsyncConnectionImpl conn;
 
   private final TableName tableName;
@@ -204,58 +209,126 @@ class RawAsyncTableImpl implements 
AsyncTable {
 
   private  SingleRequestCallerBuilder newCaller(byte[] row, long 
rpcTimeoutNs) {
 return conn.callerFactory. single().table(tableName).row(row)
-.rpcTimeout(rpcTimeoutNs, TimeUnit.NANOSECONDS)
-.operationTimeout(operationTimeoutNs, TimeUnit.NANOSECONDS)
-.pause(pauseNs, TimeUnit.NANOSECONDS).maxAttempts(maxAttempts)
-.startLogErrorsCnt(startLogErrorsCnt);
+  .rpcTimeout(rpcTimeoutNs, TimeUnit.NANOSECONDS)
+  .operationTimeout(operationTimeoutNs, TimeUnit.NANOSECONDS)
+  .pause(pauseNs, TimeUnit.NANOSECONDS).maxAttempts(maxAttempts)
+  .startLogErrorsCnt(startLogErrorsCnt);
   }
 
   private  SingleRequestCallerBuilder newCaller(Row row, long 
rpcTimeoutNs) {
 return newCaller(row.getRow(), rpcTimeoutNs);
   }
 
+  private CompletableFuture get(Get get, int replicaId, long 
timeoutNs) {
+return this. newCaller(get, timeoutNs)
+  .action((controller, loc, stub) -> RawAsyncTableImpl
+. call(controller, loc, stub, 
get,
+  RequestConverter::buildGetRequest, (s, c, req, done) -> s.get(c, 
req, done),
+  (c, resp) -> ProtobufUtil.toResult(resp.getResult(), 
c.cellScanner(
+  .replicaId(replicaId).call();
+  }
+
+  // Connect the two futures, if the src future is done, then mark the dst 
future as done. And if
+  // the dst future is done, then cancel the src future. This is used for 
timeline consistent read.
+  private  void connect(CompletableFuture srcFuture, 
CompletableFuture dstFuture) {
+addListener(srcFuture, (r, e) -> {
+  if (e != null) {
+dstFuture.completeExceptionally(e);
+  } else {
+dstFuture.complete(r);
+  }
+});
+// The 

hbase git commit: HBASE-21662 Add append_peer_exclude_namespaces and remove_peer_exclude_namespaces shell commands

2019-01-02 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 c2c38a637 -> 40d2787e2


HBASE-21662 Add append_peer_exclude_namespaces and 
remove_peer_exclude_namespaces shell commands

Signed-off-by: Guanghao Zhang 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/40d2787e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/40d2787e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/40d2787e

Branch: refs/heads/branch-2.0
Commit: 40d2787e2c928b52cfaaaf6de82dc3a4993d94d0
Parents: c2c38a6
Author: meiyi 
Authored: Wed Jan 2 14:08:22 2019 +0800
Committer: Guanghao Zhang 
Committed: Thu Jan 3 10:49:04 2019 +0800

--
 .../src/main/ruby/hbase/replication_admin.rb| 39 +++
 hbase-shell/src/main/ruby/shell.rb  |  2 +
 .../commands/append_peer_exclude_namespaces.rb  | 47 +
 .../commands/remove_peer_exclude_namespaces.rb  | 45 
 .../test/ruby/hbase/replication_admin_test.rb   | 74 
 5 files changed, 207 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/40d2787e/hbase-shell/src/main/ruby/hbase/replication_admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/replication_admin.rb 
b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
index b9d4a0c..533859a 100644
--- a/hbase-shell/src/main/ruby/hbase/replication_admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
@@ -277,6 +277,45 @@ module Hbase
   end
 end
 
+# Append exclude namespaces config for the specified peer
+def append_peer_exclude_namespaces(id, namespaces)
+  unless namespaces.nil?
+rpc = get_peer_config(id)
+unless rpc.nil?
+  if rpc.getExcludeNamespaces.nil?
+ns_set = java.util.HashSet.new
+  else
+ns_set = java.util.HashSet.new(rpc.getExcludeNamespaces)
+  end
+  namespaces.each do |n|
+ns_set.add(n)
+  end
+  builder = ReplicationPeerConfig.newBuilder(rpc)
+  builder.setExcludeNamespaces(ns_set)
+  @admin.updateReplicationPeerConfig(id, builder.build)
+end
+  end
+end
+
+# Remove exclude namespaces config for the specified peer
+def remove_peer_exclude_namespaces(id, namespaces)
+  unless namespaces.nil?
+rpc = get_peer_config(id)
+unless rpc.nil?
+  ns_set = rpc.getExcludeNamespaces
+  unless ns_set.nil?
+ns_set = java.util.HashSet.new(ns_set)
+namespaces.each do |n|
+  ns_set.remove(n)
+end
+  end
+  builder = ReplicationPeerConfig.newBuilder(rpc)
+  builder.setExcludeNamespaces(ns_set)
+  @admin.updateReplicationPeerConfig(id, builder.build)
+end
+  end
+end
+
 def set_peer_replicate_all(id, replicate_all)
   rpc = get_peer_config(id)
   return if rpc.nil?

http://git-wip-us.apache.org/repos/asf/hbase/blob/40d2787e/hbase-shell/src/main/ruby/shell.rb
--
diff --git a/hbase-shell/src/main/ruby/shell.rb 
b/hbase-shell/src/main/ruby/shell.rb
index 32301dd..6184a33 100644
--- a/hbase-shell/src/main/ruby/shell.rb
+++ b/hbase-shell/src/main/ruby/shell.rb
@@ -378,6 +378,8 @@ Shell.load_command_group(
 append_peer_namespaces
 remove_peer_namespaces
 set_peer_exclude_namespaces
+append_peer_exclude_namespaces
+remove_peer_exclude_namespaces
 show_peer_tableCFs
 set_peer_tableCFs
 set_peer_exclude_tableCFs

http://git-wip-us.apache.org/repos/asf/hbase/blob/40d2787e/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb
--
diff --git 
a/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb 
b/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb
new file mode 100644
index 000..4f500c8
--- /dev/null
+++ b/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb
@@ -0,0 +1,47 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

hbase git commit: HBASE-21662 Add append_peer_exclude_namespaces and remove_peer_exclude_namespaces shell commands

2019-01-02 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/branch-2.1 0c3da777e -> 24c8fd02c


HBASE-21662 Add append_peer_exclude_namespaces and 
remove_peer_exclude_namespaces shell commands

Signed-off-by: Guanghao Zhang 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/24c8fd02
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/24c8fd02
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/24c8fd02

Branch: refs/heads/branch-2.1
Commit: 24c8fd02ca91f9667167fa1ed623916d20055680
Parents: 0c3da77
Author: meiyi 
Authored: Wed Jan 2 14:08:22 2019 +0800
Committer: Guanghao Zhang 
Committed: Thu Jan 3 10:42:34 2019 +0800

--
 .../src/main/ruby/hbase/replication_admin.rb| 39 +++
 hbase-shell/src/main/ruby/shell.rb  |  2 +
 .../commands/append_peer_exclude_namespaces.rb  | 47 +
 .../commands/remove_peer_exclude_namespaces.rb  | 45 
 .../test/ruby/hbase/replication_admin_test.rb   | 74 
 5 files changed, 207 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/24c8fd02/hbase-shell/src/main/ruby/hbase/replication_admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/replication_admin.rb 
b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
index d46b468..af2b071 100644
--- a/hbase-shell/src/main/ruby/hbase/replication_admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
@@ -282,6 +282,45 @@ module Hbase
   end
 end
 
+# Append exclude namespaces config for the specified peer
+def append_peer_exclude_namespaces(id, namespaces)
+  unless namespaces.nil?
+rpc = get_peer_config(id)
+unless rpc.nil?
+  if rpc.getExcludeNamespaces.nil?
+ns_set = java.util.HashSet.new
+  else
+ns_set = java.util.HashSet.new(rpc.getExcludeNamespaces)
+  end
+  namespaces.each do |n|
+ns_set.add(n)
+  end
+  builder = ReplicationPeerConfig.newBuilder(rpc)
+  builder.setExcludeNamespaces(ns_set)
+  @admin.updateReplicationPeerConfig(id, builder.build)
+end
+  end
+end
+
+# Remove exclude namespaces config for the specified peer
+def remove_peer_exclude_namespaces(id, namespaces)
+  unless namespaces.nil?
+rpc = get_peer_config(id)
+unless rpc.nil?
+  ns_set = rpc.getExcludeNamespaces
+  unless ns_set.nil?
+ns_set = java.util.HashSet.new(ns_set)
+namespaces.each do |n|
+  ns_set.remove(n)
+end
+  end
+  builder = ReplicationPeerConfig.newBuilder(rpc)
+  builder.setExcludeNamespaces(ns_set)
+  @admin.updateReplicationPeerConfig(id, builder.build)
+end
+  end
+end
+
 def set_peer_replicate_all(id, replicate_all)
   rpc = get_peer_config(id)
   return if rpc.nil?

http://git-wip-us.apache.org/repos/asf/hbase/blob/24c8fd02/hbase-shell/src/main/ruby/shell.rb
--
diff --git a/hbase-shell/src/main/ruby/shell.rb 
b/hbase-shell/src/main/ruby/shell.rb
index ce26da2..a15e169 100644
--- a/hbase-shell/src/main/ruby/shell.rb
+++ b/hbase-shell/src/main/ruby/shell.rb
@@ -382,6 +382,8 @@ Shell.load_command_group(
 append_peer_namespaces
 remove_peer_namespaces
 set_peer_exclude_namespaces
+append_peer_exclude_namespaces
+remove_peer_exclude_namespaces
 show_peer_tableCFs
 set_peer_tableCFs
 set_peer_exclude_tableCFs

http://git-wip-us.apache.org/repos/asf/hbase/blob/24c8fd02/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb
--
diff --git 
a/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb 
b/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb
new file mode 100644
index 000..4f500c8
--- /dev/null
+++ b/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb
@@ -0,0 +1,47 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

hbase git commit: HBASE-21662 Add append_peer_exclude_namespaces and remove_peer_exclude_namespaces shell commands

2019-01-02 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/branch-2 77ca66038 -> 90b9ed152


HBASE-21662 Add append_peer_exclude_namespaces and 
remove_peer_exclude_namespaces shell commands

Signed-off-by: Guanghao Zhang 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/90b9ed15
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/90b9ed15
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/90b9ed15

Branch: refs/heads/branch-2
Commit: 90b9ed15231c01f979c76620f5201bdcd4975ac9
Parents: 77ca660
Author: meiyi 
Authored: Wed Jan 2 14:08:22 2019 +0800
Committer: Guanghao Zhang 
Committed: Thu Jan 3 10:35:30 2019 +0800

--
 .../src/main/ruby/hbase/replication_admin.rb| 39 +++
 hbase-shell/src/main/ruby/shell.rb  |  2 +
 .../commands/append_peer_exclude_namespaces.rb  | 47 +
 .../commands/remove_peer_exclude_namespaces.rb  | 45 
 .../test/ruby/hbase/replication_admin_test.rb   | 74 
 5 files changed, 207 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/90b9ed15/hbase-shell/src/main/ruby/hbase/replication_admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/replication_admin.rb 
b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
index d46b468..af2b071 100644
--- a/hbase-shell/src/main/ruby/hbase/replication_admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
@@ -282,6 +282,45 @@ module Hbase
   end
 end
 
+# Append exclude namespaces config for the specified peer
+def append_peer_exclude_namespaces(id, namespaces)
+  unless namespaces.nil?
+rpc = get_peer_config(id)
+unless rpc.nil?
+  if rpc.getExcludeNamespaces.nil?
+ns_set = java.util.HashSet.new
+  else
+ns_set = java.util.HashSet.new(rpc.getExcludeNamespaces)
+  end
+  namespaces.each do |n|
+ns_set.add(n)
+  end
+  builder = ReplicationPeerConfig.newBuilder(rpc)
+  builder.setExcludeNamespaces(ns_set)
+  @admin.updateReplicationPeerConfig(id, builder.build)
+end
+  end
+end
+
+# Remove exclude namespaces config for the specified peer
+def remove_peer_exclude_namespaces(id, namespaces)
+  unless namespaces.nil?
+rpc = get_peer_config(id)
+unless rpc.nil?
+  ns_set = rpc.getExcludeNamespaces
+  unless ns_set.nil?
+ns_set = java.util.HashSet.new(ns_set)
+namespaces.each do |n|
+  ns_set.remove(n)
+end
+  end
+  builder = ReplicationPeerConfig.newBuilder(rpc)
+  builder.setExcludeNamespaces(ns_set)
+  @admin.updateReplicationPeerConfig(id, builder.build)
+end
+  end
+end
+
 def set_peer_replicate_all(id, replicate_all)
   rpc = get_peer_config(id)
   return if rpc.nil?

http://git-wip-us.apache.org/repos/asf/hbase/blob/90b9ed15/hbase-shell/src/main/ruby/shell.rb
--
diff --git a/hbase-shell/src/main/ruby/shell.rb 
b/hbase-shell/src/main/ruby/shell.rb
index 9aada3a..0ed2623 100644
--- a/hbase-shell/src/main/ruby/shell.rb
+++ b/hbase-shell/src/main/ruby/shell.rb
@@ -384,6 +384,8 @@ Shell.load_command_group(
 append_peer_namespaces
 remove_peer_namespaces
 set_peer_exclude_namespaces
+append_peer_exclude_namespaces
+remove_peer_exclude_namespaces
 show_peer_tableCFs
 set_peer_tableCFs
 set_peer_exclude_tableCFs

http://git-wip-us.apache.org/repos/asf/hbase/blob/90b9ed15/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb
--
diff --git 
a/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb 
b/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb
new file mode 100644
index 000..4f500c8
--- /dev/null
+++ b/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb
@@ -0,0 +1,47 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

hbase git commit: HBASE-21662 Add append_peer_exclude_namespaces and remove_peer_exclude_namespaces shell commands

2019-01-02 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/master db66e6cc9 -> 466fa920f


HBASE-21662 Add append_peer_exclude_namespaces and 
remove_peer_exclude_namespaces shell commands

Signed-off-by: Guanghao Zhang 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/466fa920
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/466fa920
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/466fa920

Branch: refs/heads/master
Commit: 466fa920fee572fe20db3b77ebf539dc304d5f31
Parents: db66e6c
Author: meiyi 
Authored: Wed Jan 2 14:08:22 2019 +0800
Committer: Guanghao Zhang 
Committed: Thu Jan 3 10:21:43 2019 +0800

--
 .../src/main/ruby/hbase/replication_admin.rb| 39 +++
 hbase-shell/src/main/ruby/shell.rb  |  2 +
 .../commands/append_peer_exclude_namespaces.rb  | 47 +
 .../commands/remove_peer_exclude_namespaces.rb  | 45 
 .../test/ruby/hbase/replication_admin_test.rb   | 74 
 5 files changed, 207 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/466fa920/hbase-shell/src/main/ruby/hbase/replication_admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/replication_admin.rb 
b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
index c01b6ea..e061168 100644
--- a/hbase-shell/src/main/ruby/hbase/replication_admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
@@ -285,6 +285,45 @@ module Hbase
   end
 end
 
+# Append exclude namespaces config for the specified peer
+def append_peer_exclude_namespaces(id, namespaces)
+  unless namespaces.nil?
+rpc = get_peer_config(id)
+unless rpc.nil?
+  if rpc.getExcludeNamespaces.nil?
+ns_set = java.util.HashSet.new
+  else
+ns_set = java.util.HashSet.new(rpc.getExcludeNamespaces)
+  end
+  namespaces.each do |n|
+ns_set.add(n)
+  end
+  builder = ReplicationPeerConfig.newBuilder(rpc)
+  builder.setExcludeNamespaces(ns_set)
+  @admin.updateReplicationPeerConfig(id, builder.build)
+end
+  end
+end
+
+# Remove exclude namespaces config for the specified peer
+def remove_peer_exclude_namespaces(id, namespaces)
+  unless namespaces.nil?
+rpc = get_peer_config(id)
+unless rpc.nil?
+  ns_set = rpc.getExcludeNamespaces
+  unless ns_set.nil?
+ns_set = java.util.HashSet.new(ns_set)
+namespaces.each do |n|
+  ns_set.remove(n)
+end
+  end
+  builder = ReplicationPeerConfig.newBuilder(rpc)
+  builder.setExcludeNamespaces(ns_set)
+  @admin.updateReplicationPeerConfig(id, builder.build)
+end
+  end
+end
+
 def set_peer_replicate_all(id, replicate_all)
   rpc = get_peer_config(id)
   return if rpc.nil?

http://git-wip-us.apache.org/repos/asf/hbase/blob/466fa920/hbase-shell/src/main/ruby/shell.rb
--
diff --git a/hbase-shell/src/main/ruby/shell.rb 
b/hbase-shell/src/main/ruby/shell.rb
index 1507ca3..1f7eae6 100644
--- a/hbase-shell/src/main/ruby/shell.rb
+++ b/hbase-shell/src/main/ruby/shell.rb
@@ -384,6 +384,8 @@ Shell.load_command_group(
 append_peer_namespaces
 remove_peer_namespaces
 set_peer_exclude_namespaces
+append_peer_exclude_namespaces
+remove_peer_exclude_namespaces
 show_peer_tableCFs
 set_peer_tableCFs
 set_peer_exclude_tableCFs

http://git-wip-us.apache.org/repos/asf/hbase/blob/466fa920/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb
--
diff --git 
a/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb 
b/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb
new file mode 100644
index 000..4f500c8
--- /dev/null
+++ b/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb
@@ -0,0 +1,47 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

[1/3] hbase git commit: HBASE-17356 Add replica get support

2019-01-02 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master e4b6b4afb -> db66e6cc9


http://git-wip-us.apache.org/repos/asf/hbase/blob/db66e6cc/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
index d705d7c..28db7e8 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.client;
 import static java.util.stream.Collectors.toList;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+import static org.apache.hadoop.hbase.util.FutureUtils.addListener;
 
 import com.google.protobuf.RpcChannel;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -32,11 +32,11 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Function;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.TableName;
 import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
@@ -45,9 +45,12 @@ import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
+
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
@@ -63,7 +66,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType
 
 /**
  * The implementation of RawAsyncTable.
- * 
+ * 
  * The word 'Raw' means that this is a low level class. The returned {@link 
CompletableFuture} will
  * be finished inside the rpc framework thread, which means that the callbacks 
registered to the
  * {@link CompletableFuture} will also be executed inside the rpc framework 
thread. So users who use
@@ -74,6 +77,8 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType
 @InterfaceAudience.Private
 class RawAsyncTableImpl implements AsyncTable {
 
+  private static final Logger LOG = 
LoggerFactory.getLogger(RawAsyncTableImpl.class);
+
   private final AsyncConnectionImpl conn;
 
   private final TableName tableName;
@@ -204,58 +209,126 @@ class RawAsyncTableImpl implements 
AsyncTable {
 
   private  SingleRequestCallerBuilder newCaller(byte[] row, long 
rpcTimeoutNs) {
 return conn.callerFactory. single().table(tableName).row(row)
-.rpcTimeout(rpcTimeoutNs, TimeUnit.NANOSECONDS)
-.operationTimeout(operationTimeoutNs, TimeUnit.NANOSECONDS)
-.pause(pauseNs, TimeUnit.NANOSECONDS).maxAttempts(maxAttempts)
-.startLogErrorsCnt(startLogErrorsCnt);
+  .rpcTimeout(rpcTimeoutNs, TimeUnit.NANOSECONDS)
+  .operationTimeout(operationTimeoutNs, TimeUnit.NANOSECONDS)
+  .pause(pauseNs, TimeUnit.NANOSECONDS).maxAttempts(maxAttempts)
+  .startLogErrorsCnt(startLogErrorsCnt);
   }
 
   private  SingleRequestCallerBuilder newCaller(Row row, long 
rpcTimeoutNs) {
 return newCaller(row.getRow(), rpcTimeoutNs);
   }
 
+  private CompletableFuture get(Get get, int replicaId, long 
timeoutNs) {
+return this. newCaller(get, timeoutNs)
+  .action((controller, loc, stub) -> RawAsyncTableImpl
+. call(controller, loc, stub, 
get,
+  RequestConverter::buildGetRequest, (s, c, req, done) -> s.get(c, 
req, done),
+  (c, resp) -> ProtobufUtil.toResult(resp.getResult(), 
c.cellScanner(
+  .replicaId(replicaId).call();
+  }
+
+  // Connect the two futures, if the src future is done, then mark the dst 
future as done. And if
+  // the dst future is done, then cancel the src future. This is used for 
timeline consistent read.
+  private  void connect(CompletableFuture srcFuture, 
CompletableFuture dstFuture) {
+addListener(srcFuture, (r, e) -> {
+  if (e != null) {
+dstFuture.completeExceptionally(e);
+  } else {
+dstFuture.complete(r);
+  }
+});
+// The cancellation 

[2/3] hbase git commit: HBASE-17356 Add replica get support

2019-01-02 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/db66e6cc/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
index 4f73909..869a630 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.client;
 
 import static org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
+import static org.apache.hadoop.hbase.util.FutureUtils.addListener;
 
 import com.google.protobuf.Message;
 import com.google.protobuf.RpcChannel;
@@ -491,23 +492,23 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   @Override
   public CompletableFuture getDescriptor(TableName tableName) 
{
 CompletableFuture future = new CompletableFuture<>();
-this.> newMasterCaller()
-.action(
-  (controller, stub) -> this
-  .> call(
-controller, stub, 
RequestConverter.buildGetTableDescriptorsRequest(tableName), (s,
-c, req, done) -> s.getTableDescriptors(c, req, done), 
(resp) -> resp
-.getTableSchemaList())).call().whenComplete((tableSchemas, 
error) -> {
-  if (error != null) {
-future.completeExceptionally(error);
-return;
-  }
-  if (!tableSchemas.isEmpty()) {
-
future.complete(ProtobufUtil.toTableDescriptor(tableSchemas.get(0)));
-  } else {
-future.completeExceptionally(new 
TableNotFoundException(tableName.getNameAsString()));
-  }
-});
+addListener(this.> newMasterCaller()
+  .action((controller, stub) -> this
+.> call(
+  controller, stub, 
RequestConverter.buildGetTableDescriptorsRequest(tableName),
+  (s, c, req, done) -> s.getTableDescriptors(c, req, done),
+  (resp) -> resp.getTableSchemaList()))
+  .call(), (tableSchemas, error) -> {
+if (error != null) {
+  future.completeExceptionally(error);
+  return;
+}
+if (!tableSchemas.isEmpty()) {
+  future.complete(ProtobufUtil.toTableDescriptor(tableSchemas.get(0)));
+} else {
+  future.completeExceptionally(new 
TableNotFoundException(tableName.getNameAsString()));
+}
+  });
 return future;
   }
 
@@ -590,7 +591,7 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   @Override
   public CompletableFuture isTableEnabled(TableName tableName) {
 CompletableFuture future = new CompletableFuture<>();
-AsyncMetaTableAccessor.getTableState(metaTable, 
tableName).whenComplete((state, error) -> {
+addListener(AsyncMetaTableAccessor.getTableState(metaTable, tableName), 
(state, error) -> {
   if (error != null) {
 future.completeExceptionally(error);
 return;
@@ -607,7 +608,7 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   @Override
   public CompletableFuture isTableDisabled(TableName tableName) {
 CompletableFuture future = new CompletableFuture<>();
-AsyncMetaTableAccessor.getTableState(metaTable, 
tableName).whenComplete((state, error) -> {
+addListener(AsyncMetaTableAccessor.getTableState(metaTable, tableName), 
(state, error) -> {
   if (error != null) {
 future.completeExceptionally(error);
 return;
@@ -636,40 +637,37 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   private CompletableFuture isTableAvailable(TableName tableName,
   Optional splitKeys) {
 CompletableFuture future = new CompletableFuture<>();
-isTableEnabled(tableName).whenComplete(
-  (enabled, error) -> {
-if (error != null) {
-  future.completeExceptionally(error);
-  return;
-}
-if (!enabled) {
-  future.complete(false);
-} else {
-  AsyncMetaTableAccessor.getTableHRegionLocations(metaTable, 
Optional.of(tableName))
-  .whenComplete(
-(locations, error1) -> {
-  if (error1 != null) {
-future.completeExceptionally(error1);
-return;
-  }
-  List notDeployedRegions =
-  locations.stream().filter(loc -> loc.getServerName() == 
null)
-  .collect(Collectors.toList());
-  if (notDeployedRegions.size() > 0) {
-if (LOG.isDebugEnabled()) {
-  LOG.debug("Table " + tableName + " has " + 
notDeployedRegions.size()
-  + " regions");
-}
-future.complete(false);
-return;
-  }
+

[2/3] hbase git commit: HBASE-17356 Add replica get support

2019-01-02 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/0c3da777/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
index 0fd0e59..b3cd17f 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.client;
 
 import static org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
+import static org.apache.hadoop.hbase.util.FutureUtils.addListener;
 
 import com.google.protobuf.Message;
 import com.google.protobuf.RpcChannel;
@@ -485,23 +486,23 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   @Override
   public CompletableFuture getDescriptor(TableName tableName) 
{
 CompletableFuture future = new CompletableFuture<>();
-this.> newMasterCaller()
-.action(
-  (controller, stub) -> this
-  .> call(
-controller, stub, 
RequestConverter.buildGetTableDescriptorsRequest(tableName), (s,
-c, req, done) -> s.getTableDescriptors(c, req, done), 
(resp) -> resp
-.getTableSchemaList())).call().whenComplete((tableSchemas, 
error) -> {
-  if (error != null) {
-future.completeExceptionally(error);
-return;
-  }
-  if (!tableSchemas.isEmpty()) {
-
future.complete(ProtobufUtil.toTableDescriptor(tableSchemas.get(0)));
-  } else {
-future.completeExceptionally(new 
TableNotFoundException(tableName.getNameAsString()));
-  }
-});
+addListener(this.> newMasterCaller()
+  .action((controller, stub) -> this
+.> call(
+  controller, stub, 
RequestConverter.buildGetTableDescriptorsRequest(tableName),
+  (s, c, req, done) -> s.getTableDescriptors(c, req, done),
+  (resp) -> resp.getTableSchemaList()))
+  .call(), (tableSchemas, error) -> {
+if (error != null) {
+  future.completeExceptionally(error);
+  return;
+}
+if (!tableSchemas.isEmpty()) {
+  future.complete(ProtobufUtil.toTableDescriptor(tableSchemas.get(0)));
+} else {
+  future.completeExceptionally(new 
TableNotFoundException(tableName.getNameAsString()));
+}
+  });
 return future;
   }
 
@@ -584,7 +585,7 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   @Override
   public CompletableFuture isTableEnabled(TableName tableName) {
 CompletableFuture future = new CompletableFuture<>();
-AsyncMetaTableAccessor.getTableState(metaTable, 
tableName).whenComplete((state, error) -> {
+addListener(AsyncMetaTableAccessor.getTableState(metaTable, tableName), 
(state, error) -> {
   if (error != null) {
 future.completeExceptionally(error);
 return;
@@ -601,7 +602,7 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   @Override
   public CompletableFuture isTableDisabled(TableName tableName) {
 CompletableFuture future = new CompletableFuture<>();
-AsyncMetaTableAccessor.getTableState(metaTable, 
tableName).whenComplete((state, error) -> {
+addListener(AsyncMetaTableAccessor.getTableState(metaTable, tableName), 
(state, error) -> {
   if (error != null) {
 future.completeExceptionally(error);
 return;
@@ -630,40 +631,37 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   private CompletableFuture isTableAvailable(TableName tableName,
   Optional splitKeys) {
 CompletableFuture future = new CompletableFuture<>();
-isTableEnabled(tableName).whenComplete(
-  (enabled, error) -> {
-if (error != null) {
-  future.completeExceptionally(error);
-  return;
-}
-if (!enabled) {
-  future.complete(false);
-} else {
-  AsyncMetaTableAccessor.getTableHRegionLocations(metaTable, 
Optional.of(tableName))
-  .whenComplete(
-(locations, error1) -> {
-  if (error1 != null) {
-future.completeExceptionally(error1);
-return;
-  }
-  List notDeployedRegions =
-  locations.stream().filter(loc -> loc.getServerName() == 
null)
-  .collect(Collectors.toList());
-  if (notDeployedRegions.size() > 0) {
-if (LOG.isDebugEnabled()) {
-  LOG.debug("Table " + tableName + " has " + 
notDeployedRegions.size()
-  + " regions");
-}
-future.complete(false);
-return;
-  }
+

[2/3] hbase git commit: HBASE-17356 Add replica get support

2019-01-02 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/77ca6603/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
index 1edfb35..53aaf77 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.client;
 
 import static org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
+import static org.apache.hadoop.hbase.util.FutureUtils.addListener;
 
 import com.google.protobuf.Message;
 import com.google.protobuf.RpcChannel;
@@ -488,23 +489,23 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   @Override
   public CompletableFuture getDescriptor(TableName tableName) 
{
 CompletableFuture future = new CompletableFuture<>();
-this.> newMasterCaller()
-.action(
-  (controller, stub) -> this
-  .> call(
-controller, stub, 
RequestConverter.buildGetTableDescriptorsRequest(tableName), (s,
-c, req, done) -> s.getTableDescriptors(c, req, done), 
(resp) -> resp
-.getTableSchemaList())).call().whenComplete((tableSchemas, 
error) -> {
-  if (error != null) {
-future.completeExceptionally(error);
-return;
-  }
-  if (!tableSchemas.isEmpty()) {
-
future.complete(ProtobufUtil.toTableDescriptor(tableSchemas.get(0)));
-  } else {
-future.completeExceptionally(new 
TableNotFoundException(tableName.getNameAsString()));
-  }
-});
+addListener(this.> newMasterCaller()
+  .action((controller, stub) -> this
+.> call(
+  controller, stub, 
RequestConverter.buildGetTableDescriptorsRequest(tableName),
+  (s, c, req, done) -> s.getTableDescriptors(c, req, done),
+  (resp) -> resp.getTableSchemaList()))
+  .call(), (tableSchemas, error) -> {
+if (error != null) {
+  future.completeExceptionally(error);
+  return;
+}
+if (!tableSchemas.isEmpty()) {
+  future.complete(ProtobufUtil.toTableDescriptor(tableSchemas.get(0)));
+} else {
+  future.completeExceptionally(new 
TableNotFoundException(tableName.getNameAsString()));
+}
+  });
 return future;
   }
 
@@ -587,7 +588,7 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   @Override
   public CompletableFuture isTableEnabled(TableName tableName) {
 CompletableFuture future = new CompletableFuture<>();
-AsyncMetaTableAccessor.getTableState(metaTable, 
tableName).whenComplete((state, error) -> {
+addListener(AsyncMetaTableAccessor.getTableState(metaTable, tableName), 
(state, error) -> {
   if (error != null) {
 future.completeExceptionally(error);
 return;
@@ -604,7 +605,7 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   @Override
   public CompletableFuture isTableDisabled(TableName tableName) {
 CompletableFuture future = new CompletableFuture<>();
-AsyncMetaTableAccessor.getTableState(metaTable, 
tableName).whenComplete((state, error) -> {
+addListener(AsyncMetaTableAccessor.getTableState(metaTable, tableName), 
(state, error) -> {
   if (error != null) {
 future.completeExceptionally(error);
 return;
@@ -633,40 +634,37 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   private CompletableFuture isTableAvailable(TableName tableName,
   Optional splitKeys) {
 CompletableFuture future = new CompletableFuture<>();
-isTableEnabled(tableName).whenComplete(
-  (enabled, error) -> {
-if (error != null) {
-  future.completeExceptionally(error);
-  return;
-}
-if (!enabled) {
-  future.complete(false);
-} else {
-  AsyncMetaTableAccessor.getTableHRegionLocations(metaTable, 
Optional.of(tableName))
-  .whenComplete(
-(locations, error1) -> {
-  if (error1 != null) {
-future.completeExceptionally(error1);
-return;
-  }
-  List notDeployedRegions =
-  locations.stream().filter(loc -> loc.getServerName() == 
null)
-  .collect(Collectors.toList());
-  if (notDeployedRegions.size() > 0) {
-if (LOG.isDebugEnabled()) {
-  LOG.debug("Table " + tableName + " has " + 
notDeployedRegions.size()
-  + " regions");
-}
-future.complete(false);
-return;
-  }
+

[3/3] hbase git commit: HBASE-17356 Add replica get support

2019-01-02 Thread zhangduo
HBASE-17356 Add replica get support


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/db66e6cc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/db66e6cc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/db66e6cc

Branch: refs/heads/master
Commit: db66e6cc9e1c6ea027631388aba688cb623b7d0a
Parents: e4b6b4a
Author: zhangduo 
Authored: Tue Jan 1 21:59:37 2019 +0800
Committer: zhangduo 
Committed: Thu Jan 3 08:38:20 2019 +0800

--
 .../apache/hadoop/hbase/RegionLocations.java|   30 +-
 .../client/AsyncBatchRpcRetryingCaller.java |  114 +-
 .../client/AsyncConnectionConfiguration.java|   12 +
 .../hbase/client/AsyncConnectionImpl.java   |1 -
 .../hbase/client/AsyncMetaRegionLocator.java|  125 +-
 .../hbase/client/AsyncNonMetaRegionLocator.java |  291 +--
 .../hadoop/hbase/client/AsyncRegionLocator.java |  129 +-
 .../hbase/client/AsyncRegionLocatorHelper.java  |  147 ++
 .../hbase/client/AsyncRpcRetryingCaller.java|   15 +-
 .../client/AsyncRpcRetryingCallerFactory.java   |   55 +-
 .../AsyncSingleRequestRpcRetryingCaller.java|   71 +-
 .../hbase/client/AsyncTableRegionLocator.java   |   28 +-
 .../client/AsyncTableRegionLocatorImpl.java |6 +-
 .../hbase/client/ConnectionConfiguration.java   |5 +-
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java | 2033 +-
 .../hadoop/hbase/client/RawAsyncTableImpl.java  |  208 +-
 .../apache/hadoop/hbase/util/FutureUtils.java   |   60 +
 .../hbase/client/RegionReplicaTestHelper.java   |  161 ++
 .../client/TestAsyncMetaRegionLocator.java  |   55 +-
 .../client/TestAsyncNonMetaRegionLocator.java   |  126 +-
 ...syncNonMetaRegionLocatorConcurrenyLimit.java |   20 +-
 ...TestAsyncSingleRequestRpcRetryingCaller.java |   56 +-
 .../client/TestAsyncTableLocatePrefetch.java|4 +-
 .../client/TestAsyncTableRegionReplicasGet.java |  204 ++
 .../hbase/client/TestZKAsyncRegistry.java   |   44 +-
 25 files changed, 2366 insertions(+), 1634 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/db66e6cc/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
index fd6f3c7..f98bf03 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
@@ -56,8 +56,8 @@ public class RegionLocations {
 int index = 0;
 for (HRegionLocation loc : locations) {
   if (loc != null) {
-if (loc.getRegionInfo().getReplicaId() >= maxReplicaId) {
-  maxReplicaId = loc.getRegionInfo().getReplicaId();
+if (loc.getRegion().getReplicaId() >= maxReplicaId) {
+  maxReplicaId = loc.getRegion().getReplicaId();
   maxReplicaIdIndex = index;
 }
   }
@@ -72,7 +72,7 @@ public class RegionLocations {
   this.locations = new HRegionLocation[maxReplicaId + 1];
   for (HRegionLocation loc : locations) {
 if (loc != null) {
-  this.locations[loc.getRegionInfo().getReplicaId()] = loc;
+  this.locations[loc.getRegion().getReplicaId()] = loc;
 }
   }
 }
@@ -146,7 +146,7 @@ public class RegionLocations {
   public RegionLocations remove(HRegionLocation location) {
 if (location == null) return this;
 if (location.getRegion() == null) return this;
-int replicaId = location.getRegionInfo().getReplicaId();
+int replicaId = location.getRegion().getReplicaId();
 if (replicaId >= locations.length) return this;
 
 // check whether something to remove. HRL.compareTo() compares ONLY the
@@ -203,14 +203,14 @@ public class RegionLocations {
 // in case of region replication going down, we might have a leak here.
 int max = other.locations.length;
 
-HRegionInfo regionInfo = null;
+RegionInfo regionInfo = null;
 for (int i = 0; i < max; i++) {
   HRegionLocation thisLoc = this.getRegionLocation(i);
   HRegionLocation otherLoc = other.getRegionLocation(i);
-  if (regionInfo == null && otherLoc != null && otherLoc.getRegionInfo() 
!= null) {
+  if (regionInfo == null && otherLoc != null && otherLoc.getRegion() != 
null) {
 // regionInfo is the first non-null HRI from other RegionLocations. We 
use it to ensure that
 // all replica region infos belong to the same region with same region 
id.
-regionInfo = otherLoc.getRegionInfo();
+regionInfo = otherLoc.getRegion();
   }
 
   HRegionLocation selectedLoc = selectRegionLocation(thisLoc,
@@ -232,7 +232,7 @@ public class RegionLocations {
   for 

[3/3] hbase git commit: HBASE-17356 Add replica get support

2019-01-02 Thread zhangduo
HBASE-17356 Add replica get support


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/77ca6603
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/77ca6603
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/77ca6603

Branch: refs/heads/branch-2
Commit: 77ca660389367ea92e6681dbe9db2c48525bce62
Parents: 2d8d74c
Author: zhangduo 
Authored: Tue Jan 1 21:59:37 2019 +0800
Committer: zhangduo 
Committed: Thu Jan 3 08:38:53 2019 +0800

--
 .../apache/hadoop/hbase/RegionLocations.java|   30 +-
 .../client/AsyncBatchRpcRetryingCaller.java |  114 +-
 .../client/AsyncConnectionConfiguration.java|   12 +
 .../hbase/client/AsyncConnectionImpl.java   |1 -
 .../hbase/client/AsyncMetaRegionLocator.java|  125 +-
 .../hbase/client/AsyncNonMetaRegionLocator.java |  291 +--
 .../hadoop/hbase/client/AsyncRegionLocator.java |  129 +-
 .../hbase/client/AsyncRegionLocatorHelper.java  |  147 ++
 .../hbase/client/AsyncRpcRetryingCaller.java|   15 +-
 .../client/AsyncRpcRetryingCallerFactory.java   |   55 +-
 .../AsyncSingleRequestRpcRetryingCaller.java|   71 +-
 .../hbase/client/AsyncTableRegionLocator.java   |   28 +-
 .../client/AsyncTableRegionLocatorImpl.java |6 +-
 .../hbase/client/ConnectionConfiguration.java   |5 +-
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java | 2033 +-
 .../hadoop/hbase/client/RawAsyncTableImpl.java  |  208 +-
 .../apache/hadoop/hbase/util/FutureUtils.java   |   60 +
 .../hbase/client/RegionReplicaTestHelper.java   |  161 ++
 .../client/TestAsyncMetaRegionLocator.java  |   55 +-
 .../client/TestAsyncNonMetaRegionLocator.java   |  126 +-
 ...syncNonMetaRegionLocatorConcurrenyLimit.java |   20 +-
 ...TestAsyncSingleRequestRpcRetryingCaller.java |   56 +-
 .../client/TestAsyncTableLocatePrefetch.java|4 +-
 .../client/TestAsyncTableRegionReplicasGet.java |  204 ++
 .../hbase/client/TestZKAsyncRegistry.java   |   44 +-
 25 files changed, 2366 insertions(+), 1634 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/77ca6603/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
index fd6f3c7..f98bf03 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
@@ -56,8 +56,8 @@ public class RegionLocations {
 int index = 0;
 for (HRegionLocation loc : locations) {
   if (loc != null) {
-if (loc.getRegionInfo().getReplicaId() >= maxReplicaId) {
-  maxReplicaId = loc.getRegionInfo().getReplicaId();
+if (loc.getRegion().getReplicaId() >= maxReplicaId) {
+  maxReplicaId = loc.getRegion().getReplicaId();
   maxReplicaIdIndex = index;
 }
   }
@@ -72,7 +72,7 @@ public class RegionLocations {
   this.locations = new HRegionLocation[maxReplicaId + 1];
   for (HRegionLocation loc : locations) {
 if (loc != null) {
-  this.locations[loc.getRegionInfo().getReplicaId()] = loc;
+  this.locations[loc.getRegion().getReplicaId()] = loc;
 }
   }
 }
@@ -146,7 +146,7 @@ public class RegionLocations {
   public RegionLocations remove(HRegionLocation location) {
 if (location == null) return this;
 if (location.getRegion() == null) return this;
-int replicaId = location.getRegionInfo().getReplicaId();
+int replicaId = location.getRegion().getReplicaId();
 if (replicaId >= locations.length) return this;
 
 // check whether something to remove. HRL.compareTo() compares ONLY the
@@ -203,14 +203,14 @@ public class RegionLocations {
 // in case of region replication going down, we might have a leak here.
 int max = other.locations.length;
 
-HRegionInfo regionInfo = null;
+RegionInfo regionInfo = null;
 for (int i = 0; i < max; i++) {
   HRegionLocation thisLoc = this.getRegionLocation(i);
   HRegionLocation otherLoc = other.getRegionLocation(i);
-  if (regionInfo == null && otherLoc != null && otherLoc.getRegionInfo() 
!= null) {
+  if (regionInfo == null && otherLoc != null && otherLoc.getRegion() != 
null) {
 // regionInfo is the first non-null HRI from other RegionLocations. We 
use it to ensure that
 // all replica region infos belong to the same region with same region 
id.
-regionInfo = otherLoc.getRegionInfo();
+regionInfo = otherLoc.getRegion();
   }
 
   HRegionLocation selectedLoc = selectRegionLocation(thisLoc,
@@ -232,7 +232,7 @@ public class RegionLocations {
   

[1/3] hbase git commit: HBASE-17356 Add replica get support

2019-01-02 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-2 2d8d74c64 -> 77ca66038


http://git-wip-us.apache.org/repos/asf/hbase/blob/77ca6603/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
index d705d7c..28db7e8 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.client;
 import static java.util.stream.Collectors.toList;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+import static org.apache.hadoop.hbase.util.FutureUtils.addListener;
 
 import com.google.protobuf.RpcChannel;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -32,11 +32,11 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Function;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.TableName;
 import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
@@ -45,9 +45,12 @@ import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
+
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
@@ -63,7 +66,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType
 
 /**
  * The implementation of RawAsyncTable.
- * 
+ * 
  * The word 'Raw' means that this is a low level class. The returned {@link 
CompletableFuture} will
  * be finished inside the rpc framework thread, which means that the callbacks 
registered to the
  * {@link CompletableFuture} will also be executed inside the rpc framework 
thread. So users who use
@@ -74,6 +77,8 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType
 @InterfaceAudience.Private
 class RawAsyncTableImpl implements AsyncTable {
 
+  private static final Logger LOG = 
LoggerFactory.getLogger(RawAsyncTableImpl.class);
+
   private final AsyncConnectionImpl conn;
 
   private final TableName tableName;
@@ -204,58 +209,126 @@ class RawAsyncTableImpl implements 
AsyncTable {
 
   private  SingleRequestCallerBuilder newCaller(byte[] row, long 
rpcTimeoutNs) {
 return conn.callerFactory. single().table(tableName).row(row)
-.rpcTimeout(rpcTimeoutNs, TimeUnit.NANOSECONDS)
-.operationTimeout(operationTimeoutNs, TimeUnit.NANOSECONDS)
-.pause(pauseNs, TimeUnit.NANOSECONDS).maxAttempts(maxAttempts)
-.startLogErrorsCnt(startLogErrorsCnt);
+  .rpcTimeout(rpcTimeoutNs, TimeUnit.NANOSECONDS)
+  .operationTimeout(operationTimeoutNs, TimeUnit.NANOSECONDS)
+  .pause(pauseNs, TimeUnit.NANOSECONDS).maxAttempts(maxAttempts)
+  .startLogErrorsCnt(startLogErrorsCnt);
   }
 
   private  SingleRequestCallerBuilder newCaller(Row row, long 
rpcTimeoutNs) {
 return newCaller(row.getRow(), rpcTimeoutNs);
   }
 
+  private CompletableFuture get(Get get, int replicaId, long 
timeoutNs) {
+return this. newCaller(get, timeoutNs)
+  .action((controller, loc, stub) -> RawAsyncTableImpl
+. call(controller, loc, stub, 
get,
+  RequestConverter::buildGetRequest, (s, c, req, done) -> s.get(c, 
req, done),
+  (c, resp) -> ProtobufUtil.toResult(resp.getResult(), 
c.cellScanner(
+  .replicaId(replicaId).call();
+  }
+
+  // Connect the two futures, if the src future is done, then mark the dst 
future as done. And if
+  // the dst future is done, then cancel the src future. This is used for 
timeline consistent read.
+  private  void connect(CompletableFuture srcFuture, 
CompletableFuture dstFuture) {
+addListener(srcFuture, (r, e) -> {
+  if (e != null) {
+dstFuture.completeExceptionally(e);
+  } else {
+dstFuture.complete(r);
+  }
+});
+// The 

[3/3] hbase git commit: HBASE-17356 Add replica get support

2019-01-02 Thread zhangduo
HBASE-17356 Add replica get support


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0c3da777
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0c3da777
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0c3da777

Branch: refs/heads/branch-2.1
Commit: 0c3da777e3315d0c56c524cf5e473f25db659f96
Parents: 8923b0e
Author: zhangduo 
Authored: Tue Jan 1 21:59:37 2019 +0800
Committer: zhangduo 
Committed: Thu Jan 3 09:24:55 2019 +0800

--
 .../apache/hadoop/hbase/RegionLocations.java|   30 +-
 .../client/AsyncBatchRpcRetryingCaller.java |  114 +-
 .../client/AsyncConnectionConfiguration.java|   12 +
 .../hbase/client/AsyncConnectionImpl.java   |1 -
 .../hbase/client/AsyncMetaRegionLocator.java|  125 +-
 .../hbase/client/AsyncNonMetaRegionLocator.java |  291 +--
 .../hadoop/hbase/client/AsyncRegionLocator.java |  129 +-
 .../hbase/client/AsyncRegionLocatorHelper.java  |  147 ++
 .../hbase/client/AsyncRpcRetryingCaller.java|   15 +-
 .../client/AsyncRpcRetryingCallerFactory.java   |   55 +-
 .../AsyncSingleRequestRpcRetryingCaller.java|   71 +-
 .../hbase/client/AsyncTableRegionLocator.java   |   28 +-
 .../client/AsyncTableRegionLocatorImpl.java |6 +-
 .../hbase/client/ConnectionConfiguration.java   |5 +-
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java | 1995 +-
 .../hadoop/hbase/client/RawAsyncTableImpl.java  |  208 +-
 .../apache/hadoop/hbase/util/FutureUtils.java   |   60 +
 .../hbase/client/RegionReplicaTestHelper.java   |  161 ++
 .../client/TestAsyncMetaRegionLocator.java  |   55 +-
 .../client/TestAsyncNonMetaRegionLocator.java   |  126 +-
 ...syncNonMetaRegionLocatorConcurrenyLimit.java |   20 +-
 ...TestAsyncSingleRequestRpcRetryingCaller.java |   56 +-
 .../client/TestAsyncTableLocatePrefetch.java|4 +-
 .../client/TestAsyncTableRegionReplicasGet.java |  204 ++
 .../hbase/client/TestZKAsyncRegistry.java   |   44 +-
 25 files changed, 2347 insertions(+), 1615 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0c3da777/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
index fd6f3c7..f98bf03 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
@@ -56,8 +56,8 @@ public class RegionLocations {
 int index = 0;
 for (HRegionLocation loc : locations) {
   if (loc != null) {
-if (loc.getRegionInfo().getReplicaId() >= maxReplicaId) {
-  maxReplicaId = loc.getRegionInfo().getReplicaId();
+if (loc.getRegion().getReplicaId() >= maxReplicaId) {
+  maxReplicaId = loc.getRegion().getReplicaId();
   maxReplicaIdIndex = index;
 }
   }
@@ -72,7 +72,7 @@ public class RegionLocations {
   this.locations = new HRegionLocation[maxReplicaId + 1];
   for (HRegionLocation loc : locations) {
 if (loc != null) {
-  this.locations[loc.getRegionInfo().getReplicaId()] = loc;
+  this.locations[loc.getRegion().getReplicaId()] = loc;
 }
   }
 }
@@ -146,7 +146,7 @@ public class RegionLocations {
   public RegionLocations remove(HRegionLocation location) {
 if (location == null) return this;
 if (location.getRegion() == null) return this;
-int replicaId = location.getRegionInfo().getReplicaId();
+int replicaId = location.getRegion().getReplicaId();
 if (replicaId >= locations.length) return this;
 
 // check whether something to remove. HRL.compareTo() compares ONLY the
@@ -203,14 +203,14 @@ public class RegionLocations {
 // in case of region replication going down, we might have a leak here.
 int max = other.locations.length;
 
-HRegionInfo regionInfo = null;
+RegionInfo regionInfo = null;
 for (int i = 0; i < max; i++) {
   HRegionLocation thisLoc = this.getRegionLocation(i);
   HRegionLocation otherLoc = other.getRegionLocation(i);
-  if (regionInfo == null && otherLoc != null && otherLoc.getRegionInfo() 
!= null) {
+  if (regionInfo == null && otherLoc != null && otherLoc.getRegion() != 
null) {
 // regionInfo is the first non-null HRI from other RegionLocations. We 
use it to ensure that
 // all replica region infos belong to the same region with same region 
id.
-regionInfo = otherLoc.getRegionInfo();
+regionInfo = otherLoc.getRegion();
   }
 
   HRegionLocation selectedLoc = selectRegionLocation(thisLoc,
@@ -232,7 +232,7 @@ public class RegionLocations {
   

[1/3] hbase git commit: HBASE-17356 Add replica get support

2019-01-02 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-2.1 8923b0e17 -> 0c3da777e


http://git-wip-us.apache.org/repos/asf/hbase/blob/0c3da777/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
index d705d7c..28db7e8 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.client;
 import static java.util.stream.Collectors.toList;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+import static org.apache.hadoop.hbase.util.FutureUtils.addListener;
 
 import com.google.protobuf.RpcChannel;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -32,11 +32,11 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Function;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.TableName;
 import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
@@ -45,9 +45,12 @@ import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
+
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
@@ -63,7 +66,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType
 
 /**
  * The implementation of RawAsyncTable.
- * 
+ * 
  * The word 'Raw' means that this is a low level class. The returned {@link 
CompletableFuture} will
  * be finished inside the rpc framework thread, which means that the callbacks 
registered to the
  * {@link CompletableFuture} will also be executed inside the rpc framework 
thread. So users who use
@@ -74,6 +77,8 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType
 @InterfaceAudience.Private
 class RawAsyncTableImpl implements AsyncTable {
 
+  private static final Logger LOG = 
LoggerFactory.getLogger(RawAsyncTableImpl.class);
+
   private final AsyncConnectionImpl conn;
 
   private final TableName tableName;
@@ -204,58 +209,126 @@ class RawAsyncTableImpl implements 
AsyncTable {
 
   private  SingleRequestCallerBuilder newCaller(byte[] row, long 
rpcTimeoutNs) {
 return conn.callerFactory. single().table(tableName).row(row)
-.rpcTimeout(rpcTimeoutNs, TimeUnit.NANOSECONDS)
-.operationTimeout(operationTimeoutNs, TimeUnit.NANOSECONDS)
-.pause(pauseNs, TimeUnit.NANOSECONDS).maxAttempts(maxAttempts)
-.startLogErrorsCnt(startLogErrorsCnt);
+  .rpcTimeout(rpcTimeoutNs, TimeUnit.NANOSECONDS)
+  .operationTimeout(operationTimeoutNs, TimeUnit.NANOSECONDS)
+  .pause(pauseNs, TimeUnit.NANOSECONDS).maxAttempts(maxAttempts)
+  .startLogErrorsCnt(startLogErrorsCnt);
   }
 
   private  SingleRequestCallerBuilder newCaller(Row row, long 
rpcTimeoutNs) {
 return newCaller(row.getRow(), rpcTimeoutNs);
   }
 
+  private CompletableFuture get(Get get, int replicaId, long 
timeoutNs) {
+return this. newCaller(get, timeoutNs)
+  .action((controller, loc, stub) -> RawAsyncTableImpl
+. call(controller, loc, stub, 
get,
+  RequestConverter::buildGetRequest, (s, c, req, done) -> s.get(c, 
req, done),
+  (c, resp) -> ProtobufUtil.toResult(resp.getResult(), 
c.cellScanner(
+  .replicaId(replicaId).call();
+  }
+
+  // Connect the two futures, if the src future is done, then mark the dst 
future as done. And if
+  // the dst future is done, then cancel the src future. This is used for 
timeline consistent read.
+  private  void connect(CompletableFuture srcFuture, 
CompletableFuture dstFuture) {
+addListener(srcFuture, (r, e) -> {
+  if (e != null) {
+dstFuture.completeExceptionally(e);
+  } else {
+dstFuture.complete(r);
+  }
+});
+// The 

svn commit: r31743 - /dev/hbase/hbase-2.1.2RC1/compatibility_report_2.1.1vs2.1.2.html

2019-01-02 Thread stack
Author: stack
Date: Wed Jan  2 21:01:57 2019
New Revision: 31743

Log:
Add compat report

Added:
dev/hbase/hbase-2.1.2RC1/compatibility_report_2.1.1vs2.1.2.html

Added: dev/hbase/hbase-2.1.2RC1/compatibility_report_2.1.1vs2.1.2.html
==
--- dev/hbase/hbase-2.1.2RC1/compatibility_report_2.1.1vs2.1.2.html (added)
+++ dev/hbase/hbase-2.1.2RC1/compatibility_report_2.1.1vs2.1.2.html Wed Jan  2 
21:01:57 2019
@@ -0,0 +1,476 @@
+
+
+http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
+http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
+
+
+
+
+
+
+hbase: rel/2.0.3 to 205e39c5704bf38568b34926dde9f1ee76e6b5d0 
compatibility report
+
+body {
+font-family:Arial, sans-serif;
+background-color:White;
+color:Black;
+}
+hr {
+color:Black;
+background-color:Black;
+height:1px;
+border:0;
+}
+h1 {
+margin-bottom:0px;
+padding-bottom:0px;
+font-size:1.625em;
+}
+h2 {
+margin-bottom:0px;
+padding-bottom:0px;
+font-size:1.25em;
+white-space:nowrap;
+}
+div.symbols {
+color:#003E69;
+}
+div.symbols i {
+color:Brown;
+}
+span.section {
+font-weight:bold;
+cursor:pointer;
+color:#003E69;
+white-space:nowrap;
+margin-left:0.3125em;
+}
+span:hover.section {
+color:#336699;
+}
+span.sect_aff {
+cursor:pointer;
+padding-left:1.55em;
+font-size:0.875em;
+color:#cc3300;
+}
+span.ext {
+font-weight:normal;
+}
+span.jar {
+color:#cc3300;
+font-size:0.875em;
+font-weight:bold;
+}
+div.jar_list {
+padding-left:0.4em;
+font-size:0.94em;
+}
+span.pkg_t {
+color:#408080;
+font-size:0.875em;
+}
+span.pkg {
+color:#408080;
+font-size:0.875em;
+font-weight:bold;
+}
+span.cname {
+color:Green;
+font-size:0.875em;
+font-weight:bold;
+}
+span.iname_b {
+font-weight:bold;
+}
+span.iname_a {
+color:#33;
+font-weight:bold;
+font-size:0.94em;
+}
+span.sym_p {
+font-weight:normal;
+white-space:normal;
+}
+span.sym_pd {
+white-space:normal;
+}
+span.sym_p span, span.sym_pd span {
+white-space:nowrap;
+}
+span.attr {
+color:Black;
+font-weight:normal;
+}
+span.deprecated {
+color:Red;
+font-weight:bold;
+font-family:Monaco, monospace;
+}
+div.affect {
+padding-left:1em;
+padding-bottom:10px;
+font-size:0.87em;
+font-style:italic;
+line-height:0.9em;
+}
+div.affected {
+padding-left:2em;
+padding-top:10px;
+}
+table.ptable {
+border-collapse:collapse;
+border:1px outset black;
+margin-left:0.95em;
+margin-top:3px;
+margin-bottom:3px;
+width:56.25em;
+}
+table.ptable td {
+border:1px solid Gray;
+padding:3px;
+font-size:0.875em;
+text-align:left;
+vertical-align:top;
+max-width:28em;
+word-wrap:break-word;
+}
+table.ptable th {
+background-color:#ee;
+font-weight:bold;
+color:#33;
+font-family:Verdana, Arial;
+font-size:0.875em;
+border:1px solid Gray;
+text-align:center;
+vertical-align:top;
+white-space:nowrap;
+padding:3px;
+}
+table.summary {
+border-collapse:collapse;
+border:1px outset black;
+}
+table.summary th {
+background-color:#ee;
+font-weight:normal;
+text-align:left;
+font-size:0.94em;
+white-space:nowrap;
+border:1px inset Gray;
+padding:3px;
+}
+table.summary td {
+text-align:right;
+white-space:nowrap;
+border:1px inset Gray;
+padding:3px 5px 3px 10px;
+}
+span.mngl {
+padding-left:1em;
+font-size:0.875em;
+cursor:text;
+color:#44;
+font-weight:bold;
+}
+span.pleft {
+padding-left:2.5em;
+}
+span.color_p {
+font-style:italic;
+color:Brown;
+}
+span.param {
+font-style:italic;
+}
+span.focus_p {
+font-style:italic;
+background-color:#DCDCDC;
+}
+span.ttype {
+font-weight:normal;
+}
+span.nowrap {
+white-space:nowrap;
+}
+span.value {
+white-space:nowrap;
+font-weight:bold;
+}
+.passed {
+background-color:#CCFFCC;
+font-weight:normal;
+}
+.warning {
+background-color:#F4F4AF;
+font-weight:normal;
+}
+.failed {
+background-color:#FF;
+font-weight:normal;
+}
+.new {
+background-color:#C6DEFF;
+font-weight:normal;
+}
+
+.compatible {
+background-color:#CCFFCC;
+font-weight:normal;
+}
+.almost_compatible {
+background-color:#FFDAA3;
+font-weight:normal;
+}
+.incompatible {
+background-color:#FF;
+font-weight:normal;
+}
+.gray {
+background-color:#DCDCDC;
+font-weight:normal;
+}
+
+.top_ref {
+font-size:0.69em;
+}
+.footer {
+font-size:0.8125em;
+}
+.tabset {
+float:left;
+}
+a.tab {
+border:1px solid Black;
+float:left;
+margin:0px 5px -1px 0px;
+padding:3px 5px 3px 5px;
+position:relative;
+font-size:0.875em;
+background-color:#DDD;
+text-decoration:none;
+color:Black;
+}

[26/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithResult.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithResult.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithResult.html
index c4e8c8b..aa58108 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithResult.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithResult.html
@@ -82,602 +82,613 @@
 074  public static final String 
USER_COPROCESSORS_ENABLED_CONF_KEY =
 075"hbase.coprocessor.user.enabled";
 076  public static final boolean 
DEFAULT_USER_COPROCESSORS_ENABLED = true;
-077
-078  private static final Logger LOG = 
LoggerFactory.getLogger(CoprocessorHost.class);
-079  protected Abortable abortable;
-080  /** Ordered set of loaded coprocessors 
with lock */
-081  protected final SortedListE 
coprocEnvironments =
-082  new SortedList(new 
EnvironmentPriorityComparator());
-083  protected Configuration conf;
-084  // unique file prefix to use for local 
copies of jars when classloading
-085  protected String pathPrefix;
-086  protected AtomicInteger loadSequence = 
new AtomicInteger();
-087
-088  public CoprocessorHost(Abortable 
abortable) {
-089this.abortable = abortable;
-090this.pathPrefix = 
UUID.randomUUID().toString();
-091  }
-092
-093  /**
-094   * Not to be confused with the 
per-object _coprocessors_ (above),
-095   * coprocessorNames is static and 
stores the set of all coprocessors ever
-096   * loaded by any thread in this JVM. It 
is strictly additive: coprocessors are
-097   * added to coprocessorNames, by 
checkAndLoadInstance() but are never removed, since
-098   * the intention is to preserve a 
history of all loaded coprocessors for
-099   * diagnosis in case of server crash 
(HBASE-4014).
-100   */
-101  private static SetString 
coprocessorNames =
-102  Collections.synchronizedSet(new 
HashSetString());
-103
-104  public static SetString 
getLoadedCoprocessors() {
-105synchronized (coprocessorNames) {
-106  return new 
HashSet(coprocessorNames);
-107}
-108  }
-109
-110  /**
-111   * Used to create a parameter to the 
HServerLoad constructor so that
-112   * HServerLoad can provide information 
about the coprocessors loaded by this
-113   * regionserver.
-114   * (HBASE-4070: Improve region server 
metrics to report loaded coprocessors
-115   * to master).
-116   */
-117  public SetString 
getCoprocessors() {
-118SetString returnValue = new 
TreeSet();
-119for (E e: coprocEnvironments) {
-120  
returnValue.add(e.getInstance().getClass().getSimpleName());
-121}
-122return returnValue;
-123  }
-124
-125  /**
-126   * Load system coprocessors once only. 
Read the class names from configuration.
-127   * Called by constructor.
-128   */
-129  protected void 
loadSystemCoprocessors(Configuration conf, String confKey) {
-130boolean coprocessorsEnabled = 
conf.getBoolean(COPROCESSORS_ENABLED_CONF_KEY,
-131  DEFAULT_COPROCESSORS_ENABLED);
-132if (!coprocessorsEnabled) {
-133  return;
-134}
-135
-136Class? implClass;
-137
-138// load default coprocessors from 
configure file
-139String[] defaultCPClasses = 
conf.getStrings(confKey);
-140if (defaultCPClasses == null || 
defaultCPClasses.length == 0)
-141  return;
-142
-143int priority = 
Coprocessor.PRIORITY_SYSTEM;
-144for (String className : 
defaultCPClasses) {
-145  className = className.trim();
-146  if (findCoprocessor(className) != 
null) {
-147// If already loaded will just 
continue
-148LOG.warn("Attempted duplicate 
loading of " + className + "; skipped");
-149continue;
-150  }
-151  ClassLoader cl = 
this.getClass().getClassLoader();
-152  
Thread.currentThread().setContextClassLoader(cl);
-153  try {
-154implClass = 
cl.loadClass(className);
-155// Add coprocessors as we go to 
guard against case where a coprocessor is specified twice
-156// in the configuration
-157E env = 
checkAndLoadInstance(implClass, priority, conf);
-158if (env != null) {
-159  
this.coprocEnvironments.add(env);
-160  LOG.info("System coprocessor {} 
loaded, priority={}.", className, priority);
-161  ++priority;
-162}
-163  } catch (Throwable t) {
-164// We always abort if system 
coprocessors cannot be loaded
-165abortServer(className, t);
-166  }
-167}
-168  }
-169
-170  /**
-171   * Load a coprocessor implementation 
into the host
-172   * @param path path to implementation 
jar
-173   * @param className the main class 
name
-174   * @param priority chaining priority

[29/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.EnvironmentPriorityComparator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.EnvironmentPriorityComparator.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.EnvironmentPriorityComparator.html
index c4e8c8b..aa58108 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.EnvironmentPriorityComparator.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.EnvironmentPriorityComparator.html
@@ -82,602 +82,613 @@
 074  public static final String 
USER_COPROCESSORS_ENABLED_CONF_KEY =
 075"hbase.coprocessor.user.enabled";
 076  public static final boolean 
DEFAULT_USER_COPROCESSORS_ENABLED = true;
-077
-078  private static final Logger LOG = 
LoggerFactory.getLogger(CoprocessorHost.class);
-079  protected Abortable abortable;
-080  /** Ordered set of loaded coprocessors 
with lock */
-081  protected final SortedListE 
coprocEnvironments =
-082  new SortedList(new 
EnvironmentPriorityComparator());
-083  protected Configuration conf;
-084  // unique file prefix to use for local 
copies of jars when classloading
-085  protected String pathPrefix;
-086  protected AtomicInteger loadSequence = 
new AtomicInteger();
-087
-088  public CoprocessorHost(Abortable 
abortable) {
-089this.abortable = abortable;
-090this.pathPrefix = 
UUID.randomUUID().toString();
-091  }
-092
-093  /**
-094   * Not to be confused with the 
per-object _coprocessors_ (above),
-095   * coprocessorNames is static and 
stores the set of all coprocessors ever
-096   * loaded by any thread in this JVM. It 
is strictly additive: coprocessors are
-097   * added to coprocessorNames, by 
checkAndLoadInstance() but are never removed, since
-098   * the intention is to preserve a 
history of all loaded coprocessors for
-099   * diagnosis in case of server crash 
(HBASE-4014).
-100   */
-101  private static SetString 
coprocessorNames =
-102  Collections.synchronizedSet(new 
HashSetString());
-103
-104  public static SetString 
getLoadedCoprocessors() {
-105synchronized (coprocessorNames) {
-106  return new 
HashSet(coprocessorNames);
-107}
-108  }
-109
-110  /**
-111   * Used to create a parameter to the 
HServerLoad constructor so that
-112   * HServerLoad can provide information 
about the coprocessors loaded by this
-113   * regionserver.
-114   * (HBASE-4070: Improve region server 
metrics to report loaded coprocessors
-115   * to master).
-116   */
-117  public SetString 
getCoprocessors() {
-118SetString returnValue = new 
TreeSet();
-119for (E e: coprocEnvironments) {
-120  
returnValue.add(e.getInstance().getClass().getSimpleName());
-121}
-122return returnValue;
-123  }
-124
-125  /**
-126   * Load system coprocessors once only. 
Read the class names from configuration.
-127   * Called by constructor.
-128   */
-129  protected void 
loadSystemCoprocessors(Configuration conf, String confKey) {
-130boolean coprocessorsEnabled = 
conf.getBoolean(COPROCESSORS_ENABLED_CONF_KEY,
-131  DEFAULT_COPROCESSORS_ENABLED);
-132if (!coprocessorsEnabled) {
-133  return;
-134}
-135
-136Class? implClass;
-137
-138// load default coprocessors from 
configure file
-139String[] defaultCPClasses = 
conf.getStrings(confKey);
-140if (defaultCPClasses == null || 
defaultCPClasses.length == 0)
-141  return;
-142
-143int priority = 
Coprocessor.PRIORITY_SYSTEM;
-144for (String className : 
defaultCPClasses) {
-145  className = className.trim();
-146  if (findCoprocessor(className) != 
null) {
-147// If already loaded will just 
continue
-148LOG.warn("Attempted duplicate 
loading of " + className + "; skipped");
-149continue;
-150  }
-151  ClassLoader cl = 
this.getClass().getClassLoader();
-152  
Thread.currentThread().setContextClassLoader(cl);
-153  try {
-154implClass = 
cl.loadClass(className);
-155// Add coprocessors as we go to 
guard against case where a coprocessor is specified twice
-156// in the configuration
-157E env = 
checkAndLoadInstance(implClass, priority, conf);
-158if (env != null) {
-159  
this.coprocEnvironments.add(env);
-160  LOG.info("System coprocessor {} 
loaded, priority={}.", className, priority);
-161  ++priority;
-162}
-163  } catch (Throwable t) {
-164// We always abort if system 
coprocessors cannot be loaded
-165abortServer(className, t);
-166  }
-167}
-168  }
-169
-170  /**
-171   * Load a coprocessor implementation 
into the host
-172   * @param path path to implementation 
jar
-173   * @param className the main class 
name
-174   * @param priority chaining 

[09/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
index 1a0f64e..2290ca8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
@@ -26,849 +26,796 @@
 018 */
 019package 
org.apache.hadoop.hbase.thrift2;
 020
-021import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.appendFromThrift;
-022import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.columnFamilyDescriptorFromThrift;
-023import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.compareOpFromThrift;
-024import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.deleteFromThrift;
-025import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.deletesFromThrift;
-026import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.getFromThrift;
-027import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.getsFromThrift;
-028import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.incrementFromThrift;
-029import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.namespaceDescriptorFromHBase;
-030import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.namespaceDescriptorFromThrift;
-031import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.namespaceDescriptorsFromHBase;
-032import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.putFromThrift;
-033import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.putsFromThrift;
-034import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.resultFromHBase;
-035import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.resultsFromHBase;
-036import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.rowMutationsFromThrift;
-037import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.scanFromThrift;
-038import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.splitKeyFromThrift;
-039import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableDescriptorFromHBase;
-040import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableDescriptorFromThrift;
-041import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableDescriptorsFromHBase;
-042import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableNameFromThrift;
-043import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableNamesFromHBase;
-044import static 
org.apache.thrift.TBaseHelper.byteBufferToByteArray;
-045
-046import java.io.IOException;
-047import 
java.lang.reflect.InvocationHandler;
-048import 
java.lang.reflect.InvocationTargetException;
-049import java.lang.reflect.Method;
-050import java.lang.reflect.Proxy;
-051import java.nio.ByteBuffer;
-052import java.util.ArrayList;
-053import java.util.Collections;
-054import java.util.List;
-055import java.util.Map;
-056import 
java.util.concurrent.ConcurrentHashMap;
-057import 
java.util.concurrent.atomic.AtomicInteger;
-058import java.util.regex.Pattern;
-059
-060import 
org.apache.hadoop.conf.Configuration;
-061import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-062import 
org.apache.hadoop.hbase.HRegionLocation;
-063import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-064import 
org.apache.hadoop.hbase.TableName;
-065import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-066import 
org.apache.hadoop.hbase.client.RegionLocator;
-067import 
org.apache.hadoop.hbase.client.ResultScanner;
-068import 
org.apache.hadoop.hbase.client.Table;
-069import 
org.apache.hadoop.hbase.client.TableDescriptor;
-070import 
org.apache.hadoop.hbase.security.UserProvider;
-071import 
org.apache.hadoop.hbase.thrift.ThriftMetrics;
-072import 
org.apache.hadoop.hbase.thrift2.generated.TAppend;
-073import 
org.apache.hadoop.hbase.thrift2.generated.TColumnFamilyDescriptor;
-074import 
org.apache.hadoop.hbase.thrift2.generated.TCompareOp;
-075import 
org.apache.hadoop.hbase.thrift2.generated.TDelete;
-076import 
org.apache.hadoop.hbase.thrift2.generated.TGet;
-077import 
org.apache.hadoop.hbase.thrift2.generated.THBaseService;
-078import 
org.apache.hadoop.hbase.thrift2.generated.THRegionLocation;
-079import 
org.apache.hadoop.hbase.thrift2.generated.TIOError;
-080import 
org.apache.hadoop.hbase.thrift2.generated.TIllegalArgument;
-081import 
org.apache.hadoop.hbase.thrift2.generated.TIncrement;
-082import 
org.apache.hadoop.hbase.thrift2.generated.TNamespaceDescriptor;
-083import 
org.apache.hadoop.hbase.thrift2.generated.TPut;
-084import 
org.apache.hadoop.hbase.thrift2.generated.TResult;
-085import 
org.apache.hadoop.hbase.thrift2.generated.TRowMutations;
-086import 

[34/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift/class-use/ThriftServerRunner.IOErrorWithCause.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/thrift/class-use/ThriftServerRunner.IOErrorWithCause.html
 
b/devapidocs/org/apache/hadoop/hbase/thrift/class-use/ThriftServerRunner.IOErrorWithCause.html
deleted file mode 100644
index bcd5b20..000
--- 
a/devapidocs/org/apache/hadoop/hbase/thrift/class-use/ThriftServerRunner.IOErrorWithCause.html
+++ /dev/null
@@ -1,125 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-
-
-
-Uses of Class 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.IOErrorWithCause (Apache 
HBase 3.0.0-SNAPSHOT API)
-
-
-
-
-
-
-
-JavaScript is disabled on your browser.
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-Prev
-Next
-
-
-Frames
-NoFrames
-
-
-AllClasses
-
-
-
-
-
-
-
-
-
-
-Uses of 
Classorg.apache.hadoop.hbase.thrift.ThriftServerRunner.IOErrorWithCause
-
-No usage of 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.IOErrorWithCause
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-Prev
-Next
-
-
-Frames
-NoFrames
-
-
-AllClasses
-
-
-
-
-
-
-
-
-
-Copyright  20072019 https://www.apache.org/;>The Apache Software Foundation. All rights 
reserved.
-
-

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift/class-use/ThriftServerRunner.ImplType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/thrift/class-use/ThriftServerRunner.ImplType.html
 
b/devapidocs/org/apache/hadoop/hbase/thrift/class-use/ThriftServerRunner.ImplType.html
deleted file mode 100644
index 28bac39..000
--- 
a/devapidocs/org/apache/hadoop/hbase/thrift/class-use/ThriftServerRunner.ImplType.html
+++ /dev/null
@@ -1,194 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-
-
-
-Uses of Class 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.ImplType (Apache HBase 
3.0.0-SNAPSHOT API)
-
-
-
-
-
-
-
-JavaScript is disabled on your browser.
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-Prev
-Next
-
-
-Frames
-NoFrames
-
-
-AllClasses
-
-
-
-
-
-
-
-
-
-
-Uses 
of Classorg.apache.hadoop.hbase.thrift.ThriftServerRunner.ImplType
-
-
-
-
-
-Packages that use ThriftServerRunner.ImplType
-
-Package
-Description
-
-
-
-org.apache.hadoop.hbase.thrift
-
-Provides an HBase http://incubator.apache.org/thrift/;>Thrift
-service.
-
-
-
-
-
-
-
-
-
-
-Uses of ThriftServerRunner.ImplType 
in org.apache.hadoop.hbase.thrift
-
-Fields in org.apache.hadoop.hbase.thrift
 declared as ThriftServerRunner.ImplType
-
-Modifier and Type
-Field and Description
-
-
-
-static ThriftServerRunner.ImplType
-ThriftServerRunner.ImplType.DEFAULT
-
-
-
-
-Methods in org.apache.hadoop.hbase.thrift
 that return ThriftServerRunner.ImplType
-
-Modifier and Type
-Method and Description
-
-
-
-static ThriftServerRunner.ImplType
-ThriftServerRunner.ImplType.getServerImpl(org.apache.hadoop.conf.Configurationconf)
-
-
-static ThriftServerRunner.ImplType
-ThriftServerRunner.ImplType.valueOf(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
-Returns the enum constant of this type with the specified 
name.
-
-
-
-static ThriftServerRunner.ImplType[]
-ThriftServerRunner.ImplType.values()
-Returns an array containing the constants of this enum 
type, in
-the order they are declared.
-
-
-
-
-
-
-
-
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview

[22/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift/IncrementCoalescer.FullyQualifiedRow.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/IncrementCoalescer.FullyQualifiedRow.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/IncrementCoalescer.FullyQualifiedRow.html
index d7e790d..1648cbc 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/IncrementCoalescer.FullyQualifiedRow.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/IncrementCoalescer.FullyQualifiedRow.html
@@ -41,203 +41,203 @@
 033import 
java.util.concurrent.atomic.LongAdder;
 034import 
org.apache.hadoop.hbase.CellUtil;
 035import 
org.apache.hadoop.hbase.client.Table;
-036import 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler;
-037import 
org.apache.hadoop.hbase.thrift.generated.TIncrement;
-038import 
org.apache.hadoop.hbase.util.Bytes;
-039import 
org.apache.hadoop.hbase.util.Threads;
-040import 
org.apache.hadoop.metrics2.util.MBeans;
-041import org.apache.thrift.TException;
-042import 
org.apache.yetus.audience.InterfaceAudience;
-043import org.slf4j.Logger;
-044import org.slf4j.LoggerFactory;
-045
-046/**
-047 * This class will coalesce increments 
from a thift server if
-048 * 
hbase.regionserver.thrift.coalesceIncrement is set to true. Turning this
-049 * config to true will cause the thrift 
server to queue increments into an
-050 * instance of this class. The thread 
pool associated with this class will drain
-051 * the coalesced increments as the thread 
is able. This can cause data loss if the
-052 * thrift server dies or is shut down 
before everything in the queue is drained.
-053 *
-054 */
-055@InterfaceAudience.Private
-056public class IncrementCoalescer 
implements IncrementCoalescerMBean {
-057
-058  /**
-059   * Used to identify a cell that will be 
incremented.
-060   *
-061   */
-062  static class FullyQualifiedRow {
-063private byte[] table;
-064private byte[] rowKey;
-065private byte[] family;
-066private byte[] qualifier;
-067
-068public FullyQualifiedRow(byte[] 
table, byte[] rowKey, byte[] fam, byte[] qual) {
-069  super();
-070  this.table = table;
-071  this.rowKey = rowKey;
-072  this.family = fam;
-073  this.qualifier = qual;
-074}
-075
-076public byte[] getTable() {
-077  return table;
-078}
-079
-080public void setTable(byte[] table) 
{
-081  this.table = table;
-082}
-083
-084public byte[] getRowKey() {
-085  return rowKey;
-086}
-087
-088public void setRowKey(byte[] rowKey) 
{
-089  this.rowKey = rowKey;
-090}
-091
-092public byte[] getFamily() {
-093  return family;
-094}
-095
-096public void setFamily(byte[] fam) {
-097  this.family = fam;
-098}
-099
-100public byte[] getQualifier() {
-101  return qualifier;
-102}
-103
-104public void setQualifier(byte[] qual) 
{
-105  this.qualifier = qual;
-106}
-107
-108@Override
-109public int hashCode() {
-110  final int prime = 31;
-111  int result = 1;
-112  result = prime * result + 
Arrays.hashCode(family);
-113  result = prime * result + 
Arrays.hashCode(qualifier);
-114  result = prime * result + 
Arrays.hashCode(rowKey);
-115  result = prime * result + 
Arrays.hashCode(table);
-116  return result;
-117}
-118
-119@Override
-120public boolean equals(Object obj) {
-121  if (this == obj) {
-122return true;
-123  }
-124  if (obj == null) {
-125return false;
-126  }
-127  if (getClass() != obj.getClass()) 
{
-128return false;
-129  }
-130
-131  FullyQualifiedRow other = 
(FullyQualifiedRow) obj;
-132
-133  if (!Arrays.equals(family, 
other.family)) {
-134return false;
-135  }
-136  if (!Arrays.equals(qualifier, 
other.qualifier)) {
-137return false;
-138  }
-139  if (!Arrays.equals(rowKey, 
other.rowKey)) {
-140return false;
-141  }
-142  if (!Arrays.equals(table, 
other.table)) {
-143return false;
-144  }
-145  return true;
-146}
-147
-148  }
-149
-150  static class DaemonThreadFactory 
implements ThreadFactory {
-151static final AtomicInteger poolNumber 
= new AtomicInteger(1);
-152final ThreadGroup group;
-153final AtomicInteger threadNumber = 
new AtomicInteger(1);
-154final String namePrefix;
-155
-156DaemonThreadFactory() {
-157  SecurityManager s = 
System.getSecurityManager();
-158  group = (s != null) ? 
s.getThreadGroup() : Thread.currentThread().getThreadGroup();
-159  namePrefix = "ICV-" + 
poolNumber.getAndIncrement() + "-thread-";
-160}
-161
-162@Override
-163public Thread newThread(Runnable r) 
{
-164  Thread t = new Thread(group, r, 
namePrefix + threadNumber.getAndIncrement(), 0);
-165
-166  

[28/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverGetter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverGetter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverGetter.html
index c4e8c8b..aa58108 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverGetter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverGetter.html
@@ -82,602 +82,613 @@
 074  public static final String 
USER_COPROCESSORS_ENABLED_CONF_KEY =
 075"hbase.coprocessor.user.enabled";
 076  public static final boolean 
DEFAULT_USER_COPROCESSORS_ENABLED = true;
-077
-078  private static final Logger LOG = 
LoggerFactory.getLogger(CoprocessorHost.class);
-079  protected Abortable abortable;
-080  /** Ordered set of loaded coprocessors 
with lock */
-081  protected final SortedListE 
coprocEnvironments =
-082  new SortedList(new 
EnvironmentPriorityComparator());
-083  protected Configuration conf;
-084  // unique file prefix to use for local 
copies of jars when classloading
-085  protected String pathPrefix;
-086  protected AtomicInteger loadSequence = 
new AtomicInteger();
-087
-088  public CoprocessorHost(Abortable 
abortable) {
-089this.abortable = abortable;
-090this.pathPrefix = 
UUID.randomUUID().toString();
-091  }
-092
-093  /**
-094   * Not to be confused with the 
per-object _coprocessors_ (above),
-095   * coprocessorNames is static and 
stores the set of all coprocessors ever
-096   * loaded by any thread in this JVM. It 
is strictly additive: coprocessors are
-097   * added to coprocessorNames, by 
checkAndLoadInstance() but are never removed, since
-098   * the intention is to preserve a 
history of all loaded coprocessors for
-099   * diagnosis in case of server crash 
(HBASE-4014).
-100   */
-101  private static SetString 
coprocessorNames =
-102  Collections.synchronizedSet(new 
HashSetString());
-103
-104  public static SetString 
getLoadedCoprocessors() {
-105synchronized (coprocessorNames) {
-106  return new 
HashSet(coprocessorNames);
-107}
-108  }
-109
-110  /**
-111   * Used to create a parameter to the 
HServerLoad constructor so that
-112   * HServerLoad can provide information 
about the coprocessors loaded by this
-113   * regionserver.
-114   * (HBASE-4070: Improve region server 
metrics to report loaded coprocessors
-115   * to master).
-116   */
-117  public SetString 
getCoprocessors() {
-118SetString returnValue = new 
TreeSet();
-119for (E e: coprocEnvironments) {
-120  
returnValue.add(e.getInstance().getClass().getSimpleName());
-121}
-122return returnValue;
-123  }
-124
-125  /**
-126   * Load system coprocessors once only. 
Read the class names from configuration.
-127   * Called by constructor.
-128   */
-129  protected void 
loadSystemCoprocessors(Configuration conf, String confKey) {
-130boolean coprocessorsEnabled = 
conf.getBoolean(COPROCESSORS_ENABLED_CONF_KEY,
-131  DEFAULT_COPROCESSORS_ENABLED);
-132if (!coprocessorsEnabled) {
-133  return;
-134}
-135
-136Class? implClass;
-137
-138// load default coprocessors from 
configure file
-139String[] defaultCPClasses = 
conf.getStrings(confKey);
-140if (defaultCPClasses == null || 
defaultCPClasses.length == 0)
-141  return;
-142
-143int priority = 
Coprocessor.PRIORITY_SYSTEM;
-144for (String className : 
defaultCPClasses) {
-145  className = className.trim();
-146  if (findCoprocessor(className) != 
null) {
-147// If already loaded will just 
continue
-148LOG.warn("Attempted duplicate 
loading of " + className + "; skipped");
-149continue;
-150  }
-151  ClassLoader cl = 
this.getClass().getClassLoader();
-152  
Thread.currentThread().setContextClassLoader(cl);
-153  try {
-154implClass = 
cl.loadClass(className);
-155// Add coprocessors as we go to 
guard against case where a coprocessor is specified twice
-156// in the configuration
-157E env = 
checkAndLoadInstance(implClass, priority, conf);
-158if (env != null) {
-159  
this.coprocEnvironments.add(env);
-160  LOG.info("System coprocessor {} 
loaded, priority={}.", className, priority);
-161  ++priority;
-162}
-163  } catch (Throwable t) {
-164// We always abort if system 
coprocessors cannot be loaded
-165abortServer(className, t);
-166  }
-167}
-168  }
-169
-170  /**
-171   * Load a coprocessor implementation 
into the host
-172   * @param path path to implementation 
jar
-173   * @param className the main class 
name
-174   * @param priority chaining priority
-175   * @param conf configuration for 
coprocessor
-176   * 

[43/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift/Constants.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/Constants.html 
b/devapidocs/org/apache/hadoop/hbase/thrift/Constants.html
new file mode 100644
index 000..2b9a917
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/Constants.html
@@ -0,0 +1,1334 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+Constants (Apache HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+PrevClass
+NextClass
+
+
+Frames
+NoFrames
+
+
+AllClasses
+
+
+
+
+
+
+
+Summary:
+Nested|
+Field|
+Constr|
+Method
+
+
+Detail:
+Field|
+Constr|
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.thrift
+Class Constants
+
+
+
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.thrift.Constants
+
+
+
+
+
+
+
+
+@InterfaceAudience.Private
+public final class Constants
+extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+Thrift related constants
+
+
+
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+static int
+BACKLOG_CONF_DEAFULT
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+BACKLOG_CONF_KEY
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+BIND_CONF_KEY
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+BIND_OPTION
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+COALESCE_INC_KEY
+
+
+static boolean
+COMPACT_CONF_DEFAULT
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+COMPACT_CONF_KEY
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+COMPACT_OPTION
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+DEFAULT_BIND_ADDR
+
+
+static int
+DEFAULT_HTTP_MAX_HEADER_SIZE
+
+
+static int
+DEFAULT_LISTEN_PORT
+
+
+static boolean
+FRAMED_CONF_DEFAULT
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+FRAMED_CONF_KEY
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+FRAMED_OPTION
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+HTTP_MAX_THREADS_KEY
+
+
+static int
+HTTP_MAX_THREADS_KEY_DEFAULT
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+HTTP_MIN_THREADS_KEY
+
+
+static int
+HTTP_MIN_THREADS_KEY_DEFAULT
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+INFOPORT_OPTION
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+KEEP_ALIVE_SEC_OPTION
+
+
+static int
+MAX_FRAME_SIZE_CONF_DEFAULT
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+MAX_FRAME_SIZE_CONF_KEY
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+MAX_QUEUE_SIZE_OPTION
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+MAX_WORKERS_OPTION
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+MIN_WORKERS_OPTION
+
+
+static 

[15/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.IOErrorWithCause.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.IOErrorWithCause.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.IOErrorWithCause.html
deleted file mode 100644
index e692633..000
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.IOErrorWithCause.html
+++ /dev/null
@@ -1,2103 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-Source code
-
-
-
-
-001/*
-002 * Licensed to the Apache Software 
Foundation (ASF) under one
-003 * or more contributor license 
agreements.  See the NOTICE file
-004 * distributed with this work for 
additional information
-005 * regarding copyright ownership.  The 
ASF licenses this file
-006 * to you under the Apache License, 
Version 2.0 (the
-007 * "License"); you may not use this file 
except in compliance
-008 * with the License.  You may obtain a 
copy of the License at
-009 *
-010 * 
http://www.apache.org/licenses/LICENSE-2.0
-011 *
-012 * Unless required by applicable law or 
agreed to in writing, software
-013 * distributed under the License is 
distributed on an "AS IS" BASIS,
-014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-015 * See the License for the specific 
language governing permissions and
-016 * limitations under the License.
-017 */
-018
-019package org.apache.hadoop.hbase.thrift;
-020
-021import static 
org.apache.hadoop.hbase.util.Bytes.getBytes;
-022
-023import java.io.IOException;
-024import java.net.InetAddress;
-025import java.net.InetSocketAddress;
-026import java.net.UnknownHostException;
-027import java.nio.ByteBuffer;
-028import java.security.PrivilegedAction;
-029import java.util.ArrayList;
-030import java.util.Arrays;
-031import java.util.Collections;
-032import java.util.HashMap;
-033import java.util.List;
-034import java.util.Map;
-035import java.util.TreeMap;
-036import 
java.util.concurrent.BlockingQueue;
-037import 
java.util.concurrent.ExecutorService;
-038import 
java.util.concurrent.LinkedBlockingQueue;
-039import 
java.util.concurrent.ThreadPoolExecutor;
-040import java.util.concurrent.TimeUnit;
-041
-042import 
javax.security.auth.callback.Callback;
-043import 
javax.security.auth.callback.UnsupportedCallbackException;
-044import 
javax.security.sasl.AuthorizeCallback;
-045import javax.security.sasl.SaslServer;
-046
-047import 
org.apache.commons.lang3.ArrayUtils;
-048import 
org.apache.hadoop.conf.Configuration;
-049import 
org.apache.hadoop.hbase.Cell.Type;
-050import 
org.apache.hadoop.hbase.CellBuilder;
-051import 
org.apache.hadoop.hbase.CellBuilderFactory;
-052import 
org.apache.hadoop.hbase.CellBuilderType;
-053import 
org.apache.hadoop.hbase.CellUtil;
-054import 
org.apache.hadoop.hbase.HBaseConfiguration;
-055import 
org.apache.hadoop.hbase.HColumnDescriptor;
-056import 
org.apache.hadoop.hbase.HConstants;
-057import 
org.apache.hadoop.hbase.HRegionLocation;
-058import 
org.apache.hadoop.hbase.HTableDescriptor;
-059import 
org.apache.hadoop.hbase.KeyValue;
-060import 
org.apache.hadoop.hbase.MetaTableAccessor;
-061import 
org.apache.hadoop.hbase.ServerName;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotFoundException;
-064import 
org.apache.hadoop.hbase.client.Admin;
-065import 
org.apache.hadoop.hbase.client.Append;
-066import 
org.apache.hadoop.hbase.client.Delete;
-067import 
org.apache.hadoop.hbase.client.Durability;
-068import 
org.apache.hadoop.hbase.client.Get;
-069import 
org.apache.hadoop.hbase.client.Increment;
-070import 
org.apache.hadoop.hbase.client.OperationWithAttributes;
-071import 
org.apache.hadoop.hbase.client.Put;
-072import 
org.apache.hadoop.hbase.client.RegionInfo;
-073import 
org.apache.hadoop.hbase.client.RegionLocator;
-074import 
org.apache.hadoop.hbase.client.Result;
-075import 
org.apache.hadoop.hbase.client.ResultScanner;
-076import 
org.apache.hadoop.hbase.client.Scan;
-077import 
org.apache.hadoop.hbase.client.Table;
-078import 
org.apache.hadoop.hbase.filter.Filter;
-079import 
org.apache.hadoop.hbase.filter.ParseFilter;
-080import 
org.apache.hadoop.hbase.filter.PrefixFilter;
-081import 
org.apache.hadoop.hbase.filter.WhileMatchFilter;
-082import 
org.apache.hadoop.hbase.http.HttpServerUtil;
-083import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-084import 
org.apache.hadoop.hbase.security.SaslUtil;
-085import 
org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-086import 
org.apache.hadoop.hbase.security.SecurityUtil;
-087import 
org.apache.hadoop.hbase.security.UserProvider;
-088import 
org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
-089import 
org.apache.hadoop.hbase.thrift.generated.BatchMutation;
-090import 
org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;

[20/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.ResultScannerWrapper.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.ResultScannerWrapper.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.ResultScannerWrapper.html
new file mode 100644
index 000..5b5b199
--- /dev/null
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.ResultScannerWrapper.html
@@ -0,0 +1,1419 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+Source code
+
+
+
+
+001/**
+002 *
+003 * Licensed to the Apache Software 
Foundation (ASF) under one
+004 * or more contributor license 
agreements.  See the NOTICE file
+005 * distributed with this work for 
additional information
+006 * regarding copyright ownership.  The 
ASF licenses this file
+007 * to you under the Apache License, 
Version 2.0 (the
+008 * "License"); you may not use this file 
except in compliance
+009 * with the License.  You may obtain a 
copy of the License at
+010 *
+011 * 
http://www.apache.org/licenses/LICENSE-2.0
+012 *
+013 * Unless required by applicable law or 
agreed to in writing, software
+014 * distributed under the License is 
distributed on an "AS IS" BASIS,
+015 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+016 * See the License for the specific 
language governing permissions and
+017 * limitations under the License.
+018 */
+019
+020package org.apache.hadoop.hbase.thrift;
+021
+022import static 
org.apache.hadoop.hbase.thrift.Constants.COALESCE_INC_KEY;
+023import static 
org.apache.hadoop.hbase.util.Bytes.getBytes;
+024
+025import java.io.IOException;
+026import java.nio.ByteBuffer;
+027import java.util.ArrayList;
+028import java.util.Collections;
+029import java.util.HashMap;
+030import java.util.List;
+031import java.util.Map;
+032import java.util.TreeMap;
+033
+034import 
org.apache.hadoop.conf.Configuration;
+035import org.apache.hadoop.hbase.Cell;
+036import 
org.apache.hadoop.hbase.CellBuilder;
+037import 
org.apache.hadoop.hbase.CellBuilderFactory;
+038import 
org.apache.hadoop.hbase.CellBuilderType;
+039import 
org.apache.hadoop.hbase.CellUtil;
+040import 
org.apache.hadoop.hbase.HColumnDescriptor;
+041import 
org.apache.hadoop.hbase.HConstants;
+042import 
org.apache.hadoop.hbase.HRegionLocation;
+043import 
org.apache.hadoop.hbase.HTableDescriptor;
+044import 
org.apache.hadoop.hbase.KeyValue;
+045import 
org.apache.hadoop.hbase.MetaTableAccessor;
+046import 
org.apache.hadoop.hbase.ServerName;
+047import 
org.apache.hadoop.hbase.TableName;
+048import 
org.apache.hadoop.hbase.TableNotFoundException;
+049import 
org.apache.hadoop.hbase.client.Append;
+050import 
org.apache.hadoop.hbase.client.Delete;
+051import 
org.apache.hadoop.hbase.client.Durability;
+052import 
org.apache.hadoop.hbase.client.Get;
+053import 
org.apache.hadoop.hbase.client.Increment;
+054import 
org.apache.hadoop.hbase.client.OperationWithAttributes;
+055import 
org.apache.hadoop.hbase.client.Put;
+056import 
org.apache.hadoop.hbase.client.RegionInfo;
+057import 
org.apache.hadoop.hbase.client.RegionLocator;
+058import 
org.apache.hadoop.hbase.client.Result;
+059import 
org.apache.hadoop.hbase.client.ResultScanner;
+060import 
org.apache.hadoop.hbase.client.Scan;
+061import 
org.apache.hadoop.hbase.client.Table;
+062import 
org.apache.hadoop.hbase.filter.Filter;
+063import 
org.apache.hadoop.hbase.filter.ParseFilter;
+064import 
org.apache.hadoop.hbase.filter.PrefixFilter;
+065import 
org.apache.hadoop.hbase.filter.WhileMatchFilter;
+066import 
org.apache.hadoop.hbase.security.UserProvider;
+067import 
org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
+068import 
org.apache.hadoop.hbase.thrift.generated.BatchMutation;
+069import 
org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
+070import 
org.apache.hadoop.hbase.thrift.generated.Hbase;
+071import 
org.apache.hadoop.hbase.thrift.generated.IOError;
+072import 
org.apache.hadoop.hbase.thrift.generated.IllegalArgument;
+073import 
org.apache.hadoop.hbase.thrift.generated.Mutation;
+074import 
org.apache.hadoop.hbase.thrift.generated.TAppend;
+075import 
org.apache.hadoop.hbase.thrift.generated.TCell;
+076import 
org.apache.hadoop.hbase.thrift.generated.TIncrement;
+077import 
org.apache.hadoop.hbase.thrift.generated.TRegionInfo;
+078import 
org.apache.hadoop.hbase.thrift.generated.TRowResult;
+079import 
org.apache.hadoop.hbase.thrift.generated.TScan;
+080import 
org.apache.hadoop.hbase.util.Bytes;
+081import org.apache.thrift.TException;
+082import 
org.apache.yetus.audience.InterfaceAudience;
+083import org.slf4j.Logger;
+084import org.slf4j.LoggerFactory;
+085
+086import 
org.apache.hbase.thirdparty.com.google.common.base.Throwables;
+087
+088/**
+089 * The HBaseServiceHandler is a glue 
object that 

[46/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index 8befc15..de1e950 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -169,8 +169,8 @@
 
 org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand
 org.apache.hadoop.hbase.backup.BackupInfo.BackupState
-org.apache.hadoop.hbase.backup.BackupType
 org.apache.hadoop.hbase.backup.BackupInfo.BackupPhase
+org.apache.hadoop.hbase.backup.BackupType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 184a97c..e8886e1 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -10829,7 +10829,7 @@ service.
 
 
 private static TableName
-ThriftServerRunner.HBaseHandler.getTableName(https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in 
java.nio">ByteBufferbuffer)
+ThriftHBaseServiceHandler.getTableName(https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in 
java.nio">ByteBufferbuffer)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/client/class-use/Admin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Admin.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Admin.html
index 50d2715..87f7b02 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Admin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Admin.html
@@ -545,8 +545,8 @@ service.
 
 
 
-private Admin
-ThriftServerRunner.HBaseHandler.getAdmin()
+protected Admin
+HBaseServiceHandler.getAdmin()
 Obtain HBaseAdmin.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/client/class-use/OperationWithAttributes.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/OperationWithAttributes.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/OperationWithAttributes.html
index 5047938..04908b0 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/OperationWithAttributes.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/OperationWithAttributes.html
@@ -240,7 +240,7 @@ service.
 
 
 private static void
-ThriftServerRunner.addAttributes(OperationWithAttributesop,
+ThriftHBaseServiceHandler.addAttributes(OperationWithAttributesop,
  https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttps://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer,https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in 
java.nio">ByteBufferattributes)
 Adds all the attributes into the Operation object
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
index 00537ab..79246af 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
@@ -2291,7 +2291,7 @@ service.
 
 
 private Result
-ThriftServerRunner.HBaseHandler.getReverseScanResult(byte[]tableName,
+ThriftHBaseServiceHandler.getReverseScanResult(byte[]tableName,
 byte[]row,
 byte[]family)
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html
index b3b6d6e..aa4f195 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html
@@ -445,7 +445,7 @@ 

[36/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.html 
b/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.html
deleted file mode 100644
index 780d858..000
--- a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.html
+++ /dev/null
@@ -1,1291 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-
-
-
-ThriftServerRunner (Apache HBase 3.0.0-SNAPSHOT API)
-
-
-
-
-
-var methods = 
{"i0":9,"i1":10,"i2":10,"i3":10,"i4":9,"i5":10,"i6":9,"i7":10,"i8":10,"i9":10,"i10":10};
-var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-
-
-JavaScript is disabled on your browser.
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-PrevClass
-NextClass
-
-
-Frames
-NoFrames
-
-
-AllClasses
-
-
-
-
-
-
-
-Summary:
-Nested|
-Field|
-Constr|
-Method
-
-
-Detail:
-Field|
-Constr|
-Method
-
-
-
-
-
-
-
-
-org.apache.hadoop.hbase.thrift
-Class 
ThriftServerRunner
-
-
-
-https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
-
-
-org.apache.hadoop.hbase.thrift.ThriftServerRunner
-
-
-
-
-
-
-
-All Implemented Interfaces:
-https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable
-
-
-
-@InterfaceAudience.Private
-public class ThriftServerRunner
-extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-implements https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable
-ThriftServerRunner - this class starts up a Thrift server 
which implements
- the Hbase API specified in the Hbase.thrift IDL file.
-
-
-
-
-
-
-
-
-
-
-
-Nested Class Summary
-
-Nested Classes
-
-Modifier and Type
-Class and Description
-
-
-static class
-ThriftServerRunner.HBaseHandler
-The HBaseHandler is a glue object that connects Thrift RPC 
calls to the
- HBase client API primarily defined in the Admin and Table objects.
-
-
-
-static class
-ThriftServerRunner.ImplType
-An enum of server implementation selections
-
-
-
-static class
-ThriftServerRunner.IOErrorWithCause
-
-
-protected static class
-ThriftServerRunner.ResultScannerWrapper
-
-
-
-
-
-
-
-
-
-Field Summary
-
-Fields
-
-Modifier and Type
-Field and Description
-
-
-(package private) static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-BACKLOG_CONF_KEY
-
-
-(package private) static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-BIND_CONF_KEY
-
-
-(package private) static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-COALESCE_INC_KEY
-
-
-(package private) static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-COMPACT_CONF_KEY
-
-
-private 
org.apache.hadoop.conf.Configuration
-conf
-
-
-private static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-DEFAULT_BIND_ADDR
-
-
-private static int
-DEFAULT_HTTP_MAX_HEADER_SIZE
-
-
-static int
-DEFAULT_LISTEN_PORT
-
-
-private boolean
-doAsEnabled
-
-
-(package private) static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-FRAMED_CONF_KEY
-
-
-private 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
-handler
-
-
-private ThriftServerRunner.HBaseHandler
-hbaseHandler
-
-
-private https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-host
-
-
-static int
-HREGION_VERSION
-
-
-(package private) static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-HTTP_MAX_THREADS_KEY

[19/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.html
new file mode 100644
index 000..5b5b199
--- /dev/null
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.html
@@ -0,0 +1,1419 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+Source code
+
+
+
+
+001/**
+002 *
+003 * Licensed to the Apache Software 
Foundation (ASF) under one
+004 * or more contributor license 
agreements.  See the NOTICE file
+005 * distributed with this work for 
additional information
+006 * regarding copyright ownership.  The 
ASF licenses this file
+007 * to you under the Apache License, 
Version 2.0 (the
+008 * "License"); you may not use this file 
except in compliance
+009 * with the License.  You may obtain a 
copy of the License at
+010 *
+011 * 
http://www.apache.org/licenses/LICENSE-2.0
+012 *
+013 * Unless required by applicable law or 
agreed to in writing, software
+014 * distributed under the License is 
distributed on an "AS IS" BASIS,
+015 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+016 * See the License for the specific 
language governing permissions and
+017 * limitations under the License.
+018 */
+019
+020package org.apache.hadoop.hbase.thrift;
+021
+022import static 
org.apache.hadoop.hbase.thrift.Constants.COALESCE_INC_KEY;
+023import static 
org.apache.hadoop.hbase.util.Bytes.getBytes;
+024
+025import java.io.IOException;
+026import java.nio.ByteBuffer;
+027import java.util.ArrayList;
+028import java.util.Collections;
+029import java.util.HashMap;
+030import java.util.List;
+031import java.util.Map;
+032import java.util.TreeMap;
+033
+034import 
org.apache.hadoop.conf.Configuration;
+035import org.apache.hadoop.hbase.Cell;
+036import 
org.apache.hadoop.hbase.CellBuilder;
+037import 
org.apache.hadoop.hbase.CellBuilderFactory;
+038import 
org.apache.hadoop.hbase.CellBuilderType;
+039import 
org.apache.hadoop.hbase.CellUtil;
+040import 
org.apache.hadoop.hbase.HColumnDescriptor;
+041import 
org.apache.hadoop.hbase.HConstants;
+042import 
org.apache.hadoop.hbase.HRegionLocation;
+043import 
org.apache.hadoop.hbase.HTableDescriptor;
+044import 
org.apache.hadoop.hbase.KeyValue;
+045import 
org.apache.hadoop.hbase.MetaTableAccessor;
+046import 
org.apache.hadoop.hbase.ServerName;
+047import 
org.apache.hadoop.hbase.TableName;
+048import 
org.apache.hadoop.hbase.TableNotFoundException;
+049import 
org.apache.hadoop.hbase.client.Append;
+050import 
org.apache.hadoop.hbase.client.Delete;
+051import 
org.apache.hadoop.hbase.client.Durability;
+052import 
org.apache.hadoop.hbase.client.Get;
+053import 
org.apache.hadoop.hbase.client.Increment;
+054import 
org.apache.hadoop.hbase.client.OperationWithAttributes;
+055import 
org.apache.hadoop.hbase.client.Put;
+056import 
org.apache.hadoop.hbase.client.RegionInfo;
+057import 
org.apache.hadoop.hbase.client.RegionLocator;
+058import 
org.apache.hadoop.hbase.client.Result;
+059import 
org.apache.hadoop.hbase.client.ResultScanner;
+060import 
org.apache.hadoop.hbase.client.Scan;
+061import 
org.apache.hadoop.hbase.client.Table;
+062import 
org.apache.hadoop.hbase.filter.Filter;
+063import 
org.apache.hadoop.hbase.filter.ParseFilter;
+064import 
org.apache.hadoop.hbase.filter.PrefixFilter;
+065import 
org.apache.hadoop.hbase.filter.WhileMatchFilter;
+066import 
org.apache.hadoop.hbase.security.UserProvider;
+067import 
org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
+068import 
org.apache.hadoop.hbase.thrift.generated.BatchMutation;
+069import 
org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
+070import 
org.apache.hadoop.hbase.thrift.generated.Hbase;
+071import 
org.apache.hadoop.hbase.thrift.generated.IOError;
+072import 
org.apache.hadoop.hbase.thrift.generated.IllegalArgument;
+073import 
org.apache.hadoop.hbase.thrift.generated.Mutation;
+074import 
org.apache.hadoop.hbase.thrift.generated.TAppend;
+075import 
org.apache.hadoop.hbase.thrift.generated.TCell;
+076import 
org.apache.hadoop.hbase.thrift.generated.TIncrement;
+077import 
org.apache.hadoop.hbase.thrift.generated.TRegionInfo;
+078import 
org.apache.hadoop.hbase.thrift.generated.TRowResult;
+079import 
org.apache.hadoop.hbase.thrift.generated.TScan;
+080import 
org.apache.hadoop.hbase.util.Bytes;
+081import org.apache.thrift.TException;
+082import 
org.apache.yetus.audience.InterfaceAudience;
+083import org.slf4j.Logger;
+084import org.slf4j.LoggerFactory;
+085
+086import 
org.apache.hbase.thirdparty.com.google.common.base.Throwables;
+087
+088/**
+089 * The HBaseServiceHandler is a glue 
object that connects Thrift RPC calls to the
+090 * HBase client API primarily defined in 
the 

[51/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/849d84a8
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/849d84a8
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/849d84a8

Branch: refs/heads/asf-site
Commit: 849d84a8efffddfd3c5b794dcacb2a057c5f8bf9
Parents: 33046fe
Author: jenkins 
Authored: Wed Jan 2 14:52:40 2019 +
Committer: jenkins 
Committed: Wed Jan 2 14:52:40 2019 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 4 +-
 book.html   | 2 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 33512 -
 checkstyle.rss  |   118 +-
 coc.html| 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/allclasses-frame.html|12 +-
 devapidocs/allclasses-noframe.html  |12 +-
 devapidocs/constant-values.html |   970 +-
 devapidocs/index-all.html   |   577 +-
 .../hadoop/hbase/backup/package-tree.html   | 2 +-
 .../hadoop/hbase/class-use/TableName.html   | 2 +-
 .../hadoop/hbase/client/class-use/Admin.html| 4 +-
 .../class-use/OperationWithAttributes.html  | 2 +-
 .../hadoop/hbase/client/class-use/Result.html   | 2 +-
 .../hbase/client/class-use/ResultScanner.html   | 8 +-
 .../hadoop/hbase/client/class-use/Table.html|14 +-
 .../hadoop/hbase/client/package-tree.html   |24 +-
 ...essorHost.EnvironmentPriorityComparator.html | 6 +-
 .../CoprocessorHost.ObserverGetter.html | 2 +-
 .../CoprocessorHost.ObserverOperation.html  |16 +-
 ...ocessorHost.ObserverOperationWithResult.html |18 +-
 ...ssorHost.ObserverOperationWithoutResult.html |12 +-
 .../hbase/coprocessor/CoprocessorHost.html  |   108 +-
 .../hadoop/hbase/coprocessor/package-tree.html  | 2 +-
 .../hadoop/hbase/executor/package-tree.html | 2 +-
 .../hadoop/hbase/filter/package-tree.html   | 6 +-
 .../hadoop/hbase/http/class-use/InfoServer.html | 2 +-
 .../hadoop/hbase/io/hfile/package-tree.html | 4 +-
 .../hadoop/hbase/mapreduce/package-tree.html| 4 +-
 .../hbase/master/MasterCoprocessorHost.html | 2 +-
 .../hbase/master/balancer/package-tree.html | 2 +-
 .../hadoop/hbase/master/package-tree.html   | 2 +-
 .../hbase/master/procedure/package-tree.html| 4 +-
 .../hadoop/hbase/monitoring/package-tree.html   | 2 +-
 .../org/apache/hadoop/hbase/package-tree.html   |18 +-
 .../hadoop/hbase/quotas/package-tree.html   | 8 +-
 .../regionserver/RegionCoprocessorHost.html | 2 +-
 .../RegionServerCoprocessorHost.html| 2 +-
 .../hadoop/hbase/regionserver/package-tree.html |18 +-
 .../regionserver/querymatcher/package-tree.html | 4 +-
 .../regionserver/wal/WALCoprocessorHost.html| 2 +-
 .../hbase/regionserver/wal/package-tree.html| 2 +-
 .../hadoop/hbase/replication/package-tree.html  | 2 +-
 .../replication/regionserver/package-tree.html  | 2 +-
 .../hbase/security/access/package-tree.html | 4 +-
 .../class-use/SaslUtil.QualityOfProtection.html |37 +-
 .../hbase/security/class-use/UserProvider.html  |36 +-
 .../hadoop/hbase/security/package-tree.html | 2 +-
 .../hadoop/hbase/security/package-use.html  | 3 -
 .../hadoop/hbase/thrift/CallQueue.Call.html | 4 +-
 .../apache/hadoop/hbase/thrift/Constants.html   |  1334 +
 .../apache/hadoop/hbase/thrift/DemoClient.html  | 4 +-
 .../hbase/thrift/HBaseServiceHandler.html   |   459 +
 .../thrift/HThreadedSelectorServerArgs.html | 4 +-
 .../hbase/thrift/HbaseHandlerMetricsProxy.html  |56 +-
 .../hadoop/hbase/thrift/HttpDoAsClient.html | 4 +-
 .../apache/hadoop/hbase/thrift/ImplType.html|   594 +
 .../IncrementCoalescer.DaemonThreadFactory.html |14 +-
 .../IncrementCoalescer.FullyQualifiedRow.html   |32 +-
 .../hadoop/hbase/thrift/IncrementCoalescer.html |38 +-
 .../hbase/thrift/THBaseThreadPoolExecutor.html  | 4 +-
 ...iftHBaseServiceHandler.IOErrorWithCause.html |   414 +
 ...BaseServiceHandler.ResultScannerWrapper.html |   340 +
 .../hbase/thrift/ThriftHBaseServiceHandler.html |  2309 ++
 .../hadoop/hbase/thrift/ThriftHttpServlet.html  |22 +-
 .../hadoop/hbase/thrift/ThriftServer.html   |   630 +-
 

[35/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift/class-use/HBaseServiceHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/thrift/class-use/HBaseServiceHandler.html 
b/devapidocs/org/apache/hadoop/hbase/thrift/class-use/HBaseServiceHandler.html
new file mode 100644
index 000..0a22131
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/thrift/class-use/HBaseServiceHandler.html
@@ -0,0 +1,245 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+Uses of Class org.apache.hadoop.hbase.thrift.HBaseServiceHandler 
(Apache HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev
+Next
+
+
+Frames
+NoFrames
+
+
+AllClasses
+
+
+
+
+
+
+
+
+
+
+Uses of 
Classorg.apache.hadoop.hbase.thrift.HBaseServiceHandler
+
+
+
+
+
+Packages that use HBaseServiceHandler
+
+Package
+Description
+
+
+
+org.apache.hadoop.hbase.thrift
+
+Provides an HBase http://incubator.apache.org/thrift/;>Thrift
+service.
+
+
+
+org.apache.hadoop.hbase.thrift2
+
+Provides an HBase http://thrift.apache.org/;>Thrift
+service.
+
+
+
+
+
+
+
+
+
+
+Uses of HBaseServiceHandler in org.apache.hadoop.hbase.thrift
+
+Fields in org.apache.hadoop.hbase.thrift
 declared as HBaseServiceHandler
+
+Modifier and Type
+Field and Description
+
+
+
+private HBaseServiceHandler
+ThriftHttpServlet.handler
+
+
+protected HBaseServiceHandler
+ThriftServer.hbaseServiceHandler
+
+
+
+
+Methods in org.apache.hadoop.hbase.thrift
 that return HBaseServiceHandler
+
+Modifier and Type
+Method and Description
+
+
+
+protected HBaseServiceHandler
+ThriftServer.createHandler(org.apache.hadoop.conf.Configurationconf,
+ UserProvideruserProvider)
+
+
+
+
+Constructors in org.apache.hadoop.hbase.thrift
 with parameters of type HBaseServiceHandler
+
+Constructor and Description
+
+
+
+ThriftHttpServlet(org.apache.thrift.TProcessorprocessor,
+ 
org.apache.thrift.protocol.TProtocolFactoryprotocolFactory,
+ 
org.apache.hadoop.security.UserGroupInformationserviceUGI,
+ org.apache.hadoop.conf.Configurationconf,
+ HBaseServiceHandlerhandler,
+ booleansecurityEnabled,
+ booleandoAsEnabled)
+
+
+
+
+
+
+
+Uses of HBaseServiceHandler in org.apache.hadoop.hbase.thrift2
+
+Subclasses of HBaseServiceHandler in org.apache.hadoop.hbase.thrift2
+
+Modifier and Type
+Class and Description
+
+
+
+class
+ThriftHBaseServiceHandler
+This class is a glue object that connects Thrift RPC calls 
to the HBase client API primarily
+ defined in the Table interface.
+
+
+
+
+
+Methods in org.apache.hadoop.hbase.thrift2
 that return HBaseServiceHandler
+
+Modifier and Type
+Method and Description
+
+
+
+protected HBaseServiceHandler
+ThriftServer.createHandler(org.apache.hadoop.conf.Configurationconf,
+ UserProvideruserProvider)
+
+
+
+
+
+
+
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev
+Next
+
+
+Frames
+NoFrames
+
+
+AllClasses
+
+
+
+
+
+
+
+
+
+Copyright  20072019 https://www.apache.org/;>The Apache Software Foundation. All rights 
reserved.
+
+

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift/class-use/ImplType.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/class-use/ImplType.html 
b/devapidocs/org/apache/hadoop/hbase/thrift/class-use/ImplType.html
new file mode 100644
index 000..65cdebb
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/class-use/ImplType.html
@@ -0,0 +1,194 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+Uses of Class org.apache.hadoop.hbase.thrift.ImplType (Apache HBase 
3.0.0-SNAPSHOT API)
+
+
+
+
+
+
+
+JavaScript is 

[17/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServer.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServer.html
index 8d94cde..ab92b9d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServer.html
@@ -26,222 +26,778 @@
 018
 019package org.apache.hadoop.hbase.thrift;
 020
-021import 
org.apache.hadoop.conf.Configuration;
-022import 
org.apache.hadoop.hbase.HBaseConfiguration;
-023import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-024import 
org.apache.hadoop.hbase.http.InfoServer;
-025import 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.ImplType;
-026import 
org.apache.hadoop.hbase.util.VersionInfo;
-027import 
org.apache.hadoop.util.Shell.ExitCodeException;
-028import 
org.apache.yetus.audience.InterfaceAudience;
-029import org.slf4j.Logger;
-030import org.slf4j.LoggerFactory;
-031import 
org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
-032import 
org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLineParser;
-033import 
org.apache.hbase.thirdparty.org.apache.commons.cli.DefaultParser;
-034import 
org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
-035import 
org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
-036
-037/**
-038 * ThriftServer- this class starts up a 
Thrift server which implements the
-039 * Hbase API specified in the 
Hbase.thrift IDL file. The server runs in an
-040 * independent process.
-041 */
-042@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-043public class ThriftServer {
-044
-045  private static final Logger LOG = 
LoggerFactory.getLogger(ThriftServer.class);
-046
-047  private static final String 
MIN_WORKERS_OPTION = "minWorkers";
-048  private static final String 
MAX_WORKERS_OPTION = "workers";
-049  private static final String 
MAX_QUEUE_SIZE_OPTION = "queue";
-050  private static final String 
KEEP_ALIVE_SEC_OPTION = "keepAliveSec";
-051  static final String BIND_OPTION = 
"bind";
-052  static final String COMPACT_OPTION = 
"compact";
-053  static final String FRAMED_OPTION = 
"framed";
-054  static final String PORT_OPTION = 
"port";
-055  static final String INFOPORT_OPTION = 
"infoport";
-056
-057  private static final String 
DEFAULT_BIND_ADDR = "0.0.0.0";
-058  private static final int 
DEFAULT_LISTEN_PORT = 9090;
-059
-060  private Configuration conf;
-061  ThriftServerRunner serverRunner;
-062
-063  private InfoServer infoServer;
-064
-065  private static final String 
READ_TIMEOUT_OPTION = "readTimeout";
-066
-067  //
-068  // Main program and support routines
-069  //
-070
-071  public ThriftServer(Configuration conf) 
{
-072this.conf = 
HBaseConfiguration.create(conf);
-073  }
+021import static 
org.apache.hadoop.hbase.thrift.Constants.BACKLOG_CONF_DEAFULT;
+022import static 
org.apache.hadoop.hbase.thrift.Constants.BACKLOG_CONF_KEY;
+023import static 
org.apache.hadoop.hbase.thrift.Constants.BIND_CONF_KEY;
+024import static 
org.apache.hadoop.hbase.thrift.Constants.BIND_OPTION;
+025import static 
org.apache.hadoop.hbase.thrift.Constants.COMPACT_CONF_DEFAULT;
+026import static 
org.apache.hadoop.hbase.thrift.Constants.COMPACT_CONF_KEY;
+027import static 
org.apache.hadoop.hbase.thrift.Constants.COMPACT_OPTION;
+028import static 
org.apache.hadoop.hbase.thrift.Constants.DEFAULT_BIND_ADDR;
+029import static 
org.apache.hadoop.hbase.thrift.Constants.DEFAULT_HTTP_MAX_HEADER_SIZE;
+030import static 
org.apache.hadoop.hbase.thrift.Constants.DEFAULT_LISTEN_PORT;
+031import static 
org.apache.hadoop.hbase.thrift.Constants.FRAMED_CONF_DEFAULT;
+032import static 
org.apache.hadoop.hbase.thrift.Constants.FRAMED_CONF_KEY;
+033import static 
org.apache.hadoop.hbase.thrift.Constants.FRAMED_OPTION;
+034import static 
org.apache.hadoop.hbase.thrift.Constants.HTTP_MAX_THREADS_KEY;
+035import static 
org.apache.hadoop.hbase.thrift.Constants.HTTP_MAX_THREADS_KEY_DEFAULT;
+036import static 
org.apache.hadoop.hbase.thrift.Constants.HTTP_MIN_THREADS_KEY;
+037import static 
org.apache.hadoop.hbase.thrift.Constants.HTTP_MIN_THREADS_KEY_DEFAULT;
+038import static 
org.apache.hadoop.hbase.thrift.Constants.INFOPORT_OPTION;
+039import static 
org.apache.hadoop.hbase.thrift.Constants.KEEP_ALIVE_SEC_OPTION;
+040import static 
org.apache.hadoop.hbase.thrift.Constants.MAX_FRAME_SIZE_CONF_DEFAULT;
+041import static 
org.apache.hadoop.hbase.thrift.Constants.MAX_FRAME_SIZE_CONF_KEY;
+042import static 
org.apache.hadoop.hbase.thrift.Constants.MAX_QUEUE_SIZE_OPTION;
+043import static 
org.apache.hadoop.hbase.thrift.Constants.MAX_WORKERS_OPTION;
+044import static 
org.apache.hadoop.hbase.thrift.Constants.MIN_WORKERS_OPTION;
+045import static 

[06/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/testdevapidocs/org/apache/hadoop/hbase/coprocessor/package-summary.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/coprocessor/package-summary.html 
b/testdevapidocs/org/apache/hadoop/hbase/coprocessor/package-summary.html
index f17e275..ec3348b 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/coprocessor/package-summary.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/coprocessor/package-summary.html
@@ -479,58 +479,62 @@
 
 
 
-TestRegionObserverBypass
+TestRegionCoprocessorHost
 
 
 
-TestRegionObserverBypass.TestCoprocessor
+TestRegionObserverBypass
 
 
 
+TestRegionObserverBypass.TestCoprocessor
+
+
+
 TestRegionObserverBypass.TestCoprocessor2
 
 Calls through to TestCoprocessor.
 
 
-
+
 TestRegionObserverBypass.TestCoprocessor3
 
 Calls through to TestCoprocessor.
 
 
-
+
 TestRegionObserverForAddingMutationsFromCoprocessors
 
 
-
+
 TestRegionObserverForAddingMutationsFromCoprocessors.TestDeleteCellCoprocessor
 
 
-
+
 TestRegionObserverForAddingMutationsFromCoprocessors.TestDeleteFamilyCoprocessor
 
 
-
+
 TestRegionObserverForAddingMutationsFromCoprocessors.TestDeleteRowCoprocessor
 
 
-
+
 TestRegionObserverForAddingMutationsFromCoprocessors.TestMultiMutationCoprocessor
 
 
-
+
 TestRegionObserverForAddingMutationsFromCoprocessors.TestWALObserver
 
 
-
+
 TestRegionObserverInterface
 
 
-
+
 TestRegionObserverInterface.EvenOnlyCompactor
 
 
-
+
 TestRegionObserverPreFlushAndPreCompact
 
 Test that we fail if a Coprocessor tries to return a null 
scanner out
@@ -539,131 +543,131 @@
  CompactionLifeCycleTracker, CompactionRequest)
 
 
-
+
 TestRegionObserverPreFlushAndPreCompact.TestRegionObserver
 
 Coprocessor that returns null when preCompact or preFlush 
is called.
 
 
-
+
 TestRegionObserverScannerOpenHook
 
 
-
+
 TestRegionObserverScannerOpenHook.CompactionCompletionNotifyingRegion
 
 
-
+
 TestRegionObserverScannerOpenHook.EmptyRegionObsever
 
 Do the default logic in RegionObserver 
interface.
 
 
-
+
 TestRegionObserverScannerOpenHook.NoDataFilter
 
 
-
+
 TestRegionObserverScannerOpenHook.NoDataFromCompaction
 
 Don't allow any data to be written out in the compaction by 
creating a custom
  StoreScanner.
 
 
-
+
 TestRegionObserverScannerOpenHook.NoDataFromFlush
 
 Don't allow any data in a flush by creating a custom 
StoreScanner.
 
 
-
+
 TestRegionObserverScannerOpenHook.NoDataFromScan
 
 Don't return any data from a scan by creating a custom 
StoreScanner.
 
 
-
+
 TestRegionObserverStacking
 
 
-
+
 TestRegionObserverStacking.ObserverA
 
 
-
+
 TestRegionObserverStacking.ObserverB
 
 
-
+
 TestRegionObserverStacking.ObserverC
 
 
-
+
 TestRegionServerCoprocessorEndpoint
 
 
-
+
 TestRegionServerCoprocessorEndpoint.DummyRegionServerEndpoint
 
 
-
+
 TestRegionServerCoprocessorExceptionWithAbort
 
 Tests unhandled exceptions thrown by coprocessors running 
on a regionserver..
 
 
-
+
 TestRegionServerCoprocessorExceptionWithAbort.BuggyRegionObserver
 
 
-
+
 TestRegionServerCoprocessorExceptionWithAbort.FailedInitializationObserver
 
 
-
+
 TestRegionServerCoprocessorExceptionWithRemove
 
 Tests unhandled exceptions thrown by coprocessors running 
on regionserver.
 
 
-
+
 TestRegionServerCoprocessorExceptionWithRemove.BuggyRegionObserver
 
 
-
+
 TestRowProcessorEndpoint
 
 Verifies ProcessEndpoint works.
 
 
-
+
 TestRowProcessorEndpoint.RowProcessorEndpointS
 extends com.google.protobuf.Message,T extends 
com.google.protobuf.Message
 
 This class defines two RowProcessors:
  IncrementCounterProcessor and FriendsOfFriendsProcessor.
 
 
-
+
 TestRowProcessorEndpoint.RowProcessorEndpoint.FriendsOfFriendsProcessor
 
 
-
+
 TestRowProcessorEndpoint.RowProcessorEndpoint.IncrementCounterProcessor
 
 
-
+
 TestRowProcessorEndpoint.RowProcessorEndpoint.RowSwapProcessor
 
 
-
+
 TestRowProcessorEndpoint.RowProcessorEndpoint.TimeoutProcessor
 
 
-
+
 TestSecureExport
 
 
-
+
 TestWALObserver
 
 Tests invocation of the

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/testdevapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html 
b/testdevapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
index b6f3005..98006bc 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
@@ -240,6 +240,7 @@
 org.apache.hadoop.hbase.coprocessor.TestPostIncrementAndAppendBeforeWAL.ChangeCellWithDifferntColumnFamilyObserver
 (implements org.apache.hadoop.hbase.coprocessor.RegionCoprocessor, 
org.apache.hadoop.hbase.coprocessor.RegionObserver)
 org.apache.hadoop.hbase.coprocessor.TestPostIncrementAndAppendBeforeWAL.ChangeCellWithNotExistColumnFamilyObserver
 (implements 

[03/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html 
b/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
index f71c161..a8cb53c 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
@@ -147,9 +147,9 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.wal.TestWALSplit.Corruptions
 org.apache.hadoop.hbase.wal.IOTestProvider.AllowedOperations
 org.apache.hadoop.hbase.wal.FaultyFSLog.FailureType
+org.apache.hadoop.hbase.wal.TestWALSplit.Corruptions
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/testdevapidocs/overview-tree.html
--
diff --git a/testdevapidocs/overview-tree.html 
b/testdevapidocs/overview-tree.html
index fb914b9..bf2fa0d 100644
--- a/testdevapidocs/overview-tree.html
+++ b/testdevapidocs/overview-tree.html
@@ -1176,6 +1176,15 @@
 org.apache.hadoop.hbase.util.HBaseHomePath
 org.apache.hadoop.hbase.security.HBaseKerberosUtils
 org.apache.hadoop.hbase.rest.HBaseRESTTestingUtility
+org.apache.hadoop.hbase.thrift.HBaseServiceHandler
+
+org.apache.hadoop.hbase.thrift.ThriftHBaseServiceHandler 
(implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface)
+
+org.apache.hadoop.hbase.thrift.TestThriftServer.MySlowHBaseHandler (implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface)
+
+
+
+
 org.apache.hadoop.hbase.HBaseTestingUtility.PortAllocator
 org.apache.hadoop.hbase.HBaseTestingUtility.SeenRowTracker
 org.apache.hadoop.hbase.util.hbck.HbckTestingUtil
@@ -3426,6 +3435,7 @@
 org.apache.hadoop.hbase.filter.TestRegexComparator.TestCase
 org.apache.hadoop.hbase.master.assignment.TestRegionAssignedToMultipleRegionServers
 org.apache.hadoop.hbase.master.assignment.TestRegionBypass
+org.apache.hadoop.hbase.coprocessor.TestRegionCoprocessorHost
 org.apache.hadoop.hbase.regionserver.TestRegionFavoredNodes
 org.apache.hadoop.hbase.regionserver.TestRegionIncrement
 org.apache.hadoop.hbase.regionserver.TestRegionInfoBuilder
@@ -3936,11 +3946,16 @@
 org.apache.hadoop.hbase.thrift2.TestThriftHBaseServiceHandlerWithReadOnly
 org.apache.hadoop.hbase.thrift.TestThriftHttpServer
 
+org.apache.hadoop.hbase.thrift2.TestThrift2HttpServer
 org.apache.hadoop.hbase.thrift.TestThriftSpnegoHttpServer
 
 
 org.apache.hadoop.hbase.thrift.TestThriftServer
-org.apache.hadoop.hbase.thrift.TestThriftServerCmdLine
+org.apache.hadoop.hbase.thrift.TestThriftServerCmdLine
+
+org.apache.hadoop.hbase.thrift2.TestThrift2ServerCmdLine
+
+
 org.apache.hadoop.hbase.io.hadoopbackport.TestThrottledInputStream
 org.apache.hadoop.hbase.quotas.TestThrottleSettings
 org.apache.hadoop.hbase.TestTimeout
@@ -4134,11 +4149,6 @@
 org.apache.hadoop.hbase.zookeeper.TestZKNodeTracker.WaitToGetDataThread
 
 
-org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler 
(implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface)
-
-org.apache.hadoop.hbase.thrift.TestThriftServer.MySlowHBaseHandler (implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface)
-
-
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">Throwable (implements java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/testdevapidocs/serialized-form.html
--
diff --git a/testdevapidocs/serialized-form.html 
b/testdevapidocs/serialized-form.html
index da45683..ff3fb46 100644
--- a/testdevapidocs/serialized-form.html
+++ b/testdevapidocs/serialized-form.html
@@ -2307,41 +2307,41 @@
 0L
 
 
-
+
 
 
-Class org.apache.hadoop.hbase.thrift.ThriftHttpServlet extends 
org.apache.thrift.server.TServlet implements Serializable
-
-serialVersionUID:
-1L
-
+Class 
org.apache.hadoop.hbase.thrift.ThriftHBaseServiceHandler.IOErrorWithCause 
extends org.apache.hadoop.hbase.thrift.generated.IOError implements 
Serializable
 
 
 Serialized Fields
 
-
-doAsEnabled
-boolean doAsEnabled
-
 
-securityEnabled

[39/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.html 
b/devapidocs/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.html
index 7c7d5f4..d7a3f03 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.html
@@ -49,7 +49,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-PrevClass
+PrevClass
 NextClass
 
 
@@ -186,8 +186,8 @@ extends org.apache.thrift.server.TServlet
 doAsEnabled
 
 
-private ThriftServerRunner.HBaseHandler
-hbaseHandler
+private HBaseServiceHandler
+handler
 
 
 private 
org.apache.hadoop.security.UserGroupInformation
@@ -228,11 +228,11 @@ extends org.apache.thrift.server.TServlet
 Constructor and Description
 
 
-ThriftHttpServlet(org.apache.thrift.TProcessorprocessor,
+ThriftHttpServlet(org.apache.thrift.TProcessorprocessor,
  
org.apache.thrift.protocol.TProtocolFactoryprotocolFactory,
  
org.apache.hadoop.security.UserGroupInformationserviceUGI,
  org.apache.hadoop.conf.Configurationconf,
- ThriftServerRunner.HBaseHandlerhbaseHandler,
+ HBaseServiceHandlerhandler,
  booleansecurityEnabled,
  booleandoAsEnabled)
 
@@ -345,13 +345,13 @@ extends org.apache.thrift.server.TServlet
 private final 
transientorg.apache.hadoop.security.UserGroupInformation httpUGI
 
 
-
+
 
 
 
 
-hbaseHandler
-private final transientThriftServerRunner.HBaseHandler hbaseHandler
+handler
+private final transientHBaseServiceHandler handler
 
 
 
@@ -393,7 +393,7 @@ extends org.apache.thrift.server.TServlet
 
 
 Constructor Detail
-
+
 
 
 
@@ -403,7 +403,7 @@ extends org.apache.thrift.server.TServlet
  
org.apache.thrift.protocol.TProtocolFactoryprotocolFactory,
  
org.apache.hadoop.security.UserGroupInformationserviceUGI,
  org.apache.hadoop.conf.Configurationconf,
- ThriftServerRunner.HBaseHandlerhbaseHandler,
+ HBaseServiceHandlerhandler,
  booleansecurityEnabled,
  booleandoAsEnabled)
   throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
@@ -485,7 +485,7 @@ extends org.apache.thrift.server.TServlet
 
 
 
-PrevClass
+PrevClass
 NextClass
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServer.html 
b/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServer.html
index 734cfc3..1088fbe 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServer.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":9,"i2":9,"i3":9,"i4":10,"i5":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":9,"i12":9,"i13":10,"i14":10,"i15":10,"i16":9,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames
@@ -100,18 +100,32 @@ var activeTableTab = "activeTableTab";
 https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
 
 
+org.apache.hadoop.conf.Configured
+
+
 org.apache.hadoop.hbase.thrift.ThriftServer
 
 
 
+
+
 
 
 
+
+All Implemented Interfaces:
+org.apache.hadoop.conf.Configurable, org.apache.hadoop.util.Tool
+
+
+Direct Known Subclasses:
+ThriftServer
+
 
 
 @InterfaceAudience.LimitedPrivate(value="Tools")
-public class ThriftServer
-extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+public class ThriftServer
+extends org.apache.hadoop.conf.Configured
+implements org.apache.hadoop.util.Tool
 ThriftServer- this class starts up a Thrift server which 
implements the
  Hbase API specified in the Hbase.thrift IDL file. The server runs in an
  independent process.
@@ -134,68 +148,68 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 Field and Description
 
 
-(package private) static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 

[07/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/pseudo-distributed.html
--
diff --git a/pseudo-distributed.html b/pseudo-distributed.html
index 0b77d22..c338bde 100644
--- a/pseudo-distributed.html
+++ b/pseudo-distributed.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase   
 Running Apache HBase (TM) in pseudo-distributed mode
@@ -318,7 +318,7 @@ under the License. -->
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2019-01-01
+  Last Published: 
2019-01-02
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/replication.html
--
diff --git a/replication.html b/replication.html
index e594006..85208d5 100644
--- a/replication.html
+++ b/replication.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Apache HBase (TM) Replication
@@ -313,7 +313,7 @@ under the License. -->
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2019-01-01
+  Last Published: 
2019-01-02
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/resources.html
--
diff --git a/resources.html b/resources.html
index c4cf437..4cd587f 100644
--- a/resources.html
+++ b/resources.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Other Apache HBase (TM) Resources
 
@@ -341,7 +341,7 @@ under the License. -->
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2019-01-01
+  Last Published: 
2019-01-02
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/source-repository.html
--
diff --git a/source-repository.html b/source-repository.html
index 00f6a3a..39adeb7 100644
--- a/source-repository.html
+++ b/source-repository.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Source Code Management
 
@@ -309,7 +309,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2019-01-01
+  Last Published: 
2019-01-02
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/sponsors.html
--
diff --git a/sponsors.html b/sponsors.html
index 692ec94..0129e39 100644
--- a/sponsors.html
+++ b/sponsors.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Apache HBase™ Sponsors
 
@@ -343,7 +343,7 @@ under the License. -->
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2019-01-01
+  Last Published: 
2019-01-02
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/supportingprojects.html
--
diff --git a/supportingprojects.html b/supportingprojects.html
index 06b9dea..1192b46 100644
--- a/supportingprojects.html
+++ b/supportingprojects.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Supporting Projects
 
@@ -530,7 +530,7 @@ under the License. -->
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2019-01-01
+  Last Published: 
2019-01-02
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/team-list.html
--
diff --git a/team-list.html b/team-list.html
index cfe8d06..4f4df99 100644
--- a/team-list.html
+++ b/team-list.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Team
 
@@ -776,7 +776,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2019-01-01
+  Last Published: 
2019-01-02
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/testdevapidocs/allclasses-frame.html

[48/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/constant-values.html
--
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 0463965..b02ba85 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -3831,7 +3831,7 @@
 
 publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 date
-"Tue Jan  1 14:43:08 UTC 2019"
+"Wed Jan  2 14:43:57 UTC 2019"
 
 
 
@@ -3845,7 +3845,7 @@
 
 publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 srcChecksum
-"262a737dc52f9624e627fb306255daeb"
+"d799bef05949064497daae3b1629805b"
 
 
 
@@ -7055,33 +7055,47 @@
 true
 
 
+
+
+publicstaticfinalboolean
+DEFAULT_SKIP_LOAD_DUPLICATE_TABLE_COPROCESSOR
+false
+
+
 
 
 publicstaticfinalboolean
 DEFAULT_USER_COPROCESSORS_ENABLED
 true
 
-
+
 
 
 publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 MASTER_COPROCESSOR_CONF_KEY
 "hbase.coprocessor.master.classes"
 
-
+
 
 
 publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_COPROCESSOR_CONF_KEY
 "hbase.coprocessor.region.classes"
 
-
+
 
 
 publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGIONSERVER_COPROCESSOR_CONF_KEY
 "hbase.coprocessor.regionserver.classes"
 
+
+
+
+publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+SKIP_LOAD_DUPLICATE_TABLE_COPROCESSOR
+"hbase.skip.load.duplicate.table.coprocessor"
+
 
 
 
@@ -26381,7 +26395,7 @@
 
 
 
-org.apache.hadoop.hbase.thrift.HThreadedSelectorServerArgs
+org.apache.hadoop.hbase.thrift.Constants
 
 Modifier and Type
 Constant Field
@@ -26389,429 +26403,445 @@
 
 
 
-
+
 
-publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-ACCEPT_POLICY_CONF_KEY
-"hbase.thrift.accept.policy"
+publicstaticfinalint
+BACKLOG_CONF_DEAFULT
+0
 
 
-
+
 
 publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-ACCEPT_QUEUE_SIZE_PER_THREAD_CONF_KEY
-"hbase.thrift.accept.queue.size.per.selector"
+BACKLOG_CONF_KEY
+"hbase.regionserver.thrift.backlog"
 
 
-
+
 
 publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-SELECTOR_THREADS_CONF_KEY
-"hbase.thrift.selector.threads"
+BIND_CONF_KEY
+"hbase.regionserver.thrift.ipaddress"
 
 
-
+
 
 publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-STOP_TIMEOUT_CONF_KEY
-"hbase.thrift.stop.timeout.seconds"
+BIND_OPTION
+"bind"
 
 
-
+
 
 publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-WORKER_THREADS_CONF_KEY
-"hbase.thrift.worker.threads"
+COALESCE_INC_KEY
+"hbase.regionserver.thrift.coalesceIncrement"
 
-
-
-
-
-
-org.apache.hadoop.hbase.thrift.HttpAuthenticationException
-
-Modifier and Type
-Constant Field
-Value
+
+
+
+publicstaticfinalboolean
+COMPACT_CONF_DEFAULT
+false
 
-
 
-
+
 
-privatestaticfinallong
-serialVersionUID
-0L
+publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+COMPACT_CONF_KEY
+"hbase.regionserver.thrift.compact"
 
-
-
-
-
-
-org.apache.hadoop.hbase.thrift.IncrementCoalescer
-
-Modifier and Type
-Constant Field
-Value
+
+
+
+publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+COMPACT_OPTION
+"compact"
 
-
 
-
+
 
-privatestaticfinalint
-CORE_POOL_SIZE
-1
+publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+DEFAULT_BIND_ADDR
+"0.0.0.0"
 
-
-
-
-
-
-org.apache.hadoop.hbase.thrift.MetricsThriftServerSource
-
-Modifier and Type
-Constant Field
-Value
+
+
+
+publicstaticfinalint
+DEFAULT_HTTP_MAX_HEADER_SIZE
+65536
 
-
 
-
+
 
-publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-ACTIVE_WORKER_COUNT_KEY
-"numActiveWorkers"
+publicstaticfinalint
+DEFAULT_LISTEN_PORT
+9090
 
 
-
+
 

[33/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
 
b/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
index 54aed81..0a554cd 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
@@ -49,7 +49,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-PrevClass
+PrevClass
 NextClass
 
 
@@ -137,7 +137,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class ThriftHBaseServiceHandler.TIOErrorWithCause
+private static class ThriftHBaseServiceHandler.TIOErrorWithCause
 extends org.apache.hadoop.hbase.thrift2.generated.TIOError
 
 
@@ -269,7 +269,7 @@ extends 
org.apache.hadoop.hbase.thrift2.generated.TIOError
 
 
 cause
-privatehttps://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">Throwable cause
+privatehttps://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">Throwable cause
 
 
 
@@ -286,7 +286,7 @@ extends 
org.apache.hadoop.hbase.thrift2.generated.TIOError
 
 
 TIOErrorWithCause
-publicTIOErrorWithCause(https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">Throwablecause)
+publicTIOErrorWithCause(https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">Throwablecause)
 
 
 
@@ -303,7 +303,7 @@ extends 
org.apache.hadoop.hbase.thrift2.generated.TIOError
 
 
 getCause
-publichttps://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">ThrowablegetCause()
+publichttps://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">ThrowablegetCause()
 
 Overrides:
 https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true#getCause--;
 title="class or interface in java.lang">getCausein 
classhttps://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">Throwable
@@ -316,7 +316,7 @@ extends 
org.apache.hadoop.hbase.thrift2.generated.TIOError
 
 
 equals
-publicbooleanequals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectother)
+publicbooleanequals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectother)
 
 Overrides:
 equalsin 
classorg.apache.hadoop.hbase.thrift2.generated.TIOError
@@ -329,7 +329,7 @@ extends 
org.apache.hadoop.hbase.thrift2.generated.TIOError
 
 
 hashCode
-publicinthashCode()
+publicinthashCode()
 
 Overrides:
 hashCodein 
classorg.apache.hadoop.hbase.thrift2.generated.TIOError
@@ -364,7 +364,7 @@ extends 
org.apache.hadoop.hbase.thrift2.generated.TIOError
 
 
 
-PrevClass
+PrevClass
 NextClass
 
 



[13/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.ResultScannerWrapper.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.ResultScannerWrapper.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.ResultScannerWrapper.html
deleted file mode 100644
index e692633..000
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.ResultScannerWrapper.html
+++ /dev/null
@@ -1,2103 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-Source code
-
-
-
-
-001/*
-002 * Licensed to the Apache Software 
Foundation (ASF) under one
-003 * or more contributor license 
agreements.  See the NOTICE file
-004 * distributed with this work for 
additional information
-005 * regarding copyright ownership.  The 
ASF licenses this file
-006 * to you under the Apache License, 
Version 2.0 (the
-007 * "License"); you may not use this file 
except in compliance
-008 * with the License.  You may obtain a 
copy of the License at
-009 *
-010 * 
http://www.apache.org/licenses/LICENSE-2.0
-011 *
-012 * Unless required by applicable law or 
agreed to in writing, software
-013 * distributed under the License is 
distributed on an "AS IS" BASIS,
-014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-015 * See the License for the specific 
language governing permissions and
-016 * limitations under the License.
-017 */
-018
-019package org.apache.hadoop.hbase.thrift;
-020
-021import static 
org.apache.hadoop.hbase.util.Bytes.getBytes;
-022
-023import java.io.IOException;
-024import java.net.InetAddress;
-025import java.net.InetSocketAddress;
-026import java.net.UnknownHostException;
-027import java.nio.ByteBuffer;
-028import java.security.PrivilegedAction;
-029import java.util.ArrayList;
-030import java.util.Arrays;
-031import java.util.Collections;
-032import java.util.HashMap;
-033import java.util.List;
-034import java.util.Map;
-035import java.util.TreeMap;
-036import 
java.util.concurrent.BlockingQueue;
-037import 
java.util.concurrent.ExecutorService;
-038import 
java.util.concurrent.LinkedBlockingQueue;
-039import 
java.util.concurrent.ThreadPoolExecutor;
-040import java.util.concurrent.TimeUnit;
-041
-042import 
javax.security.auth.callback.Callback;
-043import 
javax.security.auth.callback.UnsupportedCallbackException;
-044import 
javax.security.sasl.AuthorizeCallback;
-045import javax.security.sasl.SaslServer;
-046
-047import 
org.apache.commons.lang3.ArrayUtils;
-048import 
org.apache.hadoop.conf.Configuration;
-049import 
org.apache.hadoop.hbase.Cell.Type;
-050import 
org.apache.hadoop.hbase.CellBuilder;
-051import 
org.apache.hadoop.hbase.CellBuilderFactory;
-052import 
org.apache.hadoop.hbase.CellBuilderType;
-053import 
org.apache.hadoop.hbase.CellUtil;
-054import 
org.apache.hadoop.hbase.HBaseConfiguration;
-055import 
org.apache.hadoop.hbase.HColumnDescriptor;
-056import 
org.apache.hadoop.hbase.HConstants;
-057import 
org.apache.hadoop.hbase.HRegionLocation;
-058import 
org.apache.hadoop.hbase.HTableDescriptor;
-059import 
org.apache.hadoop.hbase.KeyValue;
-060import 
org.apache.hadoop.hbase.MetaTableAccessor;
-061import 
org.apache.hadoop.hbase.ServerName;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotFoundException;
-064import 
org.apache.hadoop.hbase.client.Admin;
-065import 
org.apache.hadoop.hbase.client.Append;
-066import 
org.apache.hadoop.hbase.client.Delete;
-067import 
org.apache.hadoop.hbase.client.Durability;
-068import 
org.apache.hadoop.hbase.client.Get;
-069import 
org.apache.hadoop.hbase.client.Increment;
-070import 
org.apache.hadoop.hbase.client.OperationWithAttributes;
-071import 
org.apache.hadoop.hbase.client.Put;
-072import 
org.apache.hadoop.hbase.client.RegionInfo;
-073import 
org.apache.hadoop.hbase.client.RegionLocator;
-074import 
org.apache.hadoop.hbase.client.Result;
-075import 
org.apache.hadoop.hbase.client.ResultScanner;
-076import 
org.apache.hadoop.hbase.client.Scan;
-077import 
org.apache.hadoop.hbase.client.Table;
-078import 
org.apache.hadoop.hbase.filter.Filter;
-079import 
org.apache.hadoop.hbase.filter.ParseFilter;
-080import 
org.apache.hadoop.hbase.filter.PrefixFilter;
-081import 
org.apache.hadoop.hbase.filter.WhileMatchFilter;
-082import 
org.apache.hadoop.hbase.http.HttpServerUtil;
-083import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-084import 
org.apache.hadoop.hbase.security.SaslUtil;
-085import 
org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-086import 
org.apache.hadoop.hbase.security.SecurityUtil;
-087import 
org.apache.hadoop.hbase.security.UserProvider;
-088import 
org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
-089import 
org.apache.hadoop.hbase.thrift.generated.BatchMutation;
-090import 

[32/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html 
b/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
index 8fc0ace..38c35a3 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":9,"i49":9,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10};
-var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames
@@ -100,10 +100,15 @@ var activeTableTab = "activeTableTab";
 https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
 
 
+org.apache.hadoop.hbase.thrift.HBaseServiceHandler
+
+
 org.apache.hadoop.hbase.thrift2.ThriftHBaseServiceHandler
 
 
 
+
+
 
 
 
@@ -114,8 +119,8 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class ThriftHBaseServiceHandler
-extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+public class ThriftHBaseServiceHandler
+extends HBaseServiceHandler
 implements org.apache.hadoop.hbase.thrift2.generated.THBaseService.Iface
 This class is a glue object that connects Thrift RPC calls 
to the HBase client API primarily
  defined in the Table interface.
@@ -139,10 +144,6 @@ implements 
org.apache.hadoop.hbase.thrift2.generated.THBaseService.Iface
 
 
 private static class
-ThriftHBaseServiceHandler.THBaseServiceMetricsProxy
-
-
-private static class
 ThriftHBaseServiceHandler.TIOErrorWithCause
 
 
@@ -161,14 +162,6 @@ implements 
org.apache.hadoop.hbase.thrift2.generated.THBaseService.Iface
 Field and Description
 
 
-(package private) static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-CLEANUP_INTERVAL
-
-
-private ConnectionCache
-connectionCache
-
-
 private static https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 ioe
 
@@ -181,18 +174,21 @@ implements 
org.apache.hadoop.hbase.thrift2.generated.THBaseService.Iface
 LOG
 
 
-(package private) static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-MAX_IDLETIME
-
-
 private https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicInteger
 nextScannerId
 
-
+
 private https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttps://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer,ResultScanner
 scannerMap
 
 
+
+
+
+
+Fields inherited from classorg.apache.hadoop.hbase.thrift.HBaseServiceHandler
+CLEANUP_INTERVAL,
 conf,
 connectionCache,
 MAX_IDLETIME,
 metrics
+
 
 
 
@@ -220,7 +216,7 @@ implements 
org.apache.hadoop.hbase.thrift2.generated.THBaseService.Iface
 
 Method Summary
 
-All MethodsStatic MethodsInstance MethodsConcrete Methods
+All MethodsInstance MethodsConcrete Methods
 
 Modifier and Type
 Method and Description
@@ -427,7 +423,7 @@ implements 
org.apache.hadoop.hbase.thrift2.generated.THBaseService.Iface
 
 
 
-private 

[12/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.html
deleted file mode 100644
index e692633..000
--- a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.html
+++ /dev/null
@@ -1,2103 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-Source code
-
-
-
-
-001/*
-002 * Licensed to the Apache Software 
Foundation (ASF) under one
-003 * or more contributor license 
agreements.  See the NOTICE file
-004 * distributed with this work for 
additional information
-005 * regarding copyright ownership.  The 
ASF licenses this file
-006 * to you under the Apache License, 
Version 2.0 (the
-007 * "License"); you may not use this file 
except in compliance
-008 * with the License.  You may obtain a 
copy of the License at
-009 *
-010 * 
http://www.apache.org/licenses/LICENSE-2.0
-011 *
-012 * Unless required by applicable law or 
agreed to in writing, software
-013 * distributed under the License is 
distributed on an "AS IS" BASIS,
-014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-015 * See the License for the specific 
language governing permissions and
-016 * limitations under the License.
-017 */
-018
-019package org.apache.hadoop.hbase.thrift;
-020
-021import static 
org.apache.hadoop.hbase.util.Bytes.getBytes;
-022
-023import java.io.IOException;
-024import java.net.InetAddress;
-025import java.net.InetSocketAddress;
-026import java.net.UnknownHostException;
-027import java.nio.ByteBuffer;
-028import java.security.PrivilegedAction;
-029import java.util.ArrayList;
-030import java.util.Arrays;
-031import java.util.Collections;
-032import java.util.HashMap;
-033import java.util.List;
-034import java.util.Map;
-035import java.util.TreeMap;
-036import 
java.util.concurrent.BlockingQueue;
-037import 
java.util.concurrent.ExecutorService;
-038import 
java.util.concurrent.LinkedBlockingQueue;
-039import 
java.util.concurrent.ThreadPoolExecutor;
-040import java.util.concurrent.TimeUnit;
-041
-042import 
javax.security.auth.callback.Callback;
-043import 
javax.security.auth.callback.UnsupportedCallbackException;
-044import 
javax.security.sasl.AuthorizeCallback;
-045import javax.security.sasl.SaslServer;
-046
-047import 
org.apache.commons.lang3.ArrayUtils;
-048import 
org.apache.hadoop.conf.Configuration;
-049import 
org.apache.hadoop.hbase.Cell.Type;
-050import 
org.apache.hadoop.hbase.CellBuilder;
-051import 
org.apache.hadoop.hbase.CellBuilderFactory;
-052import 
org.apache.hadoop.hbase.CellBuilderType;
-053import 
org.apache.hadoop.hbase.CellUtil;
-054import 
org.apache.hadoop.hbase.HBaseConfiguration;
-055import 
org.apache.hadoop.hbase.HColumnDescriptor;
-056import 
org.apache.hadoop.hbase.HConstants;
-057import 
org.apache.hadoop.hbase.HRegionLocation;
-058import 
org.apache.hadoop.hbase.HTableDescriptor;
-059import 
org.apache.hadoop.hbase.KeyValue;
-060import 
org.apache.hadoop.hbase.MetaTableAccessor;
-061import 
org.apache.hadoop.hbase.ServerName;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotFoundException;
-064import 
org.apache.hadoop.hbase.client.Admin;
-065import 
org.apache.hadoop.hbase.client.Append;
-066import 
org.apache.hadoop.hbase.client.Delete;
-067import 
org.apache.hadoop.hbase.client.Durability;
-068import 
org.apache.hadoop.hbase.client.Get;
-069import 
org.apache.hadoop.hbase.client.Increment;
-070import 
org.apache.hadoop.hbase.client.OperationWithAttributes;
-071import 
org.apache.hadoop.hbase.client.Put;
-072import 
org.apache.hadoop.hbase.client.RegionInfo;
-073import 
org.apache.hadoop.hbase.client.RegionLocator;
-074import 
org.apache.hadoop.hbase.client.Result;
-075import 
org.apache.hadoop.hbase.client.ResultScanner;
-076import 
org.apache.hadoop.hbase.client.Scan;
-077import 
org.apache.hadoop.hbase.client.Table;
-078import 
org.apache.hadoop.hbase.filter.Filter;
-079import 
org.apache.hadoop.hbase.filter.ParseFilter;
-080import 
org.apache.hadoop.hbase.filter.PrefixFilter;
-081import 
org.apache.hadoop.hbase.filter.WhileMatchFilter;
-082import 
org.apache.hadoop.hbase.http.HttpServerUtil;
-083import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-084import 
org.apache.hadoop.hbase.security.SaslUtil;
-085import 
org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-086import 
org.apache.hadoop.hbase.security.SecurityUtil;
-087import 
org.apache.hadoop.hbase.security.UserProvider;
-088import 
org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
-089import 
org.apache.hadoop.hbase.thrift.generated.BatchMutation;
-090import 
org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
-091import 
org.apache.hadoop.hbase.thrift.generated.Hbase;
-092import 

[50/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 1e3562d..5203a73 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -291,10 +291,10 @@
 Warnings
 Errors
 
-3834
+3840
 0
 0
-14667
+14666
 
 Files
 
@@ -8669,961 +8669,956 @@
 0
 3
 
-org/apache/hadoop/hbase/thrift2/ThriftServer.java
-0
-0
-1
-
 org/apache/hadoop/hbase/tool/Canary.java
 0
 0
 20
-
+
 org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/tool/MapreduceTestingShim.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/tool/TestCanaryTool.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/trace/SpanReceiverHost.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/trace/TraceTree.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/trace/TraceUtil.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/types/TestCopyOnWriteMaps.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/types/TestOrderedBlob.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/types/TestOrderedString.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/types/TestStruct.java
 0
 0
 21
-
+
 org/apache/hadoop/hbase/types/TestTerminatedWrapper.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/types/TestUnion2.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/util/AbstractByteRange.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/util/AbstractFileStatusFilter.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/util/AbstractHBaseTool.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/util/AbstractPositionedByteRange.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/util/Addressing.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/util/AtomicUtils.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/util/AvlUtil.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java
 0
 0
 12
-
+
 org/apache/hadoop/hbase/util/BloomContext.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/util/BloomFilter.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/util/BloomFilterChunk.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/util/BloomFilterFactory.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/util/BloomFilterUtil.java
 0
 0
 15
-
+
 org/apache/hadoop/hbase/util/BloomFilterWriter.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/util/BoundedPriorityBlockingQueue.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/util/BuilderStyleTest.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/util/ByteBufferArray.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/util/ByteBufferUtils.java
 0
 0
 45
-
+
 org/apache/hadoop/hbase/util/ByteRangeUtils.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/util/Bytes.java
 0
 0
 88
-
+
 org/apache/hadoop/hbase/util/ChecksumType.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/util/ClassSize.java
 0
 0
 14
-
+
 org/apache/hadoop/hbase/util/Classes.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/util/CollectionBackedScanner.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/util/CollectionUtils.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/util/CommonFSUtils.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/util/CompressionTest.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/util/ConcatenatedLists.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/util/ConfigurationUtil.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/util/ConnectionCache.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/util/ConstantDelayQueue.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/util/Counter.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/util/DirectMemoryUtils.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/util/DynamicClassLoader.java
 0
 0
 26
-
+
 org/apache/hadoop/hbase/util/EncryptionTest.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/util/EnvironmentEdgeManager.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/util/EnvironmentEdgeManagerTestHelper.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/util/ExceptionUtil.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/util/FSHDFSUtils.java
 0
 0
 22
-
+
 org/apache/hadoop/hbase/util/FSRegionScanner.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/util/FSTableDescriptors.java
 0
 0
 36
-
+
 org/apache/hadoop/hbase/util/FSUtils.java
 0
 0
 79
-
+
 org/apache/hadoop/hbase/util/FSVisitor.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/util/FileStatusFilter.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/util/GetJavaProperty.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/util/HBaseConfTool.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/util/HBaseFsck.java
 0
 0
 102
-
+
 org/apache/hadoop/hbase/util/HBaseFsckRepair.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/util/HBaseHomePath.java
 0
 0
 1
-
+
 

hbase-site git commit: INFRA-10751 Empty commit

2019-01-02 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 849d84a8e -> b5ab7a738


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/b5ab7a73
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/b5ab7a73
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/b5ab7a73

Branch: refs/heads/asf-site
Commit: b5ab7a738df13d7b529acf93f745cabe0e1db1c6
Parents: 849d84a
Author: jenkins 
Authored: Wed Jan 2 14:53:05 2019 +
Committer: jenkins 
Committed: Wed Jan 2 14:53:05 2019 +

--

--




[42/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift/HBaseServiceHandler.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/HBaseServiceHandler.html 
b/devapidocs/org/apache/hadoop/hbase/thrift/HBaseServiceHandler.html
new file mode 100644
index 000..f48b7d4
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/HBaseServiceHandler.html
@@ -0,0 +1,459 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+HBaseServiceHandler (Apache HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+PrevClass
+NextClass
+
+
+Frames
+NoFrames
+
+
+AllClasses
+
+
+
+
+
+
+
+Summary:
+Nested|
+Field|
+Constr|
+Method
+
+
+Detail:
+Field|
+Constr|
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.thrift
+Class 
HBaseServiceHandler
+
+
+
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.thrift.HBaseServiceHandler
+
+
+
+
+
+
+
+Direct Known Subclasses:
+ThriftHBaseServiceHandler, ThriftHBaseServiceHandler
+
+
+
+@InterfaceAudience.LimitedPrivate(value="Tools")
+public abstract class HBaseServiceHandler
+extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+abstract class for HBase handler
+ providing a Connection cache and get table/admin method
+
+
+
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+CLEANUP_INTERVAL
+
+
+protected 
org.apache.hadoop.conf.Configuration
+conf
+
+
+protected ConnectionCache
+connectionCache
+
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+MAX_IDLETIME
+
+
+protected ThriftMetrics
+metrics
+
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Constructor and Description
+
+
+HBaseServiceHandler(org.apache.hadoop.conf.Configurationc,
+   UserProvideruserProvider)
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All MethodsInstance MethodsConcrete Methods
+
+Modifier and Type
+Method and Description
+
+
+protected Admin
+getAdmin()
+Obtain HBaseAdmin.
+
+
+
+protected Table
+getTable(byte[]tableName)
+Creates and returns a Table instance from a given table 
name.
+
+
+
+protected Table
+getTable(https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in 
java.nio">ByteBuffertableName)
+
+
+void
+initMetrics(ThriftMetricsmetrics)
+
+
+void
+setEffectiveUser(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringeffectiveUser)
+
+
+
+
+
+
+Methods inherited from classjava.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or interface in java.lang">hashCode, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, https://docs.oracle.com/javase/8/docs/api/ja
 va/lang/Object.html?is-external=true#notifyAll--" title="class or interface in 
java.lang">notifyAll, 

[02/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/testdevapidocs/src-html/org/apache/hadoop/hbase/thrift/TestThriftServer.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/thrift/TestThriftServer.html 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/thrift/TestThriftServer.html
index 60f13e3..ec94f7a 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/thrift/TestThriftServer.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/thrift/TestThriftServer.html
@@ -25,36 +25,36 @@
 017 */
 018package org.apache.hadoop.hbase.thrift;
 019
-020import static 
org.junit.Assert.assertArrayEquals;
-021import static 
org.junit.Assert.assertEquals;
-022import static 
org.junit.Assert.assertFalse;
-023import static 
org.junit.Assert.assertTrue;
-024import static org.junit.Assert.fail;
-025
-026import java.io.IOException;
-027import java.nio.ByteBuffer;
-028import java.util.ArrayList;
-029import java.util.Collection;
-030import java.util.HashMap;
-031import java.util.List;
-032import java.util.Map;
-033import 
org.apache.hadoop.conf.Configuration;
-034import 
org.apache.hadoop.hbase.CompatibilityFactory;
-035import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-036import 
org.apache.hadoop.hbase.HBaseTestingUtility;
-037import 
org.apache.hadoop.hbase.HColumnDescriptor;
-038import 
org.apache.hadoop.hbase.HConstants;
-039import 
org.apache.hadoop.hbase.HRegionInfo;
-040import 
org.apache.hadoop.hbase.HTableDescriptor;
-041import 
org.apache.hadoop.hbase.TableName;
-042import 
org.apache.hadoop.hbase.client.Put;
-043import 
org.apache.hadoop.hbase.client.Table;
-044import 
org.apache.hadoop.hbase.filter.ParseFilter;
-045import 
org.apache.hadoop.hbase.security.UserProvider;
-046import 
org.apache.hadoop.hbase.test.MetricsAssertHelper;
-047import 
org.apache.hadoop.hbase.testclassification.ClientTests;
-048import 
org.apache.hadoop.hbase.testclassification.LargeTests;
-049import 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler;
+020import static 
org.apache.hadoop.hbase.thrift.Constants.COALESCE_INC_KEY;
+021import static 
org.junit.Assert.assertArrayEquals;
+022import static 
org.junit.Assert.assertEquals;
+023import static 
org.junit.Assert.assertFalse;
+024import static 
org.junit.Assert.assertTrue;
+025import static org.junit.Assert.fail;
+026
+027import java.io.IOException;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Collection;
+031import java.util.HashMap;
+032import java.util.List;
+033import java.util.Map;
+034import 
org.apache.hadoop.conf.Configuration;
+035import 
org.apache.hadoop.hbase.CompatibilityFactory;
+036import 
org.apache.hadoop.hbase.HBaseClassTestRule;
+037import 
org.apache.hadoop.hbase.HBaseTestingUtility;
+038import 
org.apache.hadoop.hbase.HColumnDescriptor;
+039import 
org.apache.hadoop.hbase.HConstants;
+040import 
org.apache.hadoop.hbase.HRegionInfo;
+041import 
org.apache.hadoop.hbase.HTableDescriptor;
+042import 
org.apache.hadoop.hbase.TableName;
+043import 
org.apache.hadoop.hbase.client.Put;
+044import 
org.apache.hadoop.hbase.client.Table;
+045import 
org.apache.hadoop.hbase.filter.ParseFilter;
+046import 
org.apache.hadoop.hbase.security.UserProvider;
+047import 
org.apache.hadoop.hbase.test.MetricsAssertHelper;
+048import 
org.apache.hadoop.hbase.testclassification.ClientTests;
+049import 
org.apache.hadoop.hbase.testclassification.LargeTests;
 050import 
org.apache.hadoop.hbase.thrift.generated.BatchMutation;
 051import 
org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
 052import 
org.apache.hadoop.hbase.thrift.generated.Hbase;
@@ -79,7 +79,7 @@
 071import org.slf4j.LoggerFactory;
 072
 073/**
-074 * Unit testing for 
ThriftServerRunner.HBaseHandler, a part of the
+074 * Unit testing for 
ThriftServerRunner.HBaseServiceHandler, a part of the
 075 * org.apache.hadoop.hbase.thrift 
package.
 076 */
 077@Category({ClientTests.class, 
LargeTests.class})
@@ -121,7 +121,7 @@
 113
 114  @BeforeClass
 115  public static void beforeClass() throws 
Exception {
-116
UTIL.getConfiguration().setBoolean(ThriftServerRunner.COALESCE_INC_KEY, 
true);
+116
UTIL.getConfiguration().setBoolean(COALESCE_INC_KEY, true);
 117
UTIL.getConfiguration().setBoolean("hbase.table.sanity.checks", false);
 118
UTIL.getConfiguration().setInt("hbase.client.retries.number", 3);
 119UTIL.startMiniCluster();
@@ -160,8 +160,8 @@
 152   * IllegalArgument exception.
 153   */
 154  public void doTestTableCreateDrop() 
throws Exception {
-155ThriftServerRunner.HBaseHandler 
handler =
-156  new 
ThriftServerRunner.HBaseHandler(UTIL.getConfiguration(),
+155ThriftHBaseServiceHandler handler =
+156  new 
ThriftHBaseServiceHandler(UTIL.getConfiguration(),
 157
UserProvider.instantiate(UTIL.getConfiguration()));
 158doTestTableCreateDrop(handler);
 159  }
@@ -171,7 +171,7 @@
 163

[40/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.html 
b/devapidocs/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.html
new file mode 100644
index 000..f6bc1f0
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.html
@@ -0,0 +1,2309 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+ThriftHBaseServiceHandler (Apache HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
+var methods = 
{"i0":9,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":9,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":9,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10};
+var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+PrevClass
+NextClass
+
+
+Frames
+NoFrames
+
+
+AllClasses
+
+
+
+
+
+
+
+Summary:
+Nested|
+Field|
+Constr|
+Method
+
+
+Detail:
+Field|
+Constr|
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.thrift
+Class 
ThriftHBaseServiceHandler
+
+
+
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.thrift.HBaseServiceHandler
+
+
+org.apache.hadoop.hbase.thrift.ThriftHBaseServiceHandler
+
+
+
+
+
+
+
+
+
+All Implemented Interfaces:
+org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
+
+
+
+@InterfaceAudience.Private
+public class ThriftHBaseServiceHandler
+extends HBaseServiceHandler
+implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
+The HBaseServiceHandler is a glue object that connects 
Thrift RPC calls to the
+ HBase client API primarily defined in the Admin and Table objects.
+
+
+
+
+
+
+
+
+
+
+
+Nested Class Summary
+
+Nested Classes
+
+Modifier and Type
+Class and Description
+
+
+static class
+ThriftHBaseServiceHandler.IOErrorWithCause
+
+
+protected static class
+ThriftHBaseServiceHandler.ResultScannerWrapper
+
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+(package private) IncrementCoalescer
+coalescer
+
+
+static int
+HREGION_VERSION
+
+
+private static org.slf4j.Logger
+LOG
+
+
+private int
+nextScannerId
+
+
+private https://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true;
 title="class or interface in java.util">HashMaphttps://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer,ThriftHBaseServiceHandler.ResultScannerWrapper
+scannerMap
+
+
+
+
+
+
+Fields inherited from classorg.apache.hadoop.hbase.thrift.HBaseServiceHandler
+CLEANUP_INTERVAL,
 conf,
 connectionCache,
 MAX_IDLETIME,
 metrics
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Modifier
+Constructor and Description
+
+
+protected 
+ThriftHBaseServiceHandler(org.apache.hadoop.conf.Configurationc,
+ UserProvideruserProvider)
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All MethodsStatic MethodsInstance MethodsConcrete Methods
+
+Modifier and Type
+Method and Description
+
+
+private static void
+addAttributes(OperationWithAttributesop,
+ https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttps://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer,https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in 
java.nio">ByteBufferattributes)
+Adds all the attributes into the Operation object
+
+
+
+protected int
+addScanner(ResultScannerscanner,
+  booleansortColumns)
+Assigns a unique ID to the scanner and adds the mapping to 
an internal
+ hash-map.

[11/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.THBaseServiceMetricsProxy.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.THBaseServiceMetricsProxy.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.THBaseServiceMetricsProxy.html
deleted file mode 100644
index 1a0f64e..000
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.THBaseServiceMetricsProxy.html
+++ /dev/null
@@ -1,935 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-Source code
-
-
-
-
-001/**
-002 *
-003 * Licensed to the Apache Software 
Foundation (ASF) under one
-004 * or more contributor license 
agreements.  See the NOTICE file
-005 * distributed with this work for 
additional information
-006 * regarding copyright ownership.  The 
ASF licenses this file
-007 * to you under the Apache License, 
Version 2.0 (the
-008 * "License"); you may not use this file 
except in compliance
-009 * with the License.  You may obtain a 
copy of the License at
-010 *
-011 * 
http://www.apache.org/licenses/LICENSE-2.0
-012 *
-013 * Unless required by applicable law or 
agreed to in writing, software
-014 * distributed under the License is 
distributed on an "AS IS" BASIS,
-015 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-016 * See the License for the specific 
language governing permissions and
-017 * limitations under the License.
-018 */
-019package 
org.apache.hadoop.hbase.thrift2;
-020
-021import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.appendFromThrift;
-022import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.columnFamilyDescriptorFromThrift;
-023import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.compareOpFromThrift;
-024import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.deleteFromThrift;
-025import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.deletesFromThrift;
-026import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.getFromThrift;
-027import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.getsFromThrift;
-028import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.incrementFromThrift;
-029import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.namespaceDescriptorFromHBase;
-030import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.namespaceDescriptorFromThrift;
-031import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.namespaceDescriptorsFromHBase;
-032import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.putFromThrift;
-033import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.putsFromThrift;
-034import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.resultFromHBase;
-035import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.resultsFromHBase;
-036import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.rowMutationsFromThrift;
-037import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.scanFromThrift;
-038import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.splitKeyFromThrift;
-039import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableDescriptorFromHBase;
-040import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableDescriptorFromThrift;
-041import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableDescriptorsFromHBase;
-042import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableNameFromThrift;
-043import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableNamesFromHBase;
-044import static 
org.apache.thrift.TBaseHelper.byteBufferToByteArray;
-045
-046import java.io.IOException;
-047import 
java.lang.reflect.InvocationHandler;
-048import 
java.lang.reflect.InvocationTargetException;
-049import java.lang.reflect.Method;
-050import java.lang.reflect.Proxy;
-051import java.nio.ByteBuffer;
-052import java.util.ArrayList;
-053import java.util.Collections;
-054import java.util.List;
-055import java.util.Map;
-056import 
java.util.concurrent.ConcurrentHashMap;
-057import 
java.util.concurrent.atomic.AtomicInteger;
-058import java.util.regex.Pattern;
-059
-060import 
org.apache.hadoop.conf.Configuration;
-061import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-062import 
org.apache.hadoop.hbase.HRegionLocation;
-063import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-064import 
org.apache.hadoop.hbase.TableName;
-065import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-066import 
org.apache.hadoop.hbase.client.RegionLocator;
-067import 
org.apache.hadoop.hbase.client.ResultScanner;
-068import 
org.apache.hadoop.hbase.client.Table;
-069import 
org.apache.hadoop.hbase.client.TableDescriptor;
-070import 
org.apache.hadoop.hbase.security.UserProvider;
-071import 
org.apache.hadoop.hbase.thrift.ThriftMetrics;

[25/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithoutResult.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithoutResult.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithoutResult.html
index c4e8c8b..aa58108 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithoutResult.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithoutResult.html
@@ -82,602 +82,613 @@
 074  public static final String 
USER_COPROCESSORS_ENABLED_CONF_KEY =
 075"hbase.coprocessor.user.enabled";
 076  public static final boolean 
DEFAULT_USER_COPROCESSORS_ENABLED = true;
-077
-078  private static final Logger LOG = 
LoggerFactory.getLogger(CoprocessorHost.class);
-079  protected Abortable abortable;
-080  /** Ordered set of loaded coprocessors 
with lock */
-081  protected final SortedListE 
coprocEnvironments =
-082  new SortedList(new 
EnvironmentPriorityComparator());
-083  protected Configuration conf;
-084  // unique file prefix to use for local 
copies of jars when classloading
-085  protected String pathPrefix;
-086  protected AtomicInteger loadSequence = 
new AtomicInteger();
-087
-088  public CoprocessorHost(Abortable 
abortable) {
-089this.abortable = abortable;
-090this.pathPrefix = 
UUID.randomUUID().toString();
-091  }
-092
-093  /**
-094   * Not to be confused with the 
per-object _coprocessors_ (above),
-095   * coprocessorNames is static and 
stores the set of all coprocessors ever
-096   * loaded by any thread in this JVM. It 
is strictly additive: coprocessors are
-097   * added to coprocessorNames, by 
checkAndLoadInstance() but are never removed, since
-098   * the intention is to preserve a 
history of all loaded coprocessors for
-099   * diagnosis in case of server crash 
(HBASE-4014).
-100   */
-101  private static SetString 
coprocessorNames =
-102  Collections.synchronizedSet(new 
HashSetString());
-103
-104  public static SetString 
getLoadedCoprocessors() {
-105synchronized (coprocessorNames) {
-106  return new 
HashSet(coprocessorNames);
-107}
-108  }
-109
-110  /**
-111   * Used to create a parameter to the 
HServerLoad constructor so that
-112   * HServerLoad can provide information 
about the coprocessors loaded by this
-113   * regionserver.
-114   * (HBASE-4070: Improve region server 
metrics to report loaded coprocessors
-115   * to master).
-116   */
-117  public SetString 
getCoprocessors() {
-118SetString returnValue = new 
TreeSet();
-119for (E e: coprocEnvironments) {
-120  
returnValue.add(e.getInstance().getClass().getSimpleName());
-121}
-122return returnValue;
-123  }
-124
-125  /**
-126   * Load system coprocessors once only. 
Read the class names from configuration.
-127   * Called by constructor.
-128   */
-129  protected void 
loadSystemCoprocessors(Configuration conf, String confKey) {
-130boolean coprocessorsEnabled = 
conf.getBoolean(COPROCESSORS_ENABLED_CONF_KEY,
-131  DEFAULT_COPROCESSORS_ENABLED);
-132if (!coprocessorsEnabled) {
-133  return;
-134}
-135
-136Class? implClass;
-137
-138// load default coprocessors from 
configure file
-139String[] defaultCPClasses = 
conf.getStrings(confKey);
-140if (defaultCPClasses == null || 
defaultCPClasses.length == 0)
-141  return;
-142
-143int priority = 
Coprocessor.PRIORITY_SYSTEM;
-144for (String className : 
defaultCPClasses) {
-145  className = className.trim();
-146  if (findCoprocessor(className) != 
null) {
-147// If already loaded will just 
continue
-148LOG.warn("Attempted duplicate 
loading of " + className + "; skipped");
-149continue;
-150  }
-151  ClassLoader cl = 
this.getClass().getClassLoader();
-152  
Thread.currentThread().setContextClassLoader(cl);
-153  try {
-154implClass = 
cl.loadClass(className);
-155// Add coprocessors as we go to 
guard against case where a coprocessor is specified twice
-156// in the configuration
-157E env = 
checkAndLoadInstance(implClass, priority, conf);
-158if (env != null) {
-159  
this.coprocEnvironments.add(env);
-160  LOG.info("System coprocessor {} 
loaded, priority={}.", className, priority);
-161  ++priority;
-162}
-163  } catch (Throwable t) {
-164// We always abort if system 
coprocessors cannot be loaded
-165abortServer(className, t);
-166  }
-167}
-168  }
-169
-170  /**
-171   * Load a coprocessor implementation 
into the host
-172   * @param path path to implementation 
jar
-173   * @param className the main class 
name
-174   * @param priority 

[05/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/testdevapidocs/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.html 
b/testdevapidocs/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.html
index 895ce40..49fd434 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":9,"i1":10,"i2":9,"i3":10,"i4":10,"i5":10,"i6":9,"i7":10};
+var methods = 
{"i0":10,"i1":9,"i2":10,"i3":9,"i4":10,"i5":10,"i6":10,"i7":9,"i8":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -107,9 +107,13 @@ var activeTableTab = "activeTableTab";
 
 
 
+
+Direct Known Subclasses:
+TestThrift2ServerCmdLine
+
 
 
-public class TestThriftServerCmdLine
+public class TestThriftServerCmdLine
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 Start the HBase Thrift server on a random port through the 
command-line
  interface and talk to it from client side.
@@ -148,7 +152,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 cmdLineThread
 
 
-private 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.ImplType
+protected 
org.apache.hadoop.hbase.thrift.ImplType
 implType
 
 
@@ -156,27 +160,27 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 LOG
 
 
-private int
+protected int
 port
 
 
-private boolean
+protected boolean
 specifyBindIP
 
 
-private boolean
+protected boolean
 specifyCompact
 
 
-private boolean
+protected boolean
 specifyFramed
 
 
-private static boolean
+protected static boolean
 tableCreated
 
 
-private static HBaseTestingUtility
+protected static HBaseTestingUtility
 TEST_UTIL
 
 
@@ -198,7 +202,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 Constructor and Description
 
 
-TestThriftServerCmdLine(org.apache.hadoop.hbase.thrift.ThriftServerRunner.ImplTypeimplType,
+TestThriftServerCmdLine(org.apache.hadoop.hbase.thrift.ImplTypeimplType,
booleanspecifyFramed,
booleanspecifyBindIP,
booleanspecifyCompact)
@@ -219,34 +223,38 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 Method and Description
 
 
+protected 
org.apache.hadoop.hbase.thrift.ThriftServer
+createThriftServer()
+
+
 static https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">Collectionhttps://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object[]
 getParameters()
 
-
+
 private https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 getParametersString()
 
-
+
 static void
 setUpBeforeClass()
 
-
+
 private void
 startCmdLineThread(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">String[]args)
 
-
+
 private void
 stopCmdLineThread()
 
-
-private void
+
+protected void
 talkToThriftServer()
 
-
+
 static void
 tearDownAfterClass()
 
-
+
 void
 testRunThriftServer()
 
@@ -278,7 +286,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 CLASS_RULE
-public static finalHBaseClassTestRule CLASS_RULE
+public static finalHBaseClassTestRule CLASS_RULE
 
 
 
@@ -287,7 +295,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 LOG
-private static finalorg.slf4j.Logger LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -296,7 +304,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 implType
-private 
finalorg.apache.hadoop.hbase.thrift.ThriftServerRunner.ImplType implType
+protected finalorg.apache.hadoop.hbase.thrift.ImplType implType
 
 
 
@@ -305,7 +313,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 specifyFramed
-privateboolean specifyFramed
+protectedboolean specifyFramed
 
 
 
@@ -314,7 +322,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 specifyBindIP
-privateboolean specifyBindIP
+protectedboolean specifyBindIP
 
 
 
@@ -323,7 +331,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 specifyCompact
-privateboolean specifyCompact
+protectedboolean specifyCompact
 
 
 
@@ -332,7 +340,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 TEST_UTIL
-private static finalHBaseTestingUtility TEST_UTIL

[18/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.HttpKerberosServerAction.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.HttpKerberosServerAction.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.HttpKerberosServerAction.html
index 4ddef9a..5828be2 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.HttpKerberosServerAction.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.HttpKerberosServerAction.html
@@ -26,8 +26,8 @@
 018
 019package org.apache.hadoop.hbase.thrift;
 020
-021import static 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.THRIFT_SPNEGO_KEYTAB_FILE_KEY;
-022import static 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.THRIFT_SPNEGO_PRINCIPAL_KEY;
+021import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SPNEGO_KEYTAB_FILE_KEY;
+022import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SPNEGO_PRINCIPAL_KEY;
 023
 024import java.io.IOException;
 025import 
java.security.PrivilegedExceptionAction;
@@ -66,7 +66,7 @@
 058  private static final Logger LOG = 
LoggerFactory.getLogger(ThriftHttpServlet.class.getName());
 059  private final transient 
UserGroupInformation serviceUGI;
 060  private final transient 
UserGroupInformation httpUGI;
-061  private final transient 
ThriftServerRunner.HBaseHandler hbaseHandler;
+061  private final transient 
HBaseServiceHandler handler;
 062  private final boolean doAsEnabled;
 063  private final boolean 
securityEnabled;
 064
@@ -75,11 +75,11 @@
 067
 068  public ThriftHttpServlet(TProcessor 
processor, TProtocolFactory protocolFactory,
 069  UserGroupInformation serviceUGI, 
Configuration conf,
-070  ThriftServerRunner.HBaseHandler 
hbaseHandler, boolean securityEnabled, boolean doAsEnabled)
+070  HBaseServiceHandler handler, 
boolean securityEnabled, boolean doAsEnabled)
 071  throws IOException {
 072super(processor, protocolFactory);
 073this.serviceUGI = serviceUGI;
-074this.hbaseHandler = hbaseHandler;
+074this.handler = handler;
 075this.securityEnabled = 
securityEnabled;
 076this.doAsEnabled = doAsEnabled;
 077
@@ -154,7 +154,7 @@
 146  }
 147  effectiveUser = 
doAsUserFromQuery;
 148}
-149
hbaseHandler.setEffectiveUser(effectiveUser);
+149
handler.setEffectiveUser(effectiveUser);
 150super.doPost(request, response);
 151  }
 152

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.RemoteUserIdentity.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.RemoteUserIdentity.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.RemoteUserIdentity.html
index 4ddef9a..5828be2 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.RemoteUserIdentity.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.RemoteUserIdentity.html
@@ -26,8 +26,8 @@
 018
 019package org.apache.hadoop.hbase.thrift;
 020
-021import static 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.THRIFT_SPNEGO_KEYTAB_FILE_KEY;
-022import static 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.THRIFT_SPNEGO_PRINCIPAL_KEY;
+021import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SPNEGO_KEYTAB_FILE_KEY;
+022import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SPNEGO_PRINCIPAL_KEY;
 023
 024import java.io.IOException;
 025import 
java.security.PrivilegedExceptionAction;
@@ -66,7 +66,7 @@
 058  private static final Logger LOG = 
LoggerFactory.getLogger(ThriftHttpServlet.class.getName());
 059  private final transient 
UserGroupInformation serviceUGI;
 060  private final transient 
UserGroupInformation httpUGI;
-061  private final transient 
ThriftServerRunner.HBaseHandler hbaseHandler;
+061  private final transient 
HBaseServiceHandler handler;
 062  private final boolean doAsEnabled;
 063  private final boolean 
securityEnabled;
 064
@@ -75,11 +75,11 @@
 067
 068  public ThriftHttpServlet(TProcessor 
processor, TProtocolFactory protocolFactory,
 069  UserGroupInformation serviceUGI, 
Configuration conf,
-070  ThriftServerRunner.HBaseHandler 
hbaseHandler, boolean securityEnabled, boolean doAsEnabled)
+070  HBaseServiceHandler handler, 
boolean securityEnabled, boolean doAsEnabled)
 071  throws IOException {
 072super(processor, protocolFactory);
 073this.serviceUGI = serviceUGI;
-074this.hbaseHandler = hbaseHandler;
+074this.handler = handler;
 075this.securityEnabled = 
securityEnabled;
 076this.doAsEnabled = doAsEnabled;
 077
@@ -154,7 +154,7 @@
 146  }
 147  effectiveUser = 
doAsUserFromQuery;
 

[44/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.html
index ec24cd1..85b6088 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.html
@@ -193,7 +193,7 @@ extends CoprocessorHost
-ABORT_ON_ERROR_KEY,
 abortable,
 conf,
 coprocEnvironments,
 COPROCESSORS_ENABLED_CONF_KEY,
 DEFAULT_ABORT_ON_ERROR,
 DEFAULT_COPROCESSORS_ENABLED,
 DEFAULT_USER_COPROCESSORS_ENABLED, loadSequence,
 MASTER_COPROCESSOR_CONF_KEY,
 pathPrefix,
 REGION_COPROCESSOR_CONF_KEY,
 REGIONSERVER_COPROCESSOR_CONF_KEY,
 USER_COPROCESSORS_ENABLED_CONF_KEY,
 USER_REGION_COPROCE
 SSOR_CONF_KEY, WAL_COPROCESSOR_CONF_KEY
+ABORT_ON_ERROR_KEY,
 abortable,
 conf,
 coprocEnvironments,
 COPROCESSORS_ENABLED_CONF_KEY,
 DEFAULT_ABORT_ON_ERROR,
 DEFAULT_COPROCESSORS_ENABLED,
 DEFAULT_SKIP_LOAD_DUPLICATE_TABLE_COPROCESSOR, DEFAULT_USER_COPROCESSORS_ENABLED,
 loadSequence,
 MASTER_COPROCESSOR_CONF_KEY,
 pathPrefix,
 REGION_COPROCESSOR_CONF_KEY,
 REGIONSERVER_COPROCESSOR_CONF_KEY,
 SKIP_LOAD_DUPLICATE_TABLE_COPROCESSOR, USER_COPROCESSORS_ENABLED_CONF_KEY,
 USER_REGION_COPROCESSOR_CONF_KEY,
 WAL_COPROCESSOR_CONF_KEY
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 7bacd5f..b8922b2 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -716,20 +716,20 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.regionserver.CompactingMemStore.IndexType
-org.apache.hadoop.hbase.regionserver.ScannerContext.NextState
-org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
-org.apache.hadoop.hbase.regionserver.ChunkCreator.ChunkType
 org.apache.hadoop.hbase.regionserver.TimeRangeTracker.Type
-org.apache.hadoop.hbase.regionserver.HRegion.FlushResult.Result
 org.apache.hadoop.hbase.regionserver.ScanType
-org.apache.hadoop.hbase.regionserver.Region.Operation
-org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactoryImpl.FactoryStorage
-org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope
+org.apache.hadoop.hbase.regionserver.BloomType
 org.apache.hadoop.hbase.regionserver.MemStoreCompactionStrategy.Action
 org.apache.hadoop.hbase.regionserver.DefaultHeapMemoryTuner.StepDirection
+org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
+org.apache.hadoop.hbase.regionserver.HRegion.FlushResult.Result
 org.apache.hadoop.hbase.regionserver.FlushType
-org.apache.hadoop.hbase.regionserver.BloomType
+org.apache.hadoop.hbase.regionserver.CompactingMemStore.IndexType
+org.apache.hadoop.hbase.regionserver.ChunkCreator.ChunkType
+org.apache.hadoop.hbase.regionserver.Region.Operation
+org.apache.hadoop.hbase.regionserver.ScannerContext.NextState
+org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactoryImpl.FactoryStorage
+org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
index 03a894e..1d583d6 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
@@ -130,9 +130,9 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, 

[38/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html
deleted file mode 100644
index 5be3d57..000
--- 
a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html
+++ /dev/null
@@ -1,2385 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-
-
-
-ThriftServerRunner.HBaseHandler (Apache HBase 3.0.0-SNAPSHOT 
API)
-
-
-
-
-
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":9,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10};
-var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-
-
-JavaScript is disabled on your browser.
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-PrevClass
-NextClass
-
-
-Frames
-NoFrames
-
-
-AllClasses
-
-
-
-
-
-
-
-Summary:
-Nested|
-Field|
-Constr|
-Method
-
-
-Detail:
-Field|
-Constr|
-Method
-
-
-
-
-
-
-
-
-org.apache.hadoop.hbase.thrift
-Class 
ThriftServerRunner.HBaseHandler
-
-
-
-https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
-
-
-org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler
-
-
-
-
-
-
-
-All Implemented Interfaces:
-org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
-
-
-Enclosing class:
-ThriftServerRunner
-
-
-
-public static class ThriftServerRunner.HBaseHandler
-extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
-The HBaseHandler is a glue object that connects Thrift RPC 
calls to the
- HBase client API primarily defined in the Admin and Table objects.
-
-
-
-
-
-
-
-
-
-
-
-Field Summary
-
-Fields
-
-Modifier and Type
-Field and Description
-
-
-(package private) static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-CLEANUP_INTERVAL
-
-
-(package private) IncrementCoalescer
-coalescer
-
-
-protected 
org.apache.hadoop.conf.Configuration
-conf
-
-
-private ConnectionCache
-connectionCache
-
-
-protected static org.slf4j.Logger
-LOG
-
-
-(package private) static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-MAX_IDLETIME
-
-
-private ThriftMetrics
-metrics
-
-
-protected int
-nextScannerId
-
-
-protected https://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true;
 title="class or interface in java.util">HashMaphttps://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer,ThriftServerRunner.ResultScannerWrapper
-scannerMap
-
-
-
-
-
-
-
-
-
-Constructor Summary
-
-Constructors
-
-Modifier
-Constructor and Description
-
-
-protected 
-HBaseHandler(org.apache.hadoop.conf.Configurationc,
-UserProvideruserProvider)
-
-
-
-
-
-
-
-
-
-Method Summary
-
-All MethodsStatic MethodsInstance MethodsConcrete Methods
-
-Modifier and Type
-Method and Description
-
-
-protected int
-addScanner(ResultScannerscanner,
-  booleansortColumns)
-Assigns a unique ID to the scanner and adds the mapping to 
an internal
- hash-map.
-
-
-
-https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.thrift.generated.TCell
-append(org.apache.hadoop.hbase.thrift.generated.TAppendtappend)
-Appends values to one or more columns within a single 
row.
-
-
-
-protected long

[08/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftServer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftServer.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftServer.html
index 783dc34..5898688 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftServer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftServer.html
@@ -26,609 +26,99 @@
 018 */
 019package 
org.apache.hadoop.hbase.thrift2;
 020
-021import java.io.IOException;
-022import java.net.InetAddress;
-023import java.net.InetSocketAddress;
-024import java.net.UnknownHostException;
-025import java.security.PrivilegedAction;
-026import java.util.Map;
-027import 
java.util.concurrent.ExecutorService;
-028import 
java.util.concurrent.LinkedBlockingQueue;
-029import 
java.util.concurrent.SynchronousQueue;
-030import 
java.util.concurrent.ThreadPoolExecutor;
-031import java.util.concurrent.TimeUnit;
-032
-033import 
javax.security.auth.callback.Callback;
-034import 
javax.security.auth.callback.UnsupportedCallbackException;
-035import 
javax.security.sasl.AuthorizeCallback;
-036import javax.security.sasl.SaslServer;
-037
-038import 
org.apache.hadoop.conf.Configuration;
-039import 
org.apache.hadoop.conf.Configured;
-040import 
org.apache.hadoop.hbase.HBaseConfiguration;
-041import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-042import 
org.apache.hadoop.hbase.filter.ParseFilter;
-043import 
org.apache.hadoop.hbase.http.InfoServer;
-044import 
org.apache.hadoop.hbase.security.SaslUtil;
-045import 
org.apache.hadoop.hbase.security.SecurityUtil;
-046import 
org.apache.hadoop.hbase.security.UserProvider;
-047import 
org.apache.hadoop.hbase.thrift.CallQueue;
-048import 
org.apache.hadoop.hbase.thrift.THBaseThreadPoolExecutor;
-049import 
org.apache.hadoop.hbase.thrift.ThriftMetrics;
-050import 
org.apache.hadoop.hbase.thrift2.generated.THBaseService;
-051import 
org.apache.hadoop.hbase.util.DNS;
-052import 
org.apache.hadoop.hbase.util.JvmPauseMonitor;
-053import 
org.apache.hadoop.hbase.util.Strings;
-054import 
org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
-055import 
org.apache.hadoop.security.UserGroupInformation;
-056import org.apache.hadoop.util.Tool;
-057import 
org.apache.hadoop.util.ToolRunner;
-058import org.apache.thrift.TException;
-059import org.apache.thrift.TProcessor;
-060import 
org.apache.thrift.protocol.TBinaryProtocol;
-061import 
org.apache.thrift.protocol.TCompactProtocol;
-062import 
org.apache.thrift.protocol.TProtocol;
-063import 
org.apache.thrift.protocol.TProtocolFactory;
-064import 
org.apache.thrift.server.THsHaServer;
-065import 
org.apache.thrift.server.TNonblockingServer;
-066import 
org.apache.thrift.server.TServer;
-067import 
org.apache.thrift.server.TThreadPoolServer;
-068import 
org.apache.thrift.server.TThreadedSelectorServer;
-069import 
org.apache.thrift.transport.TFramedTransport;
-070import 
org.apache.thrift.transport.TNonblockingServerSocket;
-071import 
org.apache.thrift.transport.TNonblockingServerTransport;
-072import 
org.apache.thrift.transport.TSaslServerTransport;
-073import 
org.apache.thrift.transport.TServerSocket;
-074import 
org.apache.thrift.transport.TServerTransport;
-075import 
org.apache.thrift.transport.TTransportException;
-076import 
org.apache.thrift.transport.TTransportFactory;
-077import 
org.apache.yetus.audience.InterfaceAudience;
-078import org.slf4j.Logger;
-079import org.slf4j.LoggerFactory;
-080import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-081import 
org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
-082import 
org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLineParser;
-083import 
org.apache.hbase.thirdparty.org.apache.commons.cli.DefaultParser;
-084import 
org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
-085import 
org.apache.hbase.thirdparty.org.apache.commons.cli.Option;
-086import 
org.apache.hbase.thirdparty.org.apache.commons.cli.OptionGroup;
-087import 
org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
-088import 
org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
-089
-090/**
-091 * ThriftServer - this class starts up a 
Thrift server which implements the HBase API specified in
-092 * the HbaseClient.thrift IDL file.
-093 */
-094@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-095@SuppressWarnings({ "rawtypes", 
"unchecked" })
-096public class ThriftServer extends 
Configured implements Tool {
-097  private static final Logger log = 
LoggerFactory.getLogger(ThriftServer.class);
-098
-099  /**
-100   * Thrift quality of protection 
configuration key. Valid values can be:
-101   * privacy: authentication, integrity 
and confidentiality checking
-102   * integrity: authentication and 
integrity checking

[24/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html
index c4e8c8b..aa58108 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html
@@ -82,602 +82,613 @@
 074  public static final String 
USER_COPROCESSORS_ENABLED_CONF_KEY =
 075"hbase.coprocessor.user.enabled";
 076  public static final boolean 
DEFAULT_USER_COPROCESSORS_ENABLED = true;
-077
-078  private static final Logger LOG = 
LoggerFactory.getLogger(CoprocessorHost.class);
-079  protected Abortable abortable;
-080  /** Ordered set of loaded coprocessors 
with lock */
-081  protected final SortedListE 
coprocEnvironments =
-082  new SortedList(new 
EnvironmentPriorityComparator());
-083  protected Configuration conf;
-084  // unique file prefix to use for local 
copies of jars when classloading
-085  protected String pathPrefix;
-086  protected AtomicInteger loadSequence = 
new AtomicInteger();
-087
-088  public CoprocessorHost(Abortable 
abortable) {
-089this.abortable = abortable;
-090this.pathPrefix = 
UUID.randomUUID().toString();
-091  }
-092
-093  /**
-094   * Not to be confused with the 
per-object _coprocessors_ (above),
-095   * coprocessorNames is static and 
stores the set of all coprocessors ever
-096   * loaded by any thread in this JVM. It 
is strictly additive: coprocessors are
-097   * added to coprocessorNames, by 
checkAndLoadInstance() but are never removed, since
-098   * the intention is to preserve a 
history of all loaded coprocessors for
-099   * diagnosis in case of server crash 
(HBASE-4014).
-100   */
-101  private static SetString 
coprocessorNames =
-102  Collections.synchronizedSet(new 
HashSetString());
-103
-104  public static SetString 
getLoadedCoprocessors() {
-105synchronized (coprocessorNames) {
-106  return new 
HashSet(coprocessorNames);
-107}
-108  }
-109
-110  /**
-111   * Used to create a parameter to the 
HServerLoad constructor so that
-112   * HServerLoad can provide information 
about the coprocessors loaded by this
-113   * regionserver.
-114   * (HBASE-4070: Improve region server 
metrics to report loaded coprocessors
-115   * to master).
-116   */
-117  public SetString 
getCoprocessors() {
-118SetString returnValue = new 
TreeSet();
-119for (E e: coprocEnvironments) {
-120  
returnValue.add(e.getInstance().getClass().getSimpleName());
-121}
-122return returnValue;
-123  }
-124
-125  /**
-126   * Load system coprocessors once only. 
Read the class names from configuration.
-127   * Called by constructor.
-128   */
-129  protected void 
loadSystemCoprocessors(Configuration conf, String confKey) {
-130boolean coprocessorsEnabled = 
conf.getBoolean(COPROCESSORS_ENABLED_CONF_KEY,
-131  DEFAULT_COPROCESSORS_ENABLED);
-132if (!coprocessorsEnabled) {
-133  return;
-134}
-135
-136Class? implClass;
-137
-138// load default coprocessors from 
configure file
-139String[] defaultCPClasses = 
conf.getStrings(confKey);
-140if (defaultCPClasses == null || 
defaultCPClasses.length == 0)
-141  return;
-142
-143int priority = 
Coprocessor.PRIORITY_SYSTEM;
-144for (String className : 
defaultCPClasses) {
-145  className = className.trim();
-146  if (findCoprocessor(className) != 
null) {
-147// If already loaded will just 
continue
-148LOG.warn("Attempted duplicate 
loading of " + className + "; skipped");
-149continue;
-150  }
-151  ClassLoader cl = 
this.getClass().getClassLoader();
-152  
Thread.currentThread().setContextClassLoader(cl);
-153  try {
-154implClass = 
cl.loadClass(className);
-155// Add coprocessors as we go to 
guard against case where a coprocessor is specified twice
-156// in the configuration
-157E env = 
checkAndLoadInstance(implClass, priority, conf);
-158if (env != null) {
-159  
this.coprocEnvironments.add(env);
-160  LOG.info("System coprocessor {} 
loaded, priority={}.", className, priority);
-161  ++priority;
-162}
-163  } catch (Throwable t) {
-164// We always abort if system 
coprocessors cannot be loaded
-165abortServer(className, t);
-166  }
-167}
-168  }
-169
-170  /**
-171   * Load a coprocessor implementation 
into the host
-172   * @param path path to implementation 
jar
-173   * @param className the main class 
name
-174   * @param priority chaining priority
-175   * @param conf configuration for 
coprocessor
-176   * @throws java.io.IOException 
Exception
-177   */
-178  public E load(Path path, 

[49/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index e0062c0..117e755 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 2007 - 2019 The Apache Software Foundation
 
-  File: 3834,
- Errors: 14667,
+  File: 3840,
+ Errors: 14666,
  Warnings: 0,
  Infos: 0
   
@@ -12352,6 +12352,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.thrift.Constants.java;>org/apache/hadoop/hbase/thrift/Constants.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.thrift.generated.TCell.java;>org/apache/hadoop/hbase/thrift/generated/TCell.java
 
 
@@ -13500,6 +13514,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.thrift.HBaseServiceHandler.java;>org/apache/hadoop/hbase/thrift/HBaseServiceHandler.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.util.TestFSHDFSUtils.java;>org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
 
 
@@ -14536,6 +14564,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.thrift2.TestThrift2ServerCmdLine.java;>org/apache/hadoop/hbase/thrift2/TestThrift2ServerCmdLine.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.wal.AbstractTestProtobufLog.java;>org/apache/hadoop/hbase/regionserver/wal/AbstractTestProtobufLog.java
 
 
@@ -19268,6 +19310,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.coprocessor.TestRegionCoprocessorHost.java;>org/apache/hadoop/hbase/coprocessor/TestRegionCoprocessorHost.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.procedure2.ProcedureYieldException.java;>org/apache/hadoop/hbase/procedure2/ProcedureYieldException.java
 
 
@@ -37253,7 +37309,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
@@ -38896,6 +38952,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.thrift.ImplType.java;>org/apache/hadoop/hbase/thrift/ImplType.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.io.hfile.TestChecksum.java;>org/apache/hadoop/hbase/io/hfile/TestChecksum.java
 
 
@@ -42676,20 +42746,6 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.thrift.ThriftServerRunner.java;>org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
-
-
-  0
-
-
-  0
-
-
-  0
-
-  
-  
-
   

[30/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/overview-tree.html
--
diff --git a/devapidocs/overview-tree.html b/devapidocs/overview-tree.html
index 97deec1..2ea250a 100644
--- a/devapidocs/overview-tree.html
+++ b/devapidocs/overview-tree.html
@@ -1364,7 +1364,11 @@
 
 
 org.apache.hadoop.hbase.mapreduce.SyncTable (implements 
org.apache.hadoop.util.Tool)
-org.apache.hadoop.hbase.thrift2.ThriftServer (implements 
org.apache.hadoop.util.Tool)
+org.apache.hadoop.hbase.thrift.ThriftServer (implements 
org.apache.hadoop.util.Tool)
+
+org.apache.hadoop.hbase.thrift2.ThriftServer
+
+
 org.apache.hadoop.hbase.mapreduce.replication.VerifyReplication (implements 
org.apache.hadoop.util.Tool)
 org.apache.hadoop.hbase.mapreduce.WALPlayer (implements 
org.apache.hadoop.util.Tool)
 org.apache.hadoop.hbase.zookeeper.ZKAclReset (implements 
org.apache.hadoop.util.Tool)
@@ -1387,6 +1391,7 @@
 org.apache.hadoop.hbase.client.ConnectionImplementation.ServerErrorTracker
 org.apache.hadoop.hbase.client.ConnectionImplementation.ServerErrorTracker.ServerErrors
 org.apache.hadoop.hbase.client.ConnectionUtils
+org.apache.hadoop.hbase.thrift.Constants
 org.apache.hadoop.hbase.constraint.ConstraintProcessor (implements 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessor, 
org.apache.hadoop.hbase.coprocessor.RegionObserver)
 org.apache.hadoop.hbase.constraint.Constraints
 org.apache.hadoop.hbase.io.crypto.Context (implements 
org.apache.hadoop.conf.Configurable)
@@ -1867,6 +1872,12 @@
 org.apache.hadoop.hbase.security.HBaseSaslRpcServer
 org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslDigestCallbackHandler 
(implements javax.security.auth.callback.https://docs.oracle.com/javase/8/docs/api/javax/security/auth/callback/CallbackHandler.html?is-external=true;
 title="class or interface in 
javax.security.auth.callback">CallbackHandler)
 org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslGssCallbackHandler 
(implements javax.security.auth.callback.https://docs.oracle.com/javase/8/docs/api/javax/security/auth/callback/CallbackHandler.html?is-external=true;
 title="class or interface in 
javax.security.auth.callback">CallbackHandler)
+org.apache.hadoop.hbase.thrift.HBaseServiceHandler
+
+org.apache.hadoop.hbase.thrift2.ThriftHBaseServiceHandler (implements 
org.apache.hadoop.hbase.thrift2.generated.THBaseService.Iface)
+org.apache.hadoop.hbase.thrift.ThriftHBaseServiceHandler (implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface)
+
+
 org.apache.hadoop.hbase.HColumnDescriptor (implements 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor, 
java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT)
 
 org.apache.hadoop.hbase.client.ImmutableHColumnDescriptor
@@ -3842,15 +3853,10 @@
 org.apache.hadoop.hbase.util.Threads
 org.apache.hadoop.hbase.util.Threads.PrintThreadInfoLazyHolder
 org.apache.hadoop.hbase.regionserver.ThreadSafeMemStoreSizing (implements 
org.apache.hadoop.hbase.regionserver.MemStoreSizing)
-org.apache.hadoop.hbase.thrift2.ThriftHBaseServiceHandler (implements 
org.apache.hadoop.hbase.thrift2.generated.THBaseService.Iface)
-org.apache.hadoop.hbase.thrift2.ThriftHBaseServiceHandler.THBaseServiceMetricsProxy
 (implements java.lang.reflect.https://docs.oracle.com/javase/8/docs/api/java/lang/reflect/InvocationHandler.html?is-external=true;
 title="class or interface in java.lang.reflect">InvocationHandler)
+org.apache.hadoop.hbase.thrift.ThriftHBaseServiceHandler.ResultScannerWrapper
 org.apache.hadoop.hbase.thrift.ThriftHttpServlet.HttpKerberosServerAction 
(implements java.security.https://docs.oracle.com/javase/8/docs/api/java/security/PrivilegedExceptionAction.html?is-external=true;
 title="class or interface in 
java.security">PrivilegedExceptionActionT)
 org.apache.hadoop.hbase.thrift.ThriftHttpServlet.RemoteUserIdentity
 org.apache.hadoop.hbase.thrift.ThriftMetrics
-org.apache.hadoop.hbase.thrift.ThriftServer
-org.apache.hadoop.hbase.thrift.ThriftServerRunner (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable)
-org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler (implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface)
-org.apache.hadoop.hbase.thrift.ThriftServerRunner.ResultScannerWrapper
 org.apache.hadoop.hbase.thrift2.ThriftUtilities
 org.apache.hadoop.hbase.thrift.ThriftUtilities
 org.apache.hadoop.hbase.regionserver.throttle.ThroughputControlUtil
@@ -4085,7 +4091,7 @@
 
 org.apache.hadoop.hbase.thrift.generated.IOError (implements 
java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Cloneable.html?is-external=true;
 title="class or interface in java.lang">Cloneable, java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 

[27/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperation.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperation.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperation.html
index c4e8c8b..aa58108 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperation.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperation.html
@@ -82,602 +82,613 @@
 074  public static final String 
USER_COPROCESSORS_ENABLED_CONF_KEY =
 075"hbase.coprocessor.user.enabled";
 076  public static final boolean 
DEFAULT_USER_COPROCESSORS_ENABLED = true;
-077
-078  private static final Logger LOG = 
LoggerFactory.getLogger(CoprocessorHost.class);
-079  protected Abortable abortable;
-080  /** Ordered set of loaded coprocessors 
with lock */
-081  protected final SortedListE 
coprocEnvironments =
-082  new SortedList(new 
EnvironmentPriorityComparator());
-083  protected Configuration conf;
-084  // unique file prefix to use for local 
copies of jars when classloading
-085  protected String pathPrefix;
-086  protected AtomicInteger loadSequence = 
new AtomicInteger();
-087
-088  public CoprocessorHost(Abortable 
abortable) {
-089this.abortable = abortable;
-090this.pathPrefix = 
UUID.randomUUID().toString();
-091  }
-092
-093  /**
-094   * Not to be confused with the 
per-object _coprocessors_ (above),
-095   * coprocessorNames is static and 
stores the set of all coprocessors ever
-096   * loaded by any thread in this JVM. It 
is strictly additive: coprocessors are
-097   * added to coprocessorNames, by 
checkAndLoadInstance() but are never removed, since
-098   * the intention is to preserve a 
history of all loaded coprocessors for
-099   * diagnosis in case of server crash 
(HBASE-4014).
-100   */
-101  private static SetString 
coprocessorNames =
-102  Collections.synchronizedSet(new 
HashSetString());
-103
-104  public static SetString 
getLoadedCoprocessors() {
-105synchronized (coprocessorNames) {
-106  return new 
HashSet(coprocessorNames);
-107}
-108  }
-109
-110  /**
-111   * Used to create a parameter to the 
HServerLoad constructor so that
-112   * HServerLoad can provide information 
about the coprocessors loaded by this
-113   * regionserver.
-114   * (HBASE-4070: Improve region server 
metrics to report loaded coprocessors
-115   * to master).
-116   */
-117  public SetString 
getCoprocessors() {
-118SetString returnValue = new 
TreeSet();
-119for (E e: coprocEnvironments) {
-120  
returnValue.add(e.getInstance().getClass().getSimpleName());
-121}
-122return returnValue;
-123  }
-124
-125  /**
-126   * Load system coprocessors once only. 
Read the class names from configuration.
-127   * Called by constructor.
-128   */
-129  protected void 
loadSystemCoprocessors(Configuration conf, String confKey) {
-130boolean coprocessorsEnabled = 
conf.getBoolean(COPROCESSORS_ENABLED_CONF_KEY,
-131  DEFAULT_COPROCESSORS_ENABLED);
-132if (!coprocessorsEnabled) {
-133  return;
-134}
-135
-136Class? implClass;
-137
-138// load default coprocessors from 
configure file
-139String[] defaultCPClasses = 
conf.getStrings(confKey);
-140if (defaultCPClasses == null || 
defaultCPClasses.length == 0)
-141  return;
-142
-143int priority = 
Coprocessor.PRIORITY_SYSTEM;
-144for (String className : 
defaultCPClasses) {
-145  className = className.trim();
-146  if (findCoprocessor(className) != 
null) {
-147// If already loaded will just 
continue
-148LOG.warn("Attempted duplicate 
loading of " + className + "; skipped");
-149continue;
-150  }
-151  ClassLoader cl = 
this.getClass().getClassLoader();
-152  
Thread.currentThread().setContextClassLoader(cl);
-153  try {
-154implClass = 
cl.loadClass(className);
-155// Add coprocessors as we go to 
guard against case where a coprocessor is specified twice
-156// in the configuration
-157E env = 
checkAndLoadInstance(implClass, priority, conf);
-158if (env != null) {
-159  
this.coprocEnvironments.add(env);
-160  LOG.info("System coprocessor {} 
loaded, priority={}.", className, priority);
-161  ++priority;
-162}
-163  } catch (Throwable t) {
-164// We always abort if system 
coprocessors cannot be loaded
-165abortServer(className, t);
-166  }
-167}
-168  }
-169
-170  /**
-171   * Load a coprocessor implementation 
into the host
-172   * @param path path to implementation 
jar
-173   * @param className the main class 
name
-174   * @param priority chaining priority
-175   * @param conf configuration for 
coprocessor

[16/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html
deleted file mode 100644
index e692633..000
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html
+++ /dev/null
@@ -1,2103 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-Source code
-
-
-
-
-001/*
-002 * Licensed to the Apache Software 
Foundation (ASF) under one
-003 * or more contributor license 
agreements.  See the NOTICE file
-004 * distributed with this work for 
additional information
-005 * regarding copyright ownership.  The 
ASF licenses this file
-006 * to you under the Apache License, 
Version 2.0 (the
-007 * "License"); you may not use this file 
except in compliance
-008 * with the License.  You may obtain a 
copy of the License at
-009 *
-010 * 
http://www.apache.org/licenses/LICENSE-2.0
-011 *
-012 * Unless required by applicable law or 
agreed to in writing, software
-013 * distributed under the License is 
distributed on an "AS IS" BASIS,
-014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-015 * See the License for the specific 
language governing permissions and
-016 * limitations under the License.
-017 */
-018
-019package org.apache.hadoop.hbase.thrift;
-020
-021import static 
org.apache.hadoop.hbase.util.Bytes.getBytes;
-022
-023import java.io.IOException;
-024import java.net.InetAddress;
-025import java.net.InetSocketAddress;
-026import java.net.UnknownHostException;
-027import java.nio.ByteBuffer;
-028import java.security.PrivilegedAction;
-029import java.util.ArrayList;
-030import java.util.Arrays;
-031import java.util.Collections;
-032import java.util.HashMap;
-033import java.util.List;
-034import java.util.Map;
-035import java.util.TreeMap;
-036import 
java.util.concurrent.BlockingQueue;
-037import 
java.util.concurrent.ExecutorService;
-038import 
java.util.concurrent.LinkedBlockingQueue;
-039import 
java.util.concurrent.ThreadPoolExecutor;
-040import java.util.concurrent.TimeUnit;
-041
-042import 
javax.security.auth.callback.Callback;
-043import 
javax.security.auth.callback.UnsupportedCallbackException;
-044import 
javax.security.sasl.AuthorizeCallback;
-045import javax.security.sasl.SaslServer;
-046
-047import 
org.apache.commons.lang3.ArrayUtils;
-048import 
org.apache.hadoop.conf.Configuration;
-049import 
org.apache.hadoop.hbase.Cell.Type;
-050import 
org.apache.hadoop.hbase.CellBuilder;
-051import 
org.apache.hadoop.hbase.CellBuilderFactory;
-052import 
org.apache.hadoop.hbase.CellBuilderType;
-053import 
org.apache.hadoop.hbase.CellUtil;
-054import 
org.apache.hadoop.hbase.HBaseConfiguration;
-055import 
org.apache.hadoop.hbase.HColumnDescriptor;
-056import 
org.apache.hadoop.hbase.HConstants;
-057import 
org.apache.hadoop.hbase.HRegionLocation;
-058import 
org.apache.hadoop.hbase.HTableDescriptor;
-059import 
org.apache.hadoop.hbase.KeyValue;
-060import 
org.apache.hadoop.hbase.MetaTableAccessor;
-061import 
org.apache.hadoop.hbase.ServerName;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotFoundException;
-064import 
org.apache.hadoop.hbase.client.Admin;
-065import 
org.apache.hadoop.hbase.client.Append;
-066import 
org.apache.hadoop.hbase.client.Delete;
-067import 
org.apache.hadoop.hbase.client.Durability;
-068import 
org.apache.hadoop.hbase.client.Get;
-069import 
org.apache.hadoop.hbase.client.Increment;
-070import 
org.apache.hadoop.hbase.client.OperationWithAttributes;
-071import 
org.apache.hadoop.hbase.client.Put;
-072import 
org.apache.hadoop.hbase.client.RegionInfo;
-073import 
org.apache.hadoop.hbase.client.RegionLocator;
-074import 
org.apache.hadoop.hbase.client.Result;
-075import 
org.apache.hadoop.hbase.client.ResultScanner;
-076import 
org.apache.hadoop.hbase.client.Scan;
-077import 
org.apache.hadoop.hbase.client.Table;
-078import 
org.apache.hadoop.hbase.filter.Filter;
-079import 
org.apache.hadoop.hbase.filter.ParseFilter;
-080import 
org.apache.hadoop.hbase.filter.PrefixFilter;
-081import 
org.apache.hadoop.hbase.filter.WhileMatchFilter;
-082import 
org.apache.hadoop.hbase.http.HttpServerUtil;
-083import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-084import 
org.apache.hadoop.hbase.security.SaslUtil;
-085import 
org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-086import 
org.apache.hadoop.hbase.security.SecurityUtil;
-087import 
org.apache.hadoop.hbase.security.UserProvider;
-088import 
org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
-089import 
org.apache.hadoop.hbase.thrift.generated.BatchMutation;
-090import 
org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
-091import 

[45/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html
index 229d724..48fb206 100644
--- a/devapidocs/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html
+++ b/devapidocs/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html
@@ -222,38 +222,46 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 static boolean
-DEFAULT_USER_COPROCESSORS_ENABLED
+DEFAULT_SKIP_LOAD_DUPLICATE_TABLE_COPROCESSOR
 
 
+static boolean
+DEFAULT_USER_COPROCESSORS_ENABLED
+
+
 private static https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttps://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class? extends Coprocessor
 legacyWarning
 Used to limit legacy handling to once per Coprocessor class 
per classloader.
 
 
-
+
 protected https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicInteger
 loadSequence
 
-
+
 private static org.slf4j.Logger
 LOG
 
-
+
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 MASTER_COPROCESSOR_CONF_KEY
 
-
+
 protected https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 pathPrefix
 
-
+
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_COPROCESSOR_CONF_KEY
 
-
+
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGIONSERVER_COPROCESSOR_CONF_KEY
 
+
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+SKIP_LOAD_DUPLICATE_TABLE_COPROCESSOR
+
 
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 USER_COPROCESSORS_ENABLED_CONF_KEY
@@ -596,13 +604,39 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 
+
+
+
+
+
+SKIP_LOAD_DUPLICATE_TABLE_COPROCESSOR
+public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String SKIP_LOAD_DUPLICATE_TABLE_COPROCESSOR
+
+See Also:
+Constant
 Field Values
+
+
+
+
+
+
+
+
+DEFAULT_SKIP_LOAD_DUPLICATE_TABLE_COPROCESSOR
+public static finalboolean DEFAULT_SKIP_LOAD_DUPLICATE_TABLE_COPROCESSOR
+
+See Also:
+Constant
 Field Values
+
+
+
 
 
 
 
 
 LOG
-private static finalorg.slf4j.Logger LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -611,7 +645,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 abortable
-protectedAbortable abortable
+protectedAbortable abortable
 
 
 
@@ -620,7 +654,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 coprocEnvironments
-protected finalSortedListE extends CoprocessorEnvironmentC coprocEnvironments
+protected finalSortedListE extends CoprocessorEnvironmentC coprocEnvironments
 Ordered set of loaded coprocessors with lock
 
 
@@ -630,7 +664,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 conf
-protectedorg.apache.hadoop.conf.Configuration conf
+protectedorg.apache.hadoop.conf.Configuration conf
 
 
 
@@ -639,7 +673,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 pathPrefix
-protectedhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String pathPrefix
+protectedhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String pathPrefix
 
 
 
@@ -648,7 +682,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 loadSequence
-protectedhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger loadSequence
+protectedhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger loadSequence
 
 
 
@@ -657,7 +691,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 coprocessorNames
-private statichttps://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 

[31/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftServer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftServer.html 
b/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftServer.html
index c13d3bf..a59130a 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftServer.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":9,"i1":9,"i2":10,"i3":10,"i4":10,"i5":9,"i6":10,"i7":10,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":10,"i19":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":9,"i4":10,"i5":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -103,12 +103,17 @@ var activeTableTab = "activeTableTab";
 org.apache.hadoop.conf.Configured
 
 
+org.apache.hadoop.hbase.thrift.ThriftServer
+
+
 org.apache.hadoop.hbase.thrift2.ThriftServer
 
 
 
 
 
+
+
 
 
 
@@ -119,9 +124,8 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value="Tools")
-public class ThriftServer
-extends org.apache.hadoop.conf.Configured
-implements org.apache.hadoop.util.Tool
+public class ThriftServer
+extends ThriftServer
 ThriftServer - this class starts up a Thrift server which 
implements the HBase API specified in
  the HbaseClient.thrift IDL file.
 
@@ -143,39 +147,17 @@ implements org.apache.hadoop.util.Tool
 Field and Description
 
 
-(package private) static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-BACKLOG_CONF_KEY
-
-
-static int
-DEFAULT_LISTEN_PORT
-
-
 private static org.slf4j.Logger
 log
 
-
-private static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-READ_TIMEOUT_OPTION
-
-
-(package private) static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-THRIFT_QOP_KEY
-Thrift quality of protection configuration key.
-
-
-
-static int
-THRIFT_SERVER_SOCKET_READ_TIMEOUT_DEFAULT
-
-
-static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY
-Amount of time in milliseconds before a server thread will 
timeout
- waiting for client to send data on a connected socket.
-
-
 
+
+
+
+
+Fields inherited from classorg.apache.hadoop.hbase.thrift.ThriftServer
+conf,
 doAsEnabled,
 hbaseServiceHandler,
 host,
 httpEnabled,
 httpServer,
 infoServer,
 listenPort,
 metrics,
 pau
 seMonitor, processor,
 qop,
 securityEnabled,
 serviceUGI,
 tserver
+
 
 
 
@@ -190,7 +172,7 @@ implements org.apache.hadoop.util.Tool
 Constructor and Description
 
 
-ThriftServer()
+ThriftServer(org.apache.hadoop.conf.Configurationconf)
 
 
 
@@ -208,140 +190,45 @@ implements org.apache.hadoop.util.Tool
 Method and Description
 
 
-private static https://docs.oracle.com/javase/8/docs/api/java/net/InetSocketAddress.html?is-external=true;
 title="class or interface in java.net">InetSocketAddress
-bindToPort(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringbindValue,
-  intlistenPort)
+protected void
+addOptions(org.apache.hbase.thirdparty.org.apache.commons.cli.Optionsoptions)
+Add options to command lines
+
 
 
-private static https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ExecutorService
-createExecutor(intworkerThreads,
-  intmaxCallQueueSize,
-  ThriftMetricsmetrics)
+protected HBaseServiceHandler
+createHandler(org.apache.hadoop.conf.Configurationconf,
+ UserProvideruserProvider)
 
 
-private https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-getBindAddress(org.apache.hadoop.conf.Configurationconf,
-  
org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLinecmd)
+protected org.apache.thrift.TProcessor
+createProcessor()
 
 
-private https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-getImplType(booleannonblocking,
-   booleanhsha,
-   booleanselector)
-
-
-private int
-getListenPort(org.apache.hadoop.conf.Configurationconf,
- 
org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLinecmd)
-
-
-private static 
org.apache.hbase.thirdparty.org.apache.commons.cli.Options
-getOptions()
-
-
-private int

[01/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 33046fea5 -> 849d84a8e


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/testdevapidocs/src-html/org/apache/hadoop/hbase/thrift/TestThriftSpnegoHttpServer.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/thrift/TestThriftSpnegoHttpServer.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/thrift/TestThriftSpnegoHttpServer.html
index ba3df71..f983a2d 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/thrift/TestThriftSpnegoHttpServer.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/thrift/TestThriftSpnegoHttpServer.html
@@ -26,247 +26,242 @@
 018 */
 019package org.apache.hadoop.hbase.thrift;
 020
-021import static 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.THRIFT_KERBEROS_PRINCIPAL_KEY;
-022import static 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.THRIFT_KEYTAB_FILE_KEY;
-023import static 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.THRIFT_SPNEGO_KEYTAB_FILE_KEY;
-024import static 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.THRIFT_SPNEGO_PRINCIPAL_KEY;
-025import static 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.THRIFT_SUPPORT_PROXYUSER_KEY;
-026import static 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.USE_HTTP_CONF_KEY;
-027import static 
org.junit.Assert.assertFalse;
-028import static 
org.junit.Assert.assertNotNull;
-029import static 
org.junit.Assert.assertTrue;
+021import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SUPPORT_PROXYUSER_KEY;
+022import static 
org.junit.Assert.assertFalse;
+023import static 
org.junit.Assert.assertNotNull;
+024import static 
org.junit.Assert.assertTrue;
+025
+026import java.io.File;
+027import java.security.Principal;
+028import 
java.security.PrivilegedExceptionAction;
+029import java.util.Set;
 030
-031import java.io.File;
-032import java.security.Principal;
-033import 
java.security.PrivilegedExceptionAction;
-034import java.util.Set;
-035
-036import javax.security.auth.Subject;
-037import 
javax.security.auth.kerberos.KerberosTicket;
-038
-039import org.apache.commons.io.FileUtils;
-040import 
org.apache.hadoop.conf.Configuration;
-041import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-042import 
org.apache.hadoop.hbase.HBaseTestingUtility;
-043import 
org.apache.hadoop.hbase.HConstants;
-044import 
org.apache.hadoop.hbase.security.HBaseKerberosUtils;
-045import 
org.apache.hadoop.hbase.testclassification.ClientTests;
-046import 
org.apache.hadoop.hbase.testclassification.LargeTests;
-047import 
org.apache.hadoop.hbase.thrift.generated.Hbase;
-048import 
org.apache.hadoop.security.authentication.util.KerberosName;
-049import org.apache.http.HttpHeaders;
-050import 
org.apache.http.auth.AuthSchemeProvider;
-051import org.apache.http.auth.AuthScope;
-052import 
org.apache.http.auth.KerberosCredentials;
-053import 
org.apache.http.client.config.AuthSchemes;
-054import org.apache.http.config.Lookup;
-055import 
org.apache.http.config.RegistryBuilder;
-056import 
org.apache.http.impl.auth.SPNegoSchemeFactory;
-057import 
org.apache.http.impl.client.BasicCredentialsProvider;
-058import 
org.apache.http.impl.client.CloseableHttpClient;
-059import 
org.apache.http.impl.client.HttpClients;
-060import 
org.apache.kerby.kerberos.kerb.KrbException;
-061import 
org.apache.kerby.kerberos.kerb.client.JaasKrbUtil;
-062import 
org.apache.kerby.kerberos.kerb.server.SimpleKdcServer;
-063import 
org.apache.thrift.protocol.TBinaryProtocol;
-064import 
org.apache.thrift.protocol.TProtocol;
-065import 
org.apache.thrift.transport.THttpClient;
-066import org.ietf.jgss.GSSCredential;
-067import org.ietf.jgss.GSSManager;
-068import org.ietf.jgss.GSSName;
-069import org.ietf.jgss.Oid;
-070import org.junit.AfterClass;
-071import org.junit.BeforeClass;
-072import org.junit.ClassRule;
-073import 
org.junit.experimental.categories.Category;
-074import org.slf4j.Logger;
-075import org.slf4j.LoggerFactory;
-076
-077/**
-078 * Start the HBase Thrift HTTP server on 
a random port through the command-line
-079 * interface and talk to it from client 
side with SPNEGO security enabled.
-080 */
-081@Category({ClientTests.class, 
LargeTests.class})
-082public class TestThriftSpnegoHttpServer 
extends TestThriftHttpServer {
-083  @ClassRule
-084  public static final HBaseClassTestRule 
CLASS_RULE =
-085
HBaseClassTestRule.forClass(TestThriftSpnegoHttpServer.class);
-086
-087  private static final Logger LOG =
-088
LoggerFactory.getLogger(TestThriftSpnegoHttpServer.class);
+031import javax.security.auth.Subject;
+032import 
javax.security.auth.kerberos.KerberosTicket;
+033
+034import org.apache.commons.io.FileUtils;
+035import 
org.apache.hadoop.conf.Configuration;
+036import 
org.apache.hadoop.hbase.HBaseClassTestRule;
+037import 
org.apache.hadoop.hbase.HBaseTestingUtility;
+038import 
org.apache.hadoop.hbase.HConstants;
+039import 

[41/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift/IncrementCoalescer.FullyQualifiedRow.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/thrift/IncrementCoalescer.FullyQualifiedRow.html
 
b/devapidocs/org/apache/hadoop/hbase/thrift/IncrementCoalescer.FullyQualifiedRow.html
index 352a9b5..7a0615b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/thrift/IncrementCoalescer.FullyQualifiedRow.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/thrift/IncrementCoalescer.FullyQualifiedRow.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class IncrementCoalescer.FullyQualifiedRow
+static class IncrementCoalescer.FullyQualifiedRow
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 Used to identify a cell that will be incremented.
 
@@ -253,7 +253,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 table
-privatebyte[] table
+privatebyte[] table
 
 
 
@@ -262,7 +262,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 rowKey
-privatebyte[] rowKey
+privatebyte[] rowKey
 
 
 
@@ -271,7 +271,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 family
-privatebyte[] family
+privatebyte[] family
 
 
 
@@ -280,7 +280,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 qualifier
-privatebyte[] qualifier
+privatebyte[] qualifier
 
 
 
@@ -297,7 +297,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 FullyQualifiedRow
-publicFullyQualifiedRow(byte[]table,
+publicFullyQualifiedRow(byte[]table,
  byte[]rowKey,
  byte[]fam,
  byte[]qual)
@@ -317,7 +317,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getTable
-publicbyte[]getTable()
+publicbyte[]getTable()
 
 
 
@@ -326,7 +326,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 setTable
-publicvoidsetTable(byte[]table)
+publicvoidsetTable(byte[]table)
 
 
 
@@ -335,7 +335,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getRowKey
-publicbyte[]getRowKey()
+publicbyte[]getRowKey()
 
 
 
@@ -344,7 +344,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 setRowKey
-publicvoidsetRowKey(byte[]rowKey)
+publicvoidsetRowKey(byte[]rowKey)
 
 
 
@@ -353,7 +353,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getFamily
-publicbyte[]getFamily()
+publicbyte[]getFamily()
 
 
 
@@ -362,7 +362,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 setFamily
-publicvoidsetFamily(byte[]fam)
+publicvoidsetFamily(byte[]fam)
 
 
 
@@ -371,7 +371,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getQualifier
-publicbyte[]getQualifier()
+publicbyte[]getQualifier()
 
 
 
@@ -380,7 +380,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 setQualifier
-publicvoidsetQualifier(byte[]qual)
+publicvoidsetQualifier(byte[]qual)
 
 
 
@@ -389,7 +389,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 hashCode
-publicinthashCode()
+publicinthashCode()
 
 Overrides:
 https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or interface in java.lang">hashCodein 
classhttps://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
@@ -402,7 +402,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 equals
-publicbooleanequals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
+publicbooleanequals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
 
 Overrides:
 https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equalsin 
classhttps://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift/IncrementCoalescer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/IncrementCoalescer.html 
b/devapidocs/org/apache/hadoop/hbase/thrift/IncrementCoalescer.html
index 8cd7000..7ed1cd4 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift/IncrementCoalescer.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/IncrementCoalescer.html
@@ -49,7 +49,7 @@ 

[21/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.IOErrorWithCause.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.IOErrorWithCause.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.IOErrorWithCause.html
new file mode 100644
index 000..5b5b199
--- /dev/null
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.IOErrorWithCause.html
@@ -0,0 +1,1419 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+Source code
+
+
+
+
+001/**
+002 *
+003 * Licensed to the Apache Software 
Foundation (ASF) under one
+004 * or more contributor license 
agreements.  See the NOTICE file
+005 * distributed with this work for 
additional information
+006 * regarding copyright ownership.  The 
ASF licenses this file
+007 * to you under the Apache License, 
Version 2.0 (the
+008 * "License"); you may not use this file 
except in compliance
+009 * with the License.  You may obtain a 
copy of the License at
+010 *
+011 * 
http://www.apache.org/licenses/LICENSE-2.0
+012 *
+013 * Unless required by applicable law or 
agreed to in writing, software
+014 * distributed under the License is 
distributed on an "AS IS" BASIS,
+015 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+016 * See the License for the specific 
language governing permissions and
+017 * limitations under the License.
+018 */
+019
+020package org.apache.hadoop.hbase.thrift;
+021
+022import static 
org.apache.hadoop.hbase.thrift.Constants.COALESCE_INC_KEY;
+023import static 
org.apache.hadoop.hbase.util.Bytes.getBytes;
+024
+025import java.io.IOException;
+026import java.nio.ByteBuffer;
+027import java.util.ArrayList;
+028import java.util.Collections;
+029import java.util.HashMap;
+030import java.util.List;
+031import java.util.Map;
+032import java.util.TreeMap;
+033
+034import 
org.apache.hadoop.conf.Configuration;
+035import org.apache.hadoop.hbase.Cell;
+036import 
org.apache.hadoop.hbase.CellBuilder;
+037import 
org.apache.hadoop.hbase.CellBuilderFactory;
+038import 
org.apache.hadoop.hbase.CellBuilderType;
+039import 
org.apache.hadoop.hbase.CellUtil;
+040import 
org.apache.hadoop.hbase.HColumnDescriptor;
+041import 
org.apache.hadoop.hbase.HConstants;
+042import 
org.apache.hadoop.hbase.HRegionLocation;
+043import 
org.apache.hadoop.hbase.HTableDescriptor;
+044import 
org.apache.hadoop.hbase.KeyValue;
+045import 
org.apache.hadoop.hbase.MetaTableAccessor;
+046import 
org.apache.hadoop.hbase.ServerName;
+047import 
org.apache.hadoop.hbase.TableName;
+048import 
org.apache.hadoop.hbase.TableNotFoundException;
+049import 
org.apache.hadoop.hbase.client.Append;
+050import 
org.apache.hadoop.hbase.client.Delete;
+051import 
org.apache.hadoop.hbase.client.Durability;
+052import 
org.apache.hadoop.hbase.client.Get;
+053import 
org.apache.hadoop.hbase.client.Increment;
+054import 
org.apache.hadoop.hbase.client.OperationWithAttributes;
+055import 
org.apache.hadoop.hbase.client.Put;
+056import 
org.apache.hadoop.hbase.client.RegionInfo;
+057import 
org.apache.hadoop.hbase.client.RegionLocator;
+058import 
org.apache.hadoop.hbase.client.Result;
+059import 
org.apache.hadoop.hbase.client.ResultScanner;
+060import 
org.apache.hadoop.hbase.client.Scan;
+061import 
org.apache.hadoop.hbase.client.Table;
+062import 
org.apache.hadoop.hbase.filter.Filter;
+063import 
org.apache.hadoop.hbase.filter.ParseFilter;
+064import 
org.apache.hadoop.hbase.filter.PrefixFilter;
+065import 
org.apache.hadoop.hbase.filter.WhileMatchFilter;
+066import 
org.apache.hadoop.hbase.security.UserProvider;
+067import 
org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
+068import 
org.apache.hadoop.hbase.thrift.generated.BatchMutation;
+069import 
org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
+070import 
org.apache.hadoop.hbase.thrift.generated.Hbase;
+071import 
org.apache.hadoop.hbase.thrift.generated.IOError;
+072import 
org.apache.hadoop.hbase.thrift.generated.IllegalArgument;
+073import 
org.apache.hadoop.hbase.thrift.generated.Mutation;
+074import 
org.apache.hadoop.hbase.thrift.generated.TAppend;
+075import 
org.apache.hadoop.hbase.thrift.generated.TCell;
+076import 
org.apache.hadoop.hbase.thrift.generated.TIncrement;
+077import 
org.apache.hadoop.hbase.thrift.generated.TRegionInfo;
+078import 
org.apache.hadoop.hbase.thrift.generated.TRowResult;
+079import 
org.apache.hadoop.hbase.thrift.generated.TScan;
+080import 
org.apache.hadoop.hbase.util.Bytes;
+081import org.apache.thrift.TException;
+082import 
org.apache.yetus.audience.InterfaceAudience;
+083import org.slf4j.Logger;
+084import org.slf4j.LoggerFactory;
+085
+086import 
org.apache.hbase.thirdparty.com.google.common.base.Throwables;
+087
+088/**
+089 * The HBaseServiceHandler is a glue 
object that connects Thrift 

[47/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 0d98b02..9bedc3e 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -1378,7 +1378,7 @@
 
 Add an attribute to the table descriptor
 
-addAttributes(OperationWithAttributes,
 MapByteBuffer, ByteBuffer) - Static method in class 
org.apache.hadoop.hbase.thrift.ThriftServerRunner
+addAttributes(OperationWithAttributes,
 MapByteBuffer, ByteBuffer) - Static method in class 
org.apache.hadoop.hbase.thrift.ThriftHBaseServiceHandler
 
 Adds all the attributes into the Operation object
 
@@ -2093,6 +2093,12 @@
 
 addOptions()
 - Method in class org.apache.hadoop.hbase.snapshot.SnapshotInfo
 
+addOptions(Options)
 - Method in class org.apache.hadoop.hbase.thrift.ThriftServer
+
+Add options to command lines
+
+addOptions(Options)
 - Method in class org.apache.hadoop.hbase.thrift2.ThriftServer
+
 addOptions()
 - Method in class org.apache.hadoop.hbase.tool.coprocessor.CoprocessorValidator
 
 addOptions()
 - Method in class org.apache.hadoop.hbase.tool.DataBlockEncodingValidator
@@ -2438,7 +2444,7 @@
 
 addScanner(RegionScanner)
 - Method in class org.apache.hadoop.hbase.regionserver.RSRpcServices.RegionScannersCloseCallBack
 
-addScanner(ResultScanner,
 boolean) - Method in class org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler
+addScanner(ResultScanner,
 boolean) - Method in class org.apache.hadoop.hbase.thrift.ThriftHBaseServiceHandler
 
 Assigns a unique ID to the scanner and adds the mapping to 
an internal
  hash-map.
@@ -3247,7 +3253,7 @@
 Validates the input request parameters, parses columns from 
CellSetModel,
  and invokes Append on HTable.
 
-append(TAppend)
 - Method in class org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler
+append(TAppend)
 - Method in class org.apache.hadoop.hbase.thrift.ThriftHBaseServiceHandler
 
 append(ByteBuffer,
 TAppend) - Method in class org.apache.hadoop.hbase.thrift2.ThriftHBaseServiceHandler
 
@@ -4396,9 +4402,9 @@
 
 AtomicAverageCounter()
 - Constructor for class org.apache.hadoop.hbase.client.HTableMultiplexer.AtomicAverageCounter
 
-atomicIncrement(ByteBuffer,
 ByteBuffer, ByteBuffer, long) - Method in class 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler
+atomicIncrement(ByteBuffer,
 ByteBuffer, ByteBuffer, long) - Method in class 
org.apache.hadoop.hbase.thrift.ThriftHBaseServiceHandler
 
-atomicIncrement(ByteBuffer,
 ByteBuffer, byte[], byte[], long) - Method in class 
org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler
+atomicIncrement(ByteBuffer,
 ByteBuffer, byte[], byte[], long) - Method in class 
org.apache.hadoop.hbase.thrift.ThriftHBaseServiceHandler
 
 AtomicUtils 
- Class in org.apache.hadoop.hbase.util
 
@@ -4897,9 +4903,9 @@
 
 backingStore
 - Variable in class org.apache.hadoop.hbase.io.util.LRUDictionary
 
-BACKLOG_CONF_KEY
 - Static variable in class org.apache.hadoop.hbase.thrift.ThriftServerRunner
+BACKLOG_CONF_DEAFULT
 - Static variable in class org.apache.hadoop.hbase.thrift.Constants
 
-BACKLOG_CONF_KEY
 - Static variable in class org.apache.hadoop.hbase.thrift2.ThriftServer
+BACKLOG_CONF_KEY
 - Static variable in class org.apache.hadoop.hbase.thrift.Constants
 
 BACKOFF_POLICY_CLASS
 - Static variable in interface org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicy
 
@@ -6064,9 +6070,9 @@
 
 BIND_ADDRESS
 - Static variable in class org.apache.hadoop.hbase.http.HttpServer
 
-BIND_CONF_KEY
 - Static variable in class org.apache.hadoop.hbase.thrift.ThriftServerRunner
+BIND_CONF_KEY
 - Static variable in class org.apache.hadoop.hbase.thrift.Constants
 
-BIND_OPTION
 - Static variable in class org.apache.hadoop.hbase.thrift.ThriftServer
+BIND_OPTION
 - Static variable in class org.apache.hadoop.hbase.thrift.Constants
 
 bindAddress
 - Variable in class org.apache.hadoop.hbase.http.HttpServer.Builder
 
@@ -6076,8 +6082,6 @@
 
 bindAddress
 - Variable in class org.apache.hadoop.hbase.ipc.RpcServer
 
-bindToPort(String,
 int) - Static method in class org.apache.hadoop.hbase.thrift2.ThriftServer
-
 bins
 - Variable in class org.apache.hadoop.hbase.metrics.impl.FastLongHistogram
 
 Bins(int)
 - Constructor for class org.apache.hadoop.hbase.metrics.impl.FastLongHistogram.Bins
@@ -9314,7 +9318,7 @@
 
 canSchedule(MasterProcedureEnv,
 HRegionLocation) - Method in class 
org.apache.hadoop.hbase.master.procedure.ReopenTableRegionsProcedure
 
-canSpecifyBindIP
 - Variable in enum org.apache.hadoop.hbase.thrift.ThriftServerRunner.ImplType
+canSpecifyBindIP
 - Variable in enum org.apache.hadoop.hbase.thrift.ImplType
 
 canSplit()
 - Method in class org.apache.hadoop.hbase.regionserver.HStore
 
@@ -9437,7 +9441,7 @@
 
 catalogJanitorSwitch(boolean)
 - Method in class org.apache.hadoop.hbase.client.RawAsyncHBaseAdmin
 
-cause
 - Variable in exception 

[37/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.IOErrorWithCause.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.IOErrorWithCause.html
 
b/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.IOErrorWithCause.html
deleted file mode 100644
index a32f4d1..000
--- 
a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.IOErrorWithCause.html
+++ /dev/null
@@ -1,414 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-
-
-
-ThriftServerRunner.IOErrorWithCause (Apache HBase 3.0.0-SNAPSHOT 
API)
-
-
-
-
-
-var methods = {"i0":10,"i1":10,"i2":10};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-
-
-JavaScript is disabled on your browser.
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-PrevClass
-NextClass
-
-
-Frames
-NoFrames
-
-
-AllClasses
-
-
-
-
-
-
-
-Summary:
-Nested|
-Field|
-Constr|
-Method
-
-
-Detail:
-Field|
-Constr|
-Method
-
-
-
-
-
-
-
-
-org.apache.hadoop.hbase.thrift
-Class 
ThriftServerRunner.IOErrorWithCause
-
-
-
-https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
-
-
-https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">java.lang.Throwable
-
-
-https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">java.lang.Exception
-
-
-org.apache.thrift.TException
-
-
-org.apache.hadoop.hbase.thrift.generated.IOError
-
-
-org.apache.hadoop.hbase.thrift.ThriftServerRunner.IOErrorWithCause
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-All Implemented Interfaces:
-https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable, https://docs.oracle.com/javase/8/docs/api/java/lang/Cloneable.html?is-external=true;
 title="class or interface in java.lang">Cloneable, https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in 
java.lang">Comparableorg.apache.hadoop.hbase.thrift.generated.IOError,
 
org.apache.thrift.TBaseorg.apache.hadoop.hbase.thrift.generated.IOError,org.apache.hadoop.hbase.thrift.generated.IOError._Fields
-
-
-Enclosing class:
-ThriftServerRunner
-
-
-
-public static class ThriftServerRunner.IOErrorWithCause
-extends org.apache.hadoop.hbase.thrift.generated.IOError
-
-See Also:
-Serialized
 Form
-
-
-
-
-
-
-
-
-
-
-
-
-Nested Class Summary
-
-
-
-
-Nested classes/interfaces inherited from 
classorg.apache.hadoop.hbase.thrift.generated.IOError
-org.apache.hadoop.hbase.thrift.generated.IOError._Fields
-
-
-
-
-
-
-
-
-Field Summary
-
-Fields
-
-Modifier and Type
-Field and Description
-
-
-private https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">Throwable
-cause
-
-
-
-
-
-
-Fields inherited from 
classorg.apache.hadoop.hbase.thrift.generated.IOError
-message, metaDataMap
-
-
-
-
-
-
-
-
-Constructor Summary
-
-Constructors
-
-Constructor and Description
-
-
-IOErrorWithCause(https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in 
java.lang">Throwablecause)
-
-
-
-
-
-
-
-
-
-Method Summary
-
-All MethodsInstance MethodsConcrete Methods
-
-Modifier and Type
-Method and Description
-
-
-boolean
-equals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in 
java.lang">Objectother)
-
-
-https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">Throwable
-getCause()
-
-
-int
-hashCode()
-
-
-
-
-
-
-Methods inherited from 
classorg.apache.hadoop.hbase.thrift.generated.IOError
-clear, compareTo, deepCopy, equals, fieldForId, getFieldValue, 
getMessage, isSet, isSetMessage, read, setFieldValue, setMessage, 
setMessageIsSet, toString, unsetMessage, validate, write
-
-
-
-
-
-Methods inherited from classjava.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in 

[23/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift/Constants.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/Constants.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/Constants.html
new file mode 100644
index 000..d28d77e
--- /dev/null
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/Constants.html
@@ -0,0 +1,223 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+Source code
+
+
+
+
+001/*
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License.  You may obtain a 
copy of the License at
+009 *
+010 * 
http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or 
agreed to in writing, software
+013 * distributed under the License is 
distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+015 * See the License for the specific 
language governing permissions and
+016 * limitations under the License.
+017 */
+018package org.apache.hadoop.hbase.thrift;
+019
+020import 
org.apache.yetus.audience.InterfaceAudience;
+021
+022/**
+023 * Thrift related constants
+024 */
+025@InterfaceAudience.Private
+026public final class Constants {
+027  private Constants(){}
+028
+029  public static final int 
DEFAULT_HTTP_MAX_HEADER_SIZE = 64 * 1024; // 64k
+030
+031  public static final String 
SERVER_TYPE_CONF_KEY =
+032  
"hbase.regionserver.thrift.server.type";
+033
+034  public static final String 
COMPACT_CONF_KEY = "hbase.regionserver.thrift.compact";
+035  public static final boolean 
COMPACT_CONF_DEFAULT = false;
+036
+037  public static final String 
FRAMED_CONF_KEY = "hbase.regionserver.thrift.framed";
+038  public static final boolean 
FRAMED_CONF_DEFAULT = false;
+039
+040  public static final String 
MAX_FRAME_SIZE_CONF_KEY =
+041  
"hbase.regionserver.thrift.framed.max_frame_size_in_mb";
+042  public static final int 
MAX_FRAME_SIZE_CONF_DEFAULT = 2;
+043
+044  public static final String 
COALESCE_INC_KEY = "hbase.regionserver.thrift.coalesceIncrement";
+045  public static final String 
USE_HTTP_CONF_KEY = "hbase.regionserver.thrift.http";
+046
+047  public static final String 
HTTP_MIN_THREADS_KEY = "hbase.thrift.http_threads.min";
+048  public static final int 
HTTP_MIN_THREADS_KEY_DEFAULT = 2;
+049
+050  public static final String 
HTTP_MAX_THREADS_KEY = "hbase.thrift.http_threads.max";
+051  public static final int 
HTTP_MAX_THREADS_KEY_DEFAULT = 100;
+052
+053  // ssl related configs
+054  public static final String 
THRIFT_SSL_ENABLED_KEY = "hbase.thrift.ssl.enabled";
+055  public static final String 
THRIFT_SSL_KEYSTORE_STORE_KEY = "hbase.thrift.ssl.keystore.store";
+056  public static final String 
THRIFT_SSL_KEYSTORE_PASSWORD_KEY =
+057  
"hbase.thrift.ssl.keystore.password";
+058  public static final String 
THRIFT_SSL_KEYSTORE_KEYPASSWORD_KEY
+059  = 
"hbase.thrift.ssl.keystore.keypassword";
+060  public static final String 
THRIFT_SSL_EXCLUDE_CIPHER_SUITES_KEY =
+061  
"hbase.thrift.ssl.exclude.cipher.suites";
+062  public static final String 
THRIFT_SSL_INCLUDE_CIPHER_SUITES_KEY =
+063  
"hbase.thrift.ssl.include.cipher.suites";
+064  public static final String 
THRIFT_SSL_EXCLUDE_PROTOCOLS_KEY =
+065  
"hbase.thrift.ssl.exclude.protocols";
+066  public static final String 
THRIFT_SSL_INCLUDE_PROTOCOLS_KEY =
+067  
"hbase.thrift.ssl.include.protocols";
+068
+069
+070  public static final String 
THRIFT_SUPPORT_PROXYUSER_KEY = "hbase.thrift.support.proxyuser";
+071
+072  //kerberos related configs
+073  public static final String 
THRIFT_DNS_INTERFACE_KEY = "hbase.thrift.dns.interface";
+074  public static final String 
THRIFT_DNS_NAMESERVER_KEY = "hbase.thrift.dns.nameserver";
+075  public static final String 
THRIFT_KERBEROS_PRINCIPAL_KEY = "hbase.thrift.kerberos.principal";
+076  public static final String 
THRIFT_KEYTAB_FILE_KEY = "hbase.thrift.keytab.file";
+077  public static final String 
THRIFT_SPNEGO_PRINCIPAL_KEY = "hbase.thrift.spnego.principal";
+078  public static final String 
THRIFT_SPNEGO_KEYTAB_FILE_KEY = "hbase.thrift.spnego.keytab.file";
+079
+080  /**
+081   * Amount of time in milliseconds 
before a server thread will timeout
+082   * waiting for client to send data on a 
connected socket. Currently,
+083   * applies only to 
TBoundedThreadPoolServer
+084   */
+085  public static final String 
THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY =
+086  
"hbase.thrift.server.socket.read.timeout";
+087  public static final int 

[10/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
index 1a0f64e..2290ca8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
@@ -26,849 +26,796 @@
 018 */
 019package 
org.apache.hadoop.hbase.thrift2;
 020
-021import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.appendFromThrift;
-022import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.columnFamilyDescriptorFromThrift;
-023import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.compareOpFromThrift;
-024import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.deleteFromThrift;
-025import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.deletesFromThrift;
-026import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.getFromThrift;
-027import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.getsFromThrift;
-028import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.incrementFromThrift;
-029import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.namespaceDescriptorFromHBase;
-030import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.namespaceDescriptorFromThrift;
-031import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.namespaceDescriptorsFromHBase;
-032import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.putFromThrift;
-033import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.putsFromThrift;
-034import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.resultFromHBase;
-035import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.resultsFromHBase;
-036import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.rowMutationsFromThrift;
-037import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.scanFromThrift;
-038import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.splitKeyFromThrift;
-039import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableDescriptorFromHBase;
-040import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableDescriptorFromThrift;
-041import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableDescriptorsFromHBase;
-042import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableNameFromThrift;
-043import static 
org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableNamesFromHBase;
-044import static 
org.apache.thrift.TBaseHelper.byteBufferToByteArray;
-045
-046import java.io.IOException;
-047import 
java.lang.reflect.InvocationHandler;
-048import 
java.lang.reflect.InvocationTargetException;
-049import java.lang.reflect.Method;
-050import java.lang.reflect.Proxy;
-051import java.nio.ByteBuffer;
-052import java.util.ArrayList;
-053import java.util.Collections;
-054import java.util.List;
-055import java.util.Map;
-056import 
java.util.concurrent.ConcurrentHashMap;
-057import 
java.util.concurrent.atomic.AtomicInteger;
-058import java.util.regex.Pattern;
-059
-060import 
org.apache.hadoop.conf.Configuration;
-061import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-062import 
org.apache.hadoop.hbase.HRegionLocation;
-063import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-064import 
org.apache.hadoop.hbase.TableName;
-065import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-066import 
org.apache.hadoop.hbase.client.RegionLocator;
-067import 
org.apache.hadoop.hbase.client.ResultScanner;
-068import 
org.apache.hadoop.hbase.client.Table;
-069import 
org.apache.hadoop.hbase.client.TableDescriptor;
-070import 
org.apache.hadoop.hbase.security.UserProvider;
-071import 
org.apache.hadoop.hbase.thrift.ThriftMetrics;
-072import 
org.apache.hadoop.hbase.thrift2.generated.TAppend;
-073import 
org.apache.hadoop.hbase.thrift2.generated.TColumnFamilyDescriptor;
-074import 
org.apache.hadoop.hbase.thrift2.generated.TCompareOp;
-075import 
org.apache.hadoop.hbase.thrift2.generated.TDelete;
-076import 
org.apache.hadoop.hbase.thrift2.generated.TGet;
-077import 
org.apache.hadoop.hbase.thrift2.generated.THBaseService;
-078import 
org.apache.hadoop.hbase.thrift2.generated.THRegionLocation;
-079import 
org.apache.hadoop.hbase.thrift2.generated.TIOError;
-080import 
org.apache.hadoop.hbase.thrift2.generated.TIllegalArgument;
-081import 
org.apache.hadoop.hbase.thrift2.generated.TIncrement;
-082import 
org.apache.hadoop.hbase.thrift2.generated.TNamespaceDescriptor;
-083import 
org.apache.hadoop.hbase.thrift2.generated.TPut;
-084import 
org.apache.hadoop.hbase.thrift2.generated.TResult;
-085import 

[04/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/testdevapidocs/org/apache/hadoop/hbase/thrift2/TestThrift2ServerCmdLine.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/thrift2/TestThrift2ServerCmdLine.html 
b/testdevapidocs/org/apache/hadoop/hbase/thrift2/TestThrift2ServerCmdLine.html
new file mode 100644
index 000..54f2a06
--- /dev/null
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/thrift2/TestThrift2ServerCmdLine.html
@@ -0,0 +1,374 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+TestThrift2ServerCmdLine (Apache HBase 3.0.0-SNAPSHOT Test API)
+
+
+
+
+
+var methods = {"i0":10,"i1":10};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+PrevClass
+NextClass
+
+
+Frames
+NoFrames
+
+
+AllClasses
+
+
+
+
+
+
+
+Summary:
+Nested|
+Field|
+Constr|
+Method
+
+
+Detail:
+Field|
+Constr|
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.thrift2
+Class 
TestThrift2ServerCmdLine
+
+
+
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.thrift.TestThriftServerCmdLine
+
+
+org.apache.hadoop.hbase.thrift2.TestThrift2ServerCmdLine
+
+
+
+
+
+
+
+
+
+
+public class TestThrift2ServerCmdLine
+extends TestThriftServerCmdLine
+
+
+
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+static HBaseClassTestRule
+CLASS_RULE
+
+
+private static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+TABLENAME
+
+
+
+
+
+
+Fields inherited from classorg.apache.hadoop.hbase.thrift.TestThriftServerCmdLine
+implType,
 port,
 specifyBindIP,
 specifyCompact,
 specifyFramed,
 tableCreated,
 TEST_UTIL
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Constructor and Description
+
+
+TestThrift2ServerCmdLine(org.apache.hadoop.hbase.thrift.ImplTypeimplType,
+booleanspecifyFramed,
+booleanspecifyBindIP,
+booleanspecifyCompact)
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All MethodsInstance MethodsConcrete Methods
+
+Modifier and Type
+Method and Description
+
+
+protected 
org.apache.hadoop.hbase.thrift2.ThriftServer
+createThriftServer()
+
+
+protected void
+talkToThriftServer()
+
+
+
+
+
+
+Methods inherited from classorg.apache.hadoop.hbase.thrift.TestThriftServerCmdLine
+getParameters,
 setUpBeforeClass,
 tearDownAfterClass,
 testRunThriftServer
+
+
+
+
+
+Methods inherited from classjava.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or interface in java.lang">hashCode, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, https://docs.oracle.com/javase/8/docs/api/ja
 va/lang/Object.html?is-external=true#notifyAll--" title="class or interface in 
java.lang">notifyAll, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--;
 title="class or interface in java.lang">toString, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--;
 title="class or interface in java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-;
 title="class or interface in java.lang">wait, 

[14/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html
deleted file mode 100644
index e692633..000
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html
+++ /dev/null
@@ -1,2103 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-Source code
-
-
-
-
-001/*
-002 * Licensed to the Apache Software 
Foundation (ASF) under one
-003 * or more contributor license 
agreements.  See the NOTICE file
-004 * distributed with this work for 
additional information
-005 * regarding copyright ownership.  The 
ASF licenses this file
-006 * to you under the Apache License, 
Version 2.0 (the
-007 * "License"); you may not use this file 
except in compliance
-008 * with the License.  You may obtain a 
copy of the License at
-009 *
-010 * 
http://www.apache.org/licenses/LICENSE-2.0
-011 *
-012 * Unless required by applicable law or 
agreed to in writing, software
-013 * distributed under the License is 
distributed on an "AS IS" BASIS,
-014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-015 * See the License for the specific 
language governing permissions and
-016 * limitations under the License.
-017 */
-018
-019package org.apache.hadoop.hbase.thrift;
-020
-021import static 
org.apache.hadoop.hbase.util.Bytes.getBytes;
-022
-023import java.io.IOException;
-024import java.net.InetAddress;
-025import java.net.InetSocketAddress;
-026import java.net.UnknownHostException;
-027import java.nio.ByteBuffer;
-028import java.security.PrivilegedAction;
-029import java.util.ArrayList;
-030import java.util.Arrays;
-031import java.util.Collections;
-032import java.util.HashMap;
-033import java.util.List;
-034import java.util.Map;
-035import java.util.TreeMap;
-036import 
java.util.concurrent.BlockingQueue;
-037import 
java.util.concurrent.ExecutorService;
-038import 
java.util.concurrent.LinkedBlockingQueue;
-039import 
java.util.concurrent.ThreadPoolExecutor;
-040import java.util.concurrent.TimeUnit;
-041
-042import 
javax.security.auth.callback.Callback;
-043import 
javax.security.auth.callback.UnsupportedCallbackException;
-044import 
javax.security.sasl.AuthorizeCallback;
-045import javax.security.sasl.SaslServer;
-046
-047import 
org.apache.commons.lang3.ArrayUtils;
-048import 
org.apache.hadoop.conf.Configuration;
-049import 
org.apache.hadoop.hbase.Cell.Type;
-050import 
org.apache.hadoop.hbase.CellBuilder;
-051import 
org.apache.hadoop.hbase.CellBuilderFactory;
-052import 
org.apache.hadoop.hbase.CellBuilderType;
-053import 
org.apache.hadoop.hbase.CellUtil;
-054import 
org.apache.hadoop.hbase.HBaseConfiguration;
-055import 
org.apache.hadoop.hbase.HColumnDescriptor;
-056import 
org.apache.hadoop.hbase.HConstants;
-057import 
org.apache.hadoop.hbase.HRegionLocation;
-058import 
org.apache.hadoop.hbase.HTableDescriptor;
-059import 
org.apache.hadoop.hbase.KeyValue;
-060import 
org.apache.hadoop.hbase.MetaTableAccessor;
-061import 
org.apache.hadoop.hbase.ServerName;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotFoundException;
-064import 
org.apache.hadoop.hbase.client.Admin;
-065import 
org.apache.hadoop.hbase.client.Append;
-066import 
org.apache.hadoop.hbase.client.Delete;
-067import 
org.apache.hadoop.hbase.client.Durability;
-068import 
org.apache.hadoop.hbase.client.Get;
-069import 
org.apache.hadoop.hbase.client.Increment;
-070import 
org.apache.hadoop.hbase.client.OperationWithAttributes;
-071import 
org.apache.hadoop.hbase.client.Put;
-072import 
org.apache.hadoop.hbase.client.RegionInfo;
-073import 
org.apache.hadoop.hbase.client.RegionLocator;
-074import 
org.apache.hadoop.hbase.client.Result;
-075import 
org.apache.hadoop.hbase.client.ResultScanner;
-076import 
org.apache.hadoop.hbase.client.Scan;
-077import 
org.apache.hadoop.hbase.client.Table;
-078import 
org.apache.hadoop.hbase.filter.Filter;
-079import 
org.apache.hadoop.hbase.filter.ParseFilter;
-080import 
org.apache.hadoop.hbase.filter.PrefixFilter;
-081import 
org.apache.hadoop.hbase.filter.WhileMatchFilter;
-082import 
org.apache.hadoop.hbase.http.HttpServerUtil;
-083import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-084import 
org.apache.hadoop.hbase.security.SaslUtil;
-085import 
org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-086import 
org.apache.hadoop.hbase.security.SecurityUtil;
-087import 
org.apache.hadoop.hbase.security.UserProvider;
-088import 
org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
-089import 
org.apache.hadoop.hbase.thrift.generated.BatchMutation;
-090import 
org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
-091import 

[4/4] hbase git commit: HBASE-21652 Refactor ThriftServer making thrift2 server inherited from thrift1 server

2019-01-02 Thread allan163
HBASE-21652 Refactor ThriftServer making thrift2 server inherited from thrift1 
server


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2d8d74c6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2d8d74c6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2d8d74c6

Branch: refs/heads/branch-2
Commit: 2d8d74c64db6dd1894fd8f6e409f3e7fae426d11
Parents: 0b08608
Author: Allan Yang 
Authored: Wed Jan 2 17:34:31 2019 +0800
Committer: Allan Yang 
Committed: Wed Jan 2 17:34:31 2019 +0800

--
 .../apache/hadoop/hbase/thrift/Constants.java   |  151 ++
 .../hbase/thrift/HBaseServiceHandler.java   |   90 +
 .../hbase/thrift/HbaseHandlerMetricsProxy.java  |   20 +-
 .../apache/hadoop/hbase/thrift/ImplType.java|  143 ++
 .../hadoop/hbase/thrift/IncrementCoalescer.java |6 +-
 .../hbase/thrift/ThriftHBaseServiceHandler.java | 1347 
 .../hadoop/hbase/thrift/ThriftHttpServlet.java  |   12 +-
 .../hadoop/hbase/thrift/ThriftServer.java   |  709 +-
 .../hadoop/hbase/thrift/ThriftServerRunner.java | 2026 --
 .../thrift2/ThriftHBaseServiceHandler.java  |   69 +-
 .../hadoop/hbase/thrift2/ThriftServer.java  |  581 +
 .../resources/hbase-webapps/thrift/thrift.jsp   |2 +-
 .../hbase/thrift/TestThriftHttpServer.java  |   28 +-
 .../hadoop/hbase/thrift/TestThriftServer.java   |   58 +-
 .../hbase/thrift/TestThriftServerCmdLine.java   |   48 +-
 .../thrift/TestThriftSpnegoHttpServer.java  |   21 +-
 .../hbase/thrift2/TestThrift2HttpServer.java|   90 +
 .../hbase/thrift2/TestThrift2ServerCmdLine.java |   99 +
 .../thrift2/TestThriftHBaseServiceHandler.java  |   15 +-
 19 files changed, 2717 insertions(+), 2798 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2d8d74c6/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java
new file mode 100644
index 000..8e3d004
--- /dev/null
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.thrift;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * Thrift related constants
+ */
+@InterfaceAudience.Private
+public final class Constants {
+  private Constants(){}
+
+  public static final int DEFAULT_HTTP_MAX_HEADER_SIZE = 64 * 1024; // 64k
+
+  public static final String SERVER_TYPE_CONF_KEY =
+  "hbase.regionserver.thrift.server.type";
+
+  public static final String COMPACT_CONF_KEY = 
"hbase.regionserver.thrift.compact";
+  public static final boolean COMPACT_CONF_DEFAULT = false;
+
+  public static final String FRAMED_CONF_KEY = 
"hbase.regionserver.thrift.framed";
+  public static final boolean FRAMED_CONF_DEFAULT = false;
+
+  public static final String MAX_FRAME_SIZE_CONF_KEY =
+  "hbase.regionserver.thrift.framed.max_frame_size_in_mb";
+  public static final int MAX_FRAME_SIZE_CONF_DEFAULT = 2;
+
+  public static final String COALESCE_INC_KEY = 
"hbase.regionserver.thrift.coalesceIncrement";
+  public static final String USE_HTTP_CONF_KEY = 
"hbase.regionserver.thrift.http";
+
+  public static final String HTTP_MIN_THREADS_KEY = 
"hbase.thrift.http_threads.min";
+  public static final int HTTP_MIN_THREADS_KEY_DEFAULT = 2;
+
+  public static final String HTTP_MAX_THREADS_KEY = 
"hbase.thrift.http_threads.max";
+  public static final int HTTP_MAX_THREADS_KEY_DEFAULT = 100;
+
+  // ssl related configs
+  public static final String THRIFT_SSL_ENABLED_KEY = 
"hbase.thrift.ssl.enabled";
+  public static final String THRIFT_SSL_KEYSTORE_STORE_KEY = 
"hbase.thrift.ssl.keystore.store";
+  public static final String THRIFT_SSL_KEYSTORE_PASSWORD_KEY =
+  "hbase.thrift.ssl.keystore.password";
+  public static final String 

[3/4] hbase git commit: HBASE-21652 Refactor ThriftServer making thrift2 server inherited from thrift1 server

2019-01-02 Thread allan163
http://git-wip-us.apache.org/repos/asf/hbase/blob/2d8d74c6/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
index 8dadd49..d5e75b8 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
@@ -18,16 +18,132 @@
 
 package org.apache.hadoop.hbase.thrift;
 
+import static org.apache.hadoop.hbase.thrift.Constants.BACKLOG_CONF_DEAFULT;
+import static org.apache.hadoop.hbase.thrift.Constants.BACKLOG_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.BIND_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.BIND_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.COMPACT_CONF_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.COMPACT_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.COMPACT_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.DEFAULT_BIND_ADDR;
+import static 
org.apache.hadoop.hbase.thrift.Constants.DEFAULT_HTTP_MAX_HEADER_SIZE;
+import static org.apache.hadoop.hbase.thrift.Constants.DEFAULT_LISTEN_PORT;
+import static org.apache.hadoop.hbase.thrift.Constants.FRAMED_CONF_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.FRAMED_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.FRAMED_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.HTTP_MAX_THREADS_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.HTTP_MAX_THREADS_KEY_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.HTTP_MIN_THREADS_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.HTTP_MIN_THREADS_KEY_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.INFOPORT_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.KEEP_ALIVE_SEC_OPTION;
+import static 
org.apache.hadoop.hbase.thrift.Constants.MAX_FRAME_SIZE_CONF_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.MAX_FRAME_SIZE_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.MAX_QUEUE_SIZE_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.MAX_WORKERS_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.MIN_WORKERS_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.PORT_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.PORT_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.READ_TIMEOUT_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.SELECTOR_NUM_OPTION;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_DNS_INTERFACE_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_DNS_NAMESERVER_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_FILTERS;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_HTTP_ALLOW_OPTIONS_METHOD;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_HTTP_ALLOW_OPTIONS_METHOD_DEFAULT;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_INFO_SERVER_BINDING_ADDRESS;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_INFO_SERVER_BINDING_ADDRESS_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_INFO_SERVER_PORT;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_INFO_SERVER_PORT_DEFAULT;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_KERBEROS_PRINCIPAL_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_KEYTAB_FILE_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_QOP_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SELECTOR_NUM;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SERVER_SOCKET_READ_TIMEOUT_DEFAULT;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_ENABLED_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_EXCLUDE_CIPHER_SUITES_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_EXCLUDE_PROTOCOLS_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_INCLUDE_CIPHER_SUITES_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_INCLUDE_PROTOCOLS_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_KEYSTORE_KEYPASSWORD_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_KEYSTORE_PASSWORD_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_KEYSTORE_STORE_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SUPPORT_PROXYUSER_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.USE_HTTP_CONF_KEY;
+
+import 

[1/4] hbase git commit: HBASE-21652 Refactor ThriftServer making thrift2 server inherited from thrift1 server

2019-01-02 Thread allan163
Repository: hbase
Updated Branches:
  refs/heads/branch-2 0b086087b -> 2d8d74c64


http://git-wip-us.apache.org/repos/asf/hbase/blob/2d8d74c6/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
index 7a611c9..fa3d39d 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
@@ -18,351 +18,86 @@
  */
 package org.apache.hadoop.hbase.thrift2;
 
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.UnknownHostException;
-import java.security.PrivilegedAction;
-import java.util.Map;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.SynchronousQueue;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
+import static org.apache.hadoop.hbase.thrift.Constants.READONLY_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_READONLY_ENABLED;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_READONLY_ENABLED_DEFAULT;
 
-import javax.security.auth.callback.Callback;
-import javax.security.auth.callback.UnsupportedCallbackException;
-import javax.security.sasl.AuthorizeCallback;
-import javax.security.sasl.SaslServer;
+import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.filter.ParseFilter;
-import org.apache.hadoop.hbase.http.InfoServer;
-import org.apache.hadoop.hbase.security.SaslUtil;
-import org.apache.hadoop.hbase.security.SecurityUtil;
 import org.apache.hadoop.hbase.security.UserProvider;
-import org.apache.hadoop.hbase.thrift.CallQueue;
-import org.apache.hadoop.hbase.thrift.THBaseThreadPoolExecutor;
-import org.apache.hadoop.hbase.thrift.ThriftMetrics;
+import org.apache.hadoop.hbase.thrift.HBaseServiceHandler;
+import org.apache.hadoop.hbase.thrift.HbaseHandlerMetricsProxy;
 import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
-import org.apache.hadoop.hbase.util.DNS;
-import org.apache.hadoop.hbase.util.JvmPauseMonitor;
-import org.apache.hadoop.hbase.util.Strings;
-import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.thrift.TException;
 import org.apache.thrift.TProcessor;
-import org.apache.thrift.protocol.TBinaryProtocol;
-import org.apache.thrift.protocol.TCompactProtocol;
-import org.apache.thrift.protocol.TProtocol;
-import org.apache.thrift.protocol.TProtocolFactory;
-import org.apache.thrift.server.THsHaServer;
-import org.apache.thrift.server.TNonblockingServer;
-import org.apache.thrift.server.TServer;
-import org.apache.thrift.server.TThreadPoolServer;
-import org.apache.thrift.server.TThreadedSelectorServer;
-import org.apache.thrift.transport.TFramedTransport;
-import org.apache.thrift.transport.TNonblockingServerSocket;
-import org.apache.thrift.transport.TNonblockingServerTransport;
-import org.apache.thrift.transport.TSaslServerTransport;
-import org.apache.thrift.transport.TServerSocket;
-import org.apache.thrift.transport.TServerTransport;
-import org.apache.thrift.transport.TTransportException;
-import org.apache.thrift.transport.TTransportFactory;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
-import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLineParser;
-import org.apache.hbase.thirdparty.org.apache.commons.cli.DefaultParser;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
-import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;
-import org.apache.hbase.thirdparty.org.apache.commons.cli.OptionGroup;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
-import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
 
 /**
- * ThriftServer - this class starts up a Thrift server which implements the 
HBase API specified in the
- * HbaseClient.thrift IDL file.
+ * ThriftServer - this class starts up a Thrift server which implements the 
HBase API specified in
+ * the HbaseClient.thrift IDL file.
  */
+@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = 

[2/4] hbase git commit: HBASE-21652 Refactor ThriftServer making thrift2 server inherited from thrift1 server

2019-01-02 Thread allan163
http://git-wip-us.apache.org/repos/asf/hbase/blob/2d8d74c6/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
deleted file mode 100644
index b510ff5..000
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ /dev/null
@@ -1,2026 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.thrift;
-
-import static org.apache.hadoop.hbase.util.Bytes.getBytes;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.UnknownHostException;
-import java.nio.ByteBuffer;
-import java.security.PrivilegedAction;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-
-import javax.security.auth.callback.Callback;
-import javax.security.auth.callback.UnsupportedCallbackException;
-import javax.security.sasl.AuthorizeCallback;
-import javax.security.sasl.SaslServer;
-
-import org.apache.commons.lang3.ArrayUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.Cell.Type;
-import org.apache.hadoop.hbase.CellBuilder;
-import org.apache.hadoop.hbase.CellBuilderFactory;
-import org.apache.hadoop.hbase.CellBuilderType;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.MetaTableAccessor;
-import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Append;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Durability;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Increment;
-import org.apache.hadoop.hbase.client.OperationWithAttributes;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.RegionInfo;
-import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.ParseFilter;
-import org.apache.hadoop.hbase.filter.PrefixFilter;
-import org.apache.hadoop.hbase.filter.WhileMatchFilter;
-import org.apache.hadoop.hbase.http.HttpServerUtil;
-import org.apache.hadoop.hbase.log.HBaseMarkers;
-import org.apache.hadoop.hbase.security.SaslUtil;
-import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-import org.apache.hadoop.hbase.security.SecurityUtil;
-import org.apache.hadoop.hbase.security.UserProvider;
-import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
-import org.apache.hadoop.hbase.thrift.generated.BatchMutation;
-import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
-import org.apache.hadoop.hbase.thrift.generated.Hbase;
-import org.apache.hadoop.hbase.thrift.generated.IOError;
-import org.apache.hadoop.hbase.thrift.generated.IllegalArgument;
-import org.apache.hadoop.hbase.thrift.generated.Mutation;
-import org.apache.hadoop.hbase.thrift.generated.TAppend;
-import org.apache.hadoop.hbase.thrift.generated.TCell;
-import org.apache.hadoop.hbase.thrift.generated.TIncrement;
-import 

[3/4] hbase git commit: HBASE-21652 Refactor ThriftServer making thrift2 server inherited from thrift1 server

2019-01-02 Thread allan163
http://git-wip-us.apache.org/repos/asf/hbase/blob/e4b6b4af/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
index fc00327..6d11ac6 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
@@ -18,16 +18,132 @@
 
 package org.apache.hadoop.hbase.thrift;
 
+import static org.apache.hadoop.hbase.thrift.Constants.BACKLOG_CONF_DEAFULT;
+import static org.apache.hadoop.hbase.thrift.Constants.BACKLOG_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.BIND_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.BIND_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.COMPACT_CONF_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.COMPACT_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.COMPACT_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.DEFAULT_BIND_ADDR;
+import static 
org.apache.hadoop.hbase.thrift.Constants.DEFAULT_HTTP_MAX_HEADER_SIZE;
+import static org.apache.hadoop.hbase.thrift.Constants.DEFAULT_LISTEN_PORT;
+import static org.apache.hadoop.hbase.thrift.Constants.FRAMED_CONF_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.FRAMED_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.FRAMED_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.HTTP_MAX_THREADS_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.HTTP_MAX_THREADS_KEY_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.HTTP_MIN_THREADS_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.HTTP_MIN_THREADS_KEY_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.INFOPORT_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.KEEP_ALIVE_SEC_OPTION;
+import static 
org.apache.hadoop.hbase.thrift.Constants.MAX_FRAME_SIZE_CONF_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.MAX_FRAME_SIZE_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.MAX_QUEUE_SIZE_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.MAX_WORKERS_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.MIN_WORKERS_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.PORT_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.PORT_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.READ_TIMEOUT_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.SELECTOR_NUM_OPTION;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_DNS_INTERFACE_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_DNS_NAMESERVER_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_FILTERS;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_HTTP_ALLOW_OPTIONS_METHOD;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_HTTP_ALLOW_OPTIONS_METHOD_DEFAULT;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_INFO_SERVER_BINDING_ADDRESS;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_INFO_SERVER_BINDING_ADDRESS_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_INFO_SERVER_PORT;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_INFO_SERVER_PORT_DEFAULT;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_KERBEROS_PRINCIPAL_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_KEYTAB_FILE_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_QOP_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SELECTOR_NUM;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SERVER_SOCKET_READ_TIMEOUT_DEFAULT;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_ENABLED_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_EXCLUDE_CIPHER_SUITES_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_EXCLUDE_PROTOCOLS_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_INCLUDE_CIPHER_SUITES_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_INCLUDE_PROTOCOLS_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_KEYSTORE_KEYPASSWORD_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_KEYSTORE_PASSWORD_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_KEYSTORE_STORE_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SUPPORT_PROXYUSER_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.USE_HTTP_CONF_KEY;
+
+import 

[4/4] hbase git commit: HBASE-21652 Refactor ThriftServer making thrift2 server inherited from thrift1 server

2019-01-02 Thread allan163
HBASE-21652 Refactor ThriftServer making thrift2 server inherited from thrift1 
server


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e4b6b4af
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e4b6b4af
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e4b6b4af

Branch: refs/heads/master
Commit: e4b6b4afb933a961f543537875f87a2dc62d3757
Parents: f0b50a8
Author: Allan Yang 
Authored: Wed Jan 2 16:13:17 2019 +0800
Committer: Allan Yang 
Committed: Wed Jan 2 16:13:57 2019 +0800

--
 .../apache/hadoop/hbase/thrift/Constants.java   |  151 ++
 .../hbase/thrift/HBaseServiceHandler.java   |   90 +
 .../hbase/thrift/HbaseHandlerMetricsProxy.java  |   20 +-
 .../apache/hadoop/hbase/thrift/ImplType.java|  143 ++
 .../hadoop/hbase/thrift/IncrementCoalescer.java |6 +-
 .../hbase/thrift/ThriftHBaseServiceHandler.java | 1347 
 .../hadoop/hbase/thrift/ThriftHttpServlet.java  |   12 +-
 .../hadoop/hbase/thrift/ThriftServer.java   |  698 +-
 .../hadoop/hbase/thrift/ThriftServerRunner.java | 2031 --
 .../thrift2/ThriftHBaseServiceHandler.java  |   69 +-
 .../hadoop/hbase/thrift2/ThriftServer.java  |  594 +
 .../resources/hbase-webapps/thrift/thrift.jsp   |2 +-
 .../hbase/thrift/TestThriftHttpServer.java  |   28 +-
 .../hadoop/hbase/thrift/TestThriftServer.java   |   58 +-
 .../hbase/thrift/TestThriftServerCmdLine.java   |   48 +-
 .../thrift/TestThriftSpnegoHttpServer.java  |   21 +-
 .../hbase/thrift2/TestThrift2HttpServer.java|   90 +
 .../hbase/thrift2/TestThrift2ServerCmdLine.java |   99 +
 .../thrift2/TestThriftHBaseServiceHandler.java  |   15 +-
 19 files changed, 2711 insertions(+), 2811 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e4b6b4af/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java
new file mode 100644
index 000..8e3d004
--- /dev/null
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.thrift;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * Thrift related constants
+ */
+@InterfaceAudience.Private
+public final class Constants {
+  private Constants(){}
+
+  public static final int DEFAULT_HTTP_MAX_HEADER_SIZE = 64 * 1024; // 64k
+
+  public static final String SERVER_TYPE_CONF_KEY =
+  "hbase.regionserver.thrift.server.type";
+
+  public static final String COMPACT_CONF_KEY = 
"hbase.regionserver.thrift.compact";
+  public static final boolean COMPACT_CONF_DEFAULT = false;
+
+  public static final String FRAMED_CONF_KEY = 
"hbase.regionserver.thrift.framed";
+  public static final boolean FRAMED_CONF_DEFAULT = false;
+
+  public static final String MAX_FRAME_SIZE_CONF_KEY =
+  "hbase.regionserver.thrift.framed.max_frame_size_in_mb";
+  public static final int MAX_FRAME_SIZE_CONF_DEFAULT = 2;
+
+  public static final String COALESCE_INC_KEY = 
"hbase.regionserver.thrift.coalesceIncrement";
+  public static final String USE_HTTP_CONF_KEY = 
"hbase.regionserver.thrift.http";
+
+  public static final String HTTP_MIN_THREADS_KEY = 
"hbase.thrift.http_threads.min";
+  public static final int HTTP_MIN_THREADS_KEY_DEFAULT = 2;
+
+  public static final String HTTP_MAX_THREADS_KEY = 
"hbase.thrift.http_threads.max";
+  public static final int HTTP_MAX_THREADS_KEY_DEFAULT = 100;
+
+  // ssl related configs
+  public static final String THRIFT_SSL_ENABLED_KEY = 
"hbase.thrift.ssl.enabled";
+  public static final String THRIFT_SSL_KEYSTORE_STORE_KEY = 
"hbase.thrift.ssl.keystore.store";
+  public static final String THRIFT_SSL_KEYSTORE_PASSWORD_KEY =
+  "hbase.thrift.ssl.keystore.password";
+  public static final String 

[1/4] hbase git commit: HBASE-21652 Refactor ThriftServer making thrift2 server inherited from thrift1 server

2019-01-02 Thread allan163
Repository: hbase
Updated Branches:
  refs/heads/master f0b50a8f9 -> e4b6b4afb


http://git-wip-us.apache.org/repos/asf/hbase/blob/e4b6b4af/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
index 5681569..fa3d39d 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
@@ -18,355 +18,86 @@
  */
 package org.apache.hadoop.hbase.thrift2;
 
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.UnknownHostException;
-import java.security.PrivilegedAction;
-import java.util.Map;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.SynchronousQueue;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
+import static org.apache.hadoop.hbase.thrift.Constants.READONLY_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_READONLY_ENABLED;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_READONLY_ENABLED_DEFAULT;
 
-import javax.security.auth.callback.Callback;
-import javax.security.auth.callback.UnsupportedCallbackException;
-import javax.security.sasl.AuthorizeCallback;
-import javax.security.sasl.SaslServer;
+import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.filter.ParseFilter;
-import org.apache.hadoop.hbase.http.InfoServer;
-import org.apache.hadoop.hbase.security.SaslUtil;
-import org.apache.hadoop.hbase.security.SecurityUtil;
 import org.apache.hadoop.hbase.security.UserProvider;
-import org.apache.hadoop.hbase.thrift.CallQueue;
-import org.apache.hadoop.hbase.thrift.THBaseThreadPoolExecutor;
-import org.apache.hadoop.hbase.thrift.ThriftMetrics;
+import org.apache.hadoop.hbase.thrift.HBaseServiceHandler;
+import org.apache.hadoop.hbase.thrift.HbaseHandlerMetricsProxy;
 import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
-import org.apache.hadoop.hbase.util.DNS;
-import org.apache.hadoop.hbase.util.JvmPauseMonitor;
-import org.apache.hadoop.hbase.util.Strings;
-import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.thrift.TException;
 import org.apache.thrift.TProcessor;
-import org.apache.thrift.protocol.TBinaryProtocol;
-import org.apache.thrift.protocol.TCompactProtocol;
-import org.apache.thrift.protocol.TProtocol;
-import org.apache.thrift.protocol.TProtocolFactory;
-import org.apache.thrift.server.THsHaServer;
-import org.apache.thrift.server.TNonblockingServer;
-import org.apache.thrift.server.TServer;
-import org.apache.thrift.server.TThreadPoolServer;
-import org.apache.thrift.server.TThreadedSelectorServer;
-import org.apache.thrift.transport.TFramedTransport;
-import org.apache.thrift.transport.TNonblockingServerSocket;
-import org.apache.thrift.transport.TNonblockingServerTransport;
-import org.apache.thrift.transport.TSaslServerTransport;
-import org.apache.thrift.transport.TServerSocket;
-import org.apache.thrift.transport.TServerTransport;
-import org.apache.thrift.transport.TTransportException;
-import org.apache.thrift.transport.TTransportFactory;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
-import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLineParser;
-import org.apache.hbase.thirdparty.org.apache.commons.cli.DefaultParser;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
-import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;
-import org.apache.hbase.thirdparty.org.apache.commons.cli.OptionGroup;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
-import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
 
 /**
  * ThriftServer - this class starts up a Thrift server which implements the 
HBase API specified in
  * the HbaseClient.thrift IDL file.
  */
+@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = 
"NM_SAME_SIMPLE_NAME_AS_SUPERCLASS",
+justification = "Change the name will be an incompatible change, will do 
it later")
 

[2/4] hbase git commit: HBASE-21652 Refactor ThriftServer making thrift2 server inherited from thrift1 server

2019-01-02 Thread allan163
http://git-wip-us.apache.org/repos/asf/hbase/blob/e4b6b4af/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
deleted file mode 100644
index 5e248f1..000
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ /dev/null
@@ -1,2031 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.thrift;
-
-import static org.apache.hadoop.hbase.util.Bytes.getBytes;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.UnknownHostException;
-import java.nio.ByteBuffer;
-import java.security.PrivilegedAction;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-
-import javax.security.auth.callback.Callback;
-import javax.security.auth.callback.UnsupportedCallbackException;
-import javax.security.sasl.AuthorizeCallback;
-import javax.security.sasl.SaslServer;
-
-import org.apache.commons.lang3.ArrayUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.Cell.Type;
-import org.apache.hadoop.hbase.CellBuilder;
-import org.apache.hadoop.hbase.CellBuilderFactory;
-import org.apache.hadoop.hbase.CellBuilderType;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.MetaTableAccessor;
-import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Append;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Durability;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Increment;
-import org.apache.hadoop.hbase.client.OperationWithAttributes;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.RegionInfo;
-import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.ParseFilter;
-import org.apache.hadoop.hbase.filter.PrefixFilter;
-import org.apache.hadoop.hbase.filter.WhileMatchFilter;
-import org.apache.hadoop.hbase.http.HttpServerUtil;
-import org.apache.hadoop.hbase.log.HBaseMarkers;
-import org.apache.hadoop.hbase.security.SaslUtil;
-import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-import org.apache.hadoop.hbase.security.SecurityUtil;
-import org.apache.hadoop.hbase.security.UserProvider;
-import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
-import org.apache.hadoop.hbase.thrift.generated.BatchMutation;
-import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
-import org.apache.hadoop.hbase.thrift.generated.Hbase;
-import org.apache.hadoop.hbase.thrift.generated.IOError;
-import org.apache.hadoop.hbase.thrift.generated.IllegalArgument;
-import org.apache.hadoop.hbase.thrift.generated.Mutation;
-import org.apache.hadoop.hbase.thrift.generated.TAppend;
-import org.apache.hadoop.hbase.thrift.generated.TCell;
-import org.apache.hadoop.hbase.thrift.generated.TIncrement;
-import 

hbase git commit: HBASE-21547 Precommit uses master flaky list for other branches

2019-01-02 Thread psomogyi
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 cdc40767e -> 2ef33e151


HBASE-21547 Precommit uses master flaky list for other branches

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2ef33e15
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2ef33e15
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2ef33e15

Branch: refs/heads/branch-1.2
Commit: 2ef33e1515dcbc2b23c05edb3e41478b0421654c
Parents: cdc4076
Author: Peter Somogyi 
Authored: Fri Dec 28 15:16:48 2018 +0100
Committer: Peter Somogyi 
Committed: Wed Jan 2 08:55:39 2019 +0100

--
 dev-support/hbase-personality.sh | 14 ++
 1 file changed, 14 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2ef33e15/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 3c52dae..b05f76f 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -255,6 +255,20 @@ function get_include_exclude_tests_arg
 yetus_error "Wget error $? in fetching includes file from url" \
  "${INCLUDE_TESTS_URL}. Ignoring and proceeding."
   fi
+  else
+# Use branch specific exclude list when EXCLUDE_TESTS_URL and 
INCLUDE_TESTS_URL are empty
+
FLAKY_URL="https://builds.apache.org/job/HBase-Find-Flaky-Tests/job/${PATCH_BRANCH}/lastSuccessfulBuild/artifact/excludes/"
+if wget "${FLAKY_URL}" -O "excludes"; then
+  excludes=$(cat excludes)
+yetus_debug "excludes=${excludes}"
+if [[ -n "${excludes}" ]]; then
+  eval "${__resultvar}='-Dtest.exclude.pattern=${excludes}'"
+fi
+rm excludes
+  else
+yetus_error "Wget error $? in fetching excludes file from url" \
+ "${FLAKY_URL}. Ignoring and proceeding."
+  fi
   fi
 }
 



hbase git commit: HBASE-21547 Precommit uses master flaky list for other branches

2019-01-02 Thread psomogyi
Repository: hbase
Updated Branches:
  refs/heads/branch-1 beeb0796e -> f9afd9254


HBASE-21547 Precommit uses master flaky list for other branches

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f9afd925
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f9afd925
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f9afd925

Branch: refs/heads/branch-1
Commit: f9afd925482a662033fbe959ab746f1f0ea1a7e1
Parents: beeb079
Author: Peter Somogyi 
Authored: Fri Dec 28 15:16:48 2018 +0100
Committer: Peter Somogyi 
Committed: Wed Jan 2 08:41:46 2019 +0100

--
 dev-support/hbase-personality.sh | 14 ++
 1 file changed, 14 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f9afd925/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 93727b3..7f930d0 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -259,6 +259,20 @@ function get_include_exclude_tests_arg
 yetus_error "Wget error $? in fetching includes file from url" \
  "${INCLUDE_TESTS_URL}. Ignoring and proceeding."
   fi
+  else
+# Use branch specific exclude list when EXCLUDE_TESTS_URL and 
INCLUDE_TESTS_URL are empty
+
FLAKY_URL="https://builds.apache.org/job/HBase-Find-Flaky-Tests/job/${PATCH_BRANCH}/lastSuccessfulBuild/artifact/excludes/"
+if wget "${FLAKY_URL}" -O "excludes"; then
+  excludes=$(cat excludes)
+yetus_debug "excludes=${excludes}"
+if [[ -n "${excludes}" ]]; then
+  eval "${__resultvar}='-Dtest.exclude.pattern=${excludes}'"
+fi
+rm excludes
+  else
+yetus_error "Wget error $? in fetching excludes file from url" \
+ "${FLAKY_URL}. Ignoring and proceeding."
+  fi
   fi
 }
 



hbase git commit: HBASE-21547 Precommit uses master flaky list for other branches

2019-01-02 Thread psomogyi
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 f7470a8b5 -> 928baae11


HBASE-21547 Precommit uses master flaky list for other branches

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/928baae1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/928baae1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/928baae1

Branch: refs/heads/branch-1.4
Commit: 928baae115f90c9beabeceeb0ed41db4afec29b0
Parents: f7470a8
Author: Peter Somogyi 
Authored: Fri Dec 28 15:16:48 2018 +0100
Committer: Peter Somogyi 
Committed: Wed Jan 2 08:43:24 2019 +0100

--
 dev-support/hbase-personality.sh | 14 ++
 1 file changed, 14 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/928baae1/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 6c99839..705d775 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -259,6 +259,20 @@ function get_include_exclude_tests_arg
 yetus_error "Wget error $? in fetching includes file from url" \
  "${INCLUDE_TESTS_URL}. Ignoring and proceeding."
   fi
+  else
+# Use branch specific exclude list when EXCLUDE_TESTS_URL and 
INCLUDE_TESTS_URL are empty
+
FLAKY_URL="https://builds.apache.org/job/HBase-Find-Flaky-Tests/job/${PATCH_BRANCH}/lastSuccessfulBuild/artifact/excludes/"
+if wget "${FLAKY_URL}" -O "excludes"; then
+  excludes=$(cat excludes)
+yetus_debug "excludes=${excludes}"
+if [[ -n "${excludes}" ]]; then
+  eval "${__resultvar}='-Dtest.exclude.pattern=${excludes}'"
+fi
+rm excludes
+  else
+yetus_error "Wget error $? in fetching excludes file from url" \
+ "${FLAKY_URL}. Ignoring and proceeding."
+  fi
   fi
 }