[15/15] hbase git commit: HBASE-21579 Use AsyncClusterConnection for HBaseInterClusterReplicationEndpoint

2019-01-03 Thread zhangduo
HBASE-21579 Use AsyncClusterConnection for HBaseInterClusterReplicationEndpoint


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/04e6909a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/04e6909a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/04e6909a

Branch: refs/heads/HBASE-21512
Commit: 04e6909adfdf2e3a5b92fadb13dd8fc06e15c5e3
Parents: 0d051ab
Author: zhangduo 
Authored: Tue Jan 1 21:27:14 2019 +0800
Committer: Duo Zhang 
Committed: Fri Jan 4 14:34:02 2019 +0800

--
 .../hbase/client/AsyncRegionServerAdmin.java| 14 +--
 .../hbase/protobuf/ReplicationProtbufUtil.java  | 35 +
 .../HBaseInterClusterReplicationEndpoint.java   | 31 +++
 .../regionserver/ReplicationSinkManager.java| 40 +++-
 .../replication/SyncReplicationTestBase.java| 12 +++---
 .../TestReplicationSinkManager.java | 21 +-
 6 files changed, 74 insertions(+), 79 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/04e6909a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionServerAdmin.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionServerAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionServerAdmin.java
index 9accd89..b9141a9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionServerAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionServerAdmin.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.client;
 
 import java.io.IOException;
 import java.util.concurrent.CompletableFuture;
+import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.yetus.audience.InterfaceAudience;
@@ -94,9 +95,9 @@ public class AsyncRegionServerAdmin {
     void call(AdminService.Interface stub, HBaseRpcController controller, RpcCallback<RESP> done);
   }
 
-  private <RESP> CompletableFuture<RESP> call(RpcCall<RESP> rpcCall) {
+  private <RESP> CompletableFuture<RESP> call(RpcCall<RESP> rpcCall, CellScanner cellScanner) {
     CompletableFuture<RESP> future = new CompletableFuture<>();
-    HBaseRpcController controller = conn.rpcControllerFactory.newController();
+    HBaseRpcController controller = conn.rpcControllerFactory.newController(cellScanner);
     try {
       rpcCall.call(conn.getAdminStub(server), controller, new RpcCallback<RESP>() {
 
@@ -115,6 +116,10 @@ public class AsyncRegionServerAdmin {
     return future;
   }
 
+  private <RESP> CompletableFuture<RESP> call(RpcCall<RESP> rpcCall) {
+    return call(rpcCall, null);
+  }
+
   public CompletableFuture<GetRegionInfoResponse> getRegionInfo(GetRegionInfoRequest request) {
     return call((stub, controller, done) -> stub.getRegionInfo(controller, request, done));
   }
@@ -154,8 +159,9 @@ public class AsyncRegionServerAdmin {
   }
 
   public CompletableFuture<ReplicateWALEntryResponse> replicateWALEntry(
-      ReplicateWALEntryRequest request) {
-    return call((stub, controller, done) -> stub.replicateWALEntry(controller, request, done));
+      ReplicateWALEntryRequest request, CellScanner cellScanner) {
+    return call((stub, controller, done) -> stub.replicateWALEntry(controller, request, done),
+      cellScanner);
   }
 
   public CompletableFuture<ReplicateWALEntryResponse> replay(ReplicateWALEntryRequest request) {

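For illustration, a minimal sketch of how a caller such as HBaseInterClusterReplicationEndpoint might use the new overload. The helper class and method below are hypothetical; only the replicateWALEntry(request, cellScanner) call itself comes from this patch:

  import java.util.concurrent.CompletableFuture;
  import org.apache.hadoop.hbase.CellScanner;
  import org.apache.hadoop.hbase.client.AsyncRegionServerAdmin;
  import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest;
  import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse;

  public final class ReplicateWALEntrySketch {
    static CompletableFuture<ReplicateWALEntryResponse> ship(AsyncRegionServerAdmin sinkAdmin,
        ReplicateWALEntryRequest request, CellScanner cells) {
      // The protobuf request carries the WAL entry metadata; the CellScanner is handed to the
      // rpc controller (see the call(..., cellScanner) change above), so the cell payload
      // travels out-of-band instead of being copied into the protobuf message.
      return sinkAdmin.replicateWALEntry(request, cells);
    }
  }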
http://git-wip-us.apache.org/repos/asf/hbase/blob/04e6909a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
index c1b3911..74fad26 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
@@ -20,51 +20,54 @@ package org.apache.hadoop.hbase.protobuf;
 
 
 import java.io.IOException;
+import java.io.InterruptedIOException;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
+import java.util.concurrent.ExecutionException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.PrivateCellUtil;
+import org.apache.hadoop.hbase.client.AsyncRegionServerAdmin;
 import org.apache.hadoop.hbase.io.SizedCellScanner;
-import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-import org.apache.hadoop.hbase.ipc.HBaseRpcControllerImpl;
 import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;
 import 

[11/15] hbase git commit: HBASE-21630 [shell] Define ENDKEY == STOPROW (we have ENDROW)

2019-01-03 Thread zhangduo
HBASE-21630 [shell] Define ENDKEY == STOPROW (we have ENDROW)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3fbdd5bb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3fbdd5bb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3fbdd5bb

Branch: refs/heads/HBASE-21512
Commit: 3fbdd5bbe9937bdb12984275dafced9cb6746a63
Parents: c300565
Author: Nihal Jain 
Authored: Wed Jan 2 01:24:08 2019 +0530
Committer: stack 
Committed: Thu Jan 3 21:58:28 2019 -0800

--
 hbase-shell/src/main/ruby/hbase_constants.rb  |  2 ++
 hbase-shell/src/test/ruby/hbase/table_test.rb | 36 --
 2 files changed, 35 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3fbdd5bb/hbase-shell/src/main/ruby/hbase_constants.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase_constants.rb 
b/hbase-shell/src/main/ruby/hbase_constants.rb
index 4c1ad22..9871685 100644
--- a/hbase-shell/src/main/ruby/hbase_constants.rb
+++ b/hbase-shell/src/main/ruby/hbase_constants.rb
@@ -43,8 +43,10 @@ module HBaseConstants
   METADATA = org.apache.hadoop.hbase.HConstants::METADATA
   STOPROW = 'STOPROW'.freeze
   STARTROW = 'STARTROW'.freeze
+  STARTKEY = STARTROW
   ROWPREFIXFILTER = 'ROWPREFIXFILTER'.freeze
   ENDROW = STOPROW
+  ENDKEY = STOPROW
   RAW = 'RAW'.freeze
   LIMIT = 'LIMIT'.freeze
   METHOD = 'METHOD'.freeze

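The shell aliases map onto the same row boundaries as the Java client Scan API; for reference, a minimal Java sketch (illustrative only, not part of this commit) of the equivalent bounds:

  import org.apache.hadoop.hbase.client.Scan;
  import org.apache.hadoop.hbase.util.Bytes;

  public final class ScanBoundsSketch {
    static Scan bounded() {
      return new Scan()
          .withStartRow(Bytes.toBytes("2"))  // STARTROW / STARTKEY (inclusive)
          .withStopRow(Bytes.toBytes("5"));  // STOPROW / ENDROW / ENDKEY (exclusive)
    }
  }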
http://git-wip-us.apache.org/repos/asf/hbase/blob/3fbdd5bb/hbase-shell/src/test/ruby/hbase/table_test.rb
--
diff --git a/hbase-shell/src/test/ruby/hbase/table_test.rb 
b/hbase-shell/src/test/ruby/hbase/table_test.rb
index 5ec317a..b3343e2 100644
--- a/hbase-shell/src/test/ruby/hbase/table_test.rb
+++ b/hbase-shell/src/test/ruby/hbase/table_test.rb
@@ -476,6 +476,16 @@ module Hbase
   assert_not_nil(res['2']['x:b'])
 end
 
+define_test "scan should support STARTKEY parameter" do
+  res = @test_table._scan_internal STARTKEY => '2'
+  assert_not_nil(res)
+  assert_kind_of(Hash, res)
+  assert_nil(res['1'])
+  assert_not_nil(res['2'])
+  assert_not_nil(res['2']['x:a'])
+  assert_not_nil(res['2']['x:b'])
+end
+
 define_test "scan should support STOPROW parameter" do
   res = @test_table._scan_internal STOPROW => '2'
   assert_not_nil(res)
@@ -486,7 +496,27 @@ module Hbase
   assert_nil(res['2'])
 end
 
-define_test "scan should support ROWPREFIXFILTER parameter (test 1)" do
+define_test "scan should support ENDROW parameter" do
+  res = @test_table._scan_internal ENDROW => '2'
+  assert_not_nil(res)
+  assert_kind_of(Hash, res)
+  assert_not_nil(res['1'])
+  assert_not_nil(res['1']['x:a'])
+  assert_not_nil(res['1']['x:b'])
+  assert_nil(res['2'])
+end
+
+define_test "scan should support ENDKEY parameter" do
+  res = @test_table._scan_internal ENDKEY => '2'
+  assert_not_nil(res)
+  assert_kind_of(Hash, res)
+  assert_not_nil(res['1'])
+  assert_not_nil(res['1']['x:a'])
+  assert_not_nil(res['1']['x:b'])
+  assert_nil(res['2'])
+end
+
+define_test 'scan should support ROWPREFIXFILTER parameter (test 1)' do
   res = @test_table._scan_internal ROWPREFIXFILTER => '1'
   assert_not_nil(res)
   assert_kind_of(Hash, res)
@@ -496,7 +526,7 @@ module Hbase
   assert_nil(res['2'])
 end
 
-define_test "scan should support ROWPREFIXFILTER parameter (test 2)" do
+define_test 'scan should support ROWPREFIXFILTER parameter (test 2)' do
   res = @test_table._scan_internal ROWPREFIXFILTER => '2'
   assert_not_nil(res)
   assert_kind_of(Hash, res)
@@ -506,7 +536,7 @@ module Hbase
   assert_not_nil(res['2']['x:b'])
 end
 
-define_test "scan should support LIMIT parameter" do
+define_test 'scan should support LIMIT parameter' do
   res = @test_table._scan_internal LIMIT => 1
   assert_not_nil(res)
   assert_kind_of(Hash, res)



[08/15] hbase git commit: HBASE-17356 Add replica get support

2019-01-03 Thread zhangduo
HBASE-17356 Add replica get support


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/db66e6cc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/db66e6cc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/db66e6cc

Branch: refs/heads/HBASE-21512
Commit: db66e6cc9e1c6ea027631388aba688cb623b7d0a
Parents: e4b6b4a
Author: zhangduo 
Authored: Tue Jan 1 21:59:37 2019 +0800
Committer: zhangduo 
Committed: Thu Jan 3 08:38:20 2019 +0800

--
 .../apache/hadoop/hbase/RegionLocations.java|   30 +-
 .../client/AsyncBatchRpcRetryingCaller.java |  114 +-
 .../client/AsyncConnectionConfiguration.java|   12 +
 .../hbase/client/AsyncConnectionImpl.java   |1 -
 .../hbase/client/AsyncMetaRegionLocator.java|  125 +-
 .../hbase/client/AsyncNonMetaRegionLocator.java |  291 +--
 .../hadoop/hbase/client/AsyncRegionLocator.java |  129 +-
 .../hbase/client/AsyncRegionLocatorHelper.java  |  147 ++
 .../hbase/client/AsyncRpcRetryingCaller.java|   15 +-
 .../client/AsyncRpcRetryingCallerFactory.java   |   55 +-
 .../AsyncSingleRequestRpcRetryingCaller.java|   71 +-
 .../hbase/client/AsyncTableRegionLocator.java   |   28 +-
 .../client/AsyncTableRegionLocatorImpl.java |6 +-
 .../hbase/client/ConnectionConfiguration.java   |5 +-
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java | 2033 +-
 .../hadoop/hbase/client/RawAsyncTableImpl.java  |  208 +-
 .../apache/hadoop/hbase/util/FutureUtils.java   |   60 +
 .../hbase/client/RegionReplicaTestHelper.java   |  161 ++
 .../client/TestAsyncMetaRegionLocator.java  |   55 +-
 .../client/TestAsyncNonMetaRegionLocator.java   |  126 +-
 ...syncNonMetaRegionLocatorConcurrenyLimit.java |   20 +-
 ...TestAsyncSingleRequestRpcRetryingCaller.java |   56 +-
 .../client/TestAsyncTableLocatePrefetch.java|4 +-
 .../client/TestAsyncTableRegionReplicasGet.java |  204 ++
 .../hbase/client/TestZKAsyncRegistry.java   |   44 +-
 25 files changed, 2366 insertions(+), 1634 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/db66e6cc/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
index fd6f3c7..f98bf03 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
@@ -56,8 +56,8 @@ public class RegionLocations {
 int index = 0;
 for (HRegionLocation loc : locations) {
   if (loc != null) {
-if (loc.getRegionInfo().getReplicaId() >= maxReplicaId) {
-  maxReplicaId = loc.getRegionInfo().getReplicaId();
+if (loc.getRegion().getReplicaId() >= maxReplicaId) {
+  maxReplicaId = loc.getRegion().getReplicaId();
   maxReplicaIdIndex = index;
 }
   }
@@ -72,7 +72,7 @@ public class RegionLocations {
   this.locations = new HRegionLocation[maxReplicaId + 1];
   for (HRegionLocation loc : locations) {
 if (loc != null) {
-  this.locations[loc.getRegionInfo().getReplicaId()] = loc;
+  this.locations[loc.getRegion().getReplicaId()] = loc;
 }
   }
 }
@@ -146,7 +146,7 @@ public class RegionLocations {
   public RegionLocations remove(HRegionLocation location) {
 if (location == null) return this;
 if (location.getRegion() == null) return this;
-int replicaId = location.getRegionInfo().getReplicaId();
+int replicaId = location.getRegion().getReplicaId();
 if (replicaId >= locations.length) return this;
 
 // check whether something to remove. HRL.compareTo() compares ONLY the
@@ -203,14 +203,14 @@ public class RegionLocations {
 // in case of region replication going down, we might have a leak here.
 int max = other.locations.length;
 
-HRegionInfo regionInfo = null;
+RegionInfo regionInfo = null;
 for (int i = 0; i < max; i++) {
   HRegionLocation thisLoc = this.getRegionLocation(i);
   HRegionLocation otherLoc = other.getRegionLocation(i);
-      if (regionInfo == null && otherLoc != null && otherLoc.getRegionInfo() != null) {
+      if (regionInfo == null && otherLoc != null && otherLoc.getRegion() != null) {
         // regionInfo is the first non-null HRI from other RegionLocations. We use it to ensure that
         // all replica region infos belong to the same region with same region id.
-        regionInfo = otherLoc.getRegionInfo();
+        regionInfo = otherLoc.getRegion();
   }
 
   HRegionLocation selectedLoc = selectRegionLocation(thisLoc,
@@ -232,7 +232,7 @@ public class RegionLocations {
   

[03/15] hbase git commit: HBASE-21652 Refactor ThriftServer making thrift2 server inherited from thrift1 server

2019-01-03 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/e4b6b4af/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
deleted file mode 100644
index 5e248f1..000
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ /dev/null
@@ -1,2031 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.thrift;
-
-import static org.apache.hadoop.hbase.util.Bytes.getBytes;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.UnknownHostException;
-import java.nio.ByteBuffer;
-import java.security.PrivilegedAction;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-
-import javax.security.auth.callback.Callback;
-import javax.security.auth.callback.UnsupportedCallbackException;
-import javax.security.sasl.AuthorizeCallback;
-import javax.security.sasl.SaslServer;
-
-import org.apache.commons.lang3.ArrayUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.Cell.Type;
-import org.apache.hadoop.hbase.CellBuilder;
-import org.apache.hadoop.hbase.CellBuilderFactory;
-import org.apache.hadoop.hbase.CellBuilderType;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.MetaTableAccessor;
-import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Append;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Durability;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Increment;
-import org.apache.hadoop.hbase.client.OperationWithAttributes;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.RegionInfo;
-import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.ParseFilter;
-import org.apache.hadoop.hbase.filter.PrefixFilter;
-import org.apache.hadoop.hbase.filter.WhileMatchFilter;
-import org.apache.hadoop.hbase.http.HttpServerUtil;
-import org.apache.hadoop.hbase.log.HBaseMarkers;
-import org.apache.hadoop.hbase.security.SaslUtil;
-import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-import org.apache.hadoop.hbase.security.SecurityUtil;
-import org.apache.hadoop.hbase.security.UserProvider;
-import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
-import org.apache.hadoop.hbase.thrift.generated.BatchMutation;
-import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
-import org.apache.hadoop.hbase.thrift.generated.Hbase;
-import org.apache.hadoop.hbase.thrift.generated.IOError;
-import org.apache.hadoop.hbase.thrift.generated.IllegalArgument;
-import org.apache.hadoop.hbase.thrift.generated.Mutation;
-import org.apache.hadoop.hbase.thrift.generated.TAppend;
-import org.apache.hadoop.hbase.thrift.generated.TCell;
-import org.apache.hadoop.hbase.thrift.generated.TIncrement;
-import 

[05/15] hbase git commit: HBASE-21652 Refactor ThriftServer making thrift2 server inherited from thrift1 server

2019-01-03 Thread zhangduo
HBASE-21652 Refactor ThriftServer making thrift2 server inherited from thrift1 server


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e4b6b4af
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e4b6b4af
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e4b6b4af

Branch: refs/heads/HBASE-21512
Commit: e4b6b4afb933a961f543537875f87a2dc62d3757
Parents: f0b50a8
Author: Allan Yang 
Authored: Wed Jan 2 16:13:17 2019 +0800
Committer: Allan Yang 
Committed: Wed Jan 2 16:13:57 2019 +0800

--
 .../apache/hadoop/hbase/thrift/Constants.java   |  151 ++
 .../hbase/thrift/HBaseServiceHandler.java   |   90 +
 .../hbase/thrift/HbaseHandlerMetricsProxy.java  |   20 +-
 .../apache/hadoop/hbase/thrift/ImplType.java|  143 ++
 .../hadoop/hbase/thrift/IncrementCoalescer.java |6 +-
 .../hbase/thrift/ThriftHBaseServiceHandler.java | 1347 
 .../hadoop/hbase/thrift/ThriftHttpServlet.java  |   12 +-
 .../hadoop/hbase/thrift/ThriftServer.java   |  698 +-
 .../hadoop/hbase/thrift/ThriftServerRunner.java | 2031 --
 .../thrift2/ThriftHBaseServiceHandler.java  |   69 +-
 .../hadoop/hbase/thrift2/ThriftServer.java  |  594 +
 .../resources/hbase-webapps/thrift/thrift.jsp   |2 +-
 .../hbase/thrift/TestThriftHttpServer.java  |   28 +-
 .../hadoop/hbase/thrift/TestThriftServer.java   |   58 +-
 .../hbase/thrift/TestThriftServerCmdLine.java   |   48 +-
 .../thrift/TestThriftSpnegoHttpServer.java  |   21 +-
 .../hbase/thrift2/TestThrift2HttpServer.java|   90 +
 .../hbase/thrift2/TestThrift2ServerCmdLine.java |   99 +
 .../thrift2/TestThriftHBaseServiceHandler.java  |   15 +-
 19 files changed, 2711 insertions(+), 2811 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e4b6b4af/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java
new file mode 100644
index 000..8e3d004
--- /dev/null
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.thrift;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * Thrift related constants
+ */
+@InterfaceAudience.Private
+public final class Constants {
+  private Constants(){}
+
+  public static final int DEFAULT_HTTP_MAX_HEADER_SIZE = 64 * 1024; // 64k
+
+  public static final String SERVER_TYPE_CONF_KEY =
+  "hbase.regionserver.thrift.server.type";
+
+  public static final String COMPACT_CONF_KEY = 
"hbase.regionserver.thrift.compact";
+  public static final boolean COMPACT_CONF_DEFAULT = false;
+
+  public static final String FRAMED_CONF_KEY = 
"hbase.regionserver.thrift.framed";
+  public static final boolean FRAMED_CONF_DEFAULT = false;
+
+  public static final String MAX_FRAME_SIZE_CONF_KEY =
+  "hbase.regionserver.thrift.framed.max_frame_size_in_mb";
+  public static final int MAX_FRAME_SIZE_CONF_DEFAULT = 2;
+
+  public static final String COALESCE_INC_KEY = 
"hbase.regionserver.thrift.coalesceIncrement";
+  public static final String USE_HTTP_CONF_KEY = 
"hbase.regionserver.thrift.http";
+
+  public static final String HTTP_MIN_THREADS_KEY = 
"hbase.thrift.http_threads.min";
+  public static final int HTTP_MIN_THREADS_KEY_DEFAULT = 2;
+
+  public static final String HTTP_MAX_THREADS_KEY = 
"hbase.thrift.http_threads.max";
+  public static final int HTTP_MAX_THREADS_KEY_DEFAULT = 100;
+
+  // ssl related configs
+  public static final String THRIFT_SSL_ENABLED_KEY = 
"hbase.thrift.ssl.enabled";
+  public static final String THRIFT_SSL_KEYSTORE_STORE_KEY = 
"hbase.thrift.ssl.keystore.store";
+  public static final String THRIFT_SSL_KEYSTORE_PASSWORD_KEY =
+  "hbase.thrift.ssl.keystore.password";
+  public static final String 

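A minimal sketch (illustrative only; it assumes the hbase-thrift module on the classpath and uses only constants declared above with their declared defaults) of how these keys can be read from a Configuration:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.thrift.Constants;

  public final class ThriftConfigSketch {
    public static void main(String[] args) {
      Configuration conf = HBaseConfiguration.create();
      // Read a few of the constants defined above, falling back to their declared defaults.
      boolean compact = conf.getBoolean(Constants.COMPACT_CONF_KEY, Constants.COMPACT_CONF_DEFAULT);
      boolean framed = conf.getBoolean(Constants.FRAMED_CONF_KEY, Constants.FRAMED_CONF_DEFAULT);
      int maxFrameMb = conf.getInt(Constants.MAX_FRAME_SIZE_CONF_KEY, Constants.MAX_FRAME_SIZE_CONF_DEFAULT);
      System.out.println("compact=" + compact + ", framed=" + framed + ", maxFrameMb=" + maxFrameMb);
    }
  }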
[01/15] hbase git commit: HBASE-21547 Precommit uses master flaky list for other branches [Forced Update!]

2019-01-03 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/HBASE-21512 44462a48e -> 04e6909ad (forced update)


HBASE-21547 Precommit uses master flaky list for other branches

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f0b50a8f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f0b50a8f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f0b50a8f

Branch: refs/heads/HBASE-21512
Commit: f0b50a8f9b6db266c1e0f307fda6a4ad7b852135
Parents: ec948f5
Author: Peter Somogyi 
Authored: Fri Dec 28 15:16:48 2018 +0100
Committer: Peter Somogyi 
Committed: Wed Jan 2 08:26:57 2019 +0100

--
 dev-support/hbase-personality.sh | 14 ++
 1 file changed, 14 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f0b50a8f/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 3856a29..8dd24be 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -272,6 +272,20 @@ function get_include_exclude_tests_arg
       yetus_error "Wget error $? in fetching includes file from url" \
            "${INCLUDE_TESTS_URL}. Ignoring and proceeding."
     fi
+  else
+    # Use branch specific exclude list when EXCLUDE_TESTS_URL and INCLUDE_TESTS_URL are empty
+    FLAKY_URL="https://builds.apache.org/job/HBase-Find-Flaky-Tests/job/${PATCH_BRANCH}/lastSuccessfulBuild/artifact/excludes/"
+    if wget "${FLAKY_URL}" -O "excludes"; then
+      excludes=$(cat excludes)
+      yetus_debug "excludes=${excludes}"
+      if [[ -n "${excludes}" ]]; then
+        eval "${__resultvar}='-Dtest.exclude.pattern=${excludes}'"
+      fi
+      rm excludes
+    else
+      yetus_error "Wget error $? in fetching excludes file from url" \
+        "${FLAKY_URL}. Ignoring and proceeding."
+    fi
   fi
 }
 



[09/15] hbase git commit: HBASE-21662 Add append_peer_exclude_namespaces and remove_peer_exclude_namespaces shell commands

2019-01-03 Thread zhangduo
HBASE-21662 Add append_peer_exclude_namespaces and remove_peer_exclude_namespaces shell commands

Signed-off-by: Guanghao Zhang 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/466fa920
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/466fa920
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/466fa920

Branch: refs/heads/HBASE-21512
Commit: 466fa920fee572fe20db3b77ebf539dc304d5f31
Parents: db66e6c
Author: meiyi 
Authored: Wed Jan 2 14:08:22 2019 +0800
Committer: Guanghao Zhang 
Committed: Thu Jan 3 10:21:43 2019 +0800

--
 .../src/main/ruby/hbase/replication_admin.rb| 39 +++
 hbase-shell/src/main/ruby/shell.rb  |  2 +
 .../commands/append_peer_exclude_namespaces.rb  | 47 +
 .../commands/remove_peer_exclude_namespaces.rb  | 45 
 .../test/ruby/hbase/replication_admin_test.rb   | 74 
 5 files changed, 207 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/466fa920/hbase-shell/src/main/ruby/hbase/replication_admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/replication_admin.rb 
b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
index c01b6ea..e061168 100644
--- a/hbase-shell/src/main/ruby/hbase/replication_admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
@@ -285,6 +285,45 @@ module Hbase
   end
 end
 
+# Append exclude namespaces config for the specified peer
+def append_peer_exclude_namespaces(id, namespaces)
+  unless namespaces.nil?
+rpc = get_peer_config(id)
+unless rpc.nil?
+  if rpc.getExcludeNamespaces.nil?
+ns_set = java.util.HashSet.new
+  else
+ns_set = java.util.HashSet.new(rpc.getExcludeNamespaces)
+  end
+  namespaces.each do |n|
+ns_set.add(n)
+  end
+  builder = ReplicationPeerConfig.newBuilder(rpc)
+  builder.setExcludeNamespaces(ns_set)
+  @admin.updateReplicationPeerConfig(id, builder.build)
+end
+  end
+end
+
+# Remove exclude namespaces config for the specified peer
+def remove_peer_exclude_namespaces(id, namespaces)
+  unless namespaces.nil?
+rpc = get_peer_config(id)
+unless rpc.nil?
+  ns_set = rpc.getExcludeNamespaces
+  unless ns_set.nil?
+ns_set = java.util.HashSet.new(ns_set)
+namespaces.each do |n|
+  ns_set.remove(n)
+end
+  end
+  builder = ReplicationPeerConfig.newBuilder(rpc)
+  builder.setExcludeNamespaces(ns_set)
+  @admin.updateReplicationPeerConfig(id, builder.build)
+end
+  end
+end
+
 def set_peer_replicate_all(id, replicate_all)
   rpc = get_peer_config(id)
   return if rpc.nil?

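The shell command wraps the Java Admin and ReplicationPeerConfig builder APIs used in the Ruby code above; a rough Java equivalent (an illustrative sketch, not part of this patch) looks like:

  import java.io.IOException;
  import java.util.HashSet;
  import java.util.Set;
  import org.apache.hadoop.hbase.client.Admin;
  import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

  public final class AppendExcludeNamespacesSketch {
    static void appendExcludeNamespaces(Admin admin, String peerId, Set<String> toAdd)
        throws IOException {
      ReplicationPeerConfig rpc = admin.getReplicationPeerConfig(peerId);
      // Start from the current exclude set (or an empty one), add the new namespaces,
      // then push the updated peer config back, as the Ruby command does.
      Set<String> ns = rpc.getExcludeNamespaces() == null
          ? new HashSet<>() : new HashSet<>(rpc.getExcludeNamespaces());
      ns.addAll(toAdd);
      admin.updateReplicationPeerConfig(peerId,
          ReplicationPeerConfig.newBuilder(rpc).setExcludeNamespaces(ns).build());
    }
  }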
http://git-wip-us.apache.org/repos/asf/hbase/blob/466fa920/hbase-shell/src/main/ruby/shell.rb
--
diff --git a/hbase-shell/src/main/ruby/shell.rb 
b/hbase-shell/src/main/ruby/shell.rb
index 1507ca3..1f7eae6 100644
--- a/hbase-shell/src/main/ruby/shell.rb
+++ b/hbase-shell/src/main/ruby/shell.rb
@@ -384,6 +384,8 @@ Shell.load_command_group(
 append_peer_namespaces
 remove_peer_namespaces
 set_peer_exclude_namespaces
+append_peer_exclude_namespaces
+remove_peer_exclude_namespaces
 show_peer_tableCFs
 set_peer_tableCFs
 set_peer_exclude_tableCFs

http://git-wip-us.apache.org/repos/asf/hbase/blob/466fa920/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb
--
diff --git 
a/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb 
b/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb
new file mode 100644
index 000..4f500c8
--- /dev/null
+++ b/hbase-shell/src/main/ruby/shell/commands/append_peer_exclude_namespaces.rb
@@ -0,0 +1,47 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language 

[13/15] hbase git commit: HBASE-21516 Use AsyncConnection instead of Connection in SecureBulkLoadManager

2019-01-03 Thread zhangduo
HBASE-21516 Use AsyncConnection instead of Connection in SecureBulkLoadManager


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/721af37c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/721af37c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/721af37c

Branch: refs/heads/HBASE-21512
Commit: 721af37c0ce2777620334cfd6fbb14a430533704
Parents: 6fff6c8
Author: zhangduo 
Authored: Sat Dec 1 21:15:48 2018 +0800
Committer: Duo Zhang 
Committed: Fri Jan 4 14:31:41 2019 +0800

--
 .../hadoop/hbase/protobuf/ProtobufUtil.java |  5 +-
 .../hbase/shaded/protobuf/ProtobufUtil.java |  7 ++-
 .../hbase/regionserver/HRegionServer.java   |  2 +-
 .../regionserver/SecureBulkLoadManager.java | 24 +
 .../hadoop/hbase/security/token/TokenUtil.java  | 57 +++-
 .../hbase/security/token/TestTokenUtil.java | 42 +++
 6 files changed, 96 insertions(+), 41 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/721af37c/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index a3d49b5..d9e620b 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -261,13 +261,12 @@ public final class ProtobufUtil {
* just {@link ServiceException}. Prefer this method to
* {@link #getRemoteException(ServiceException)} because trying to
* contain direct protobuf references.
-   * @param e
*/
-  public static IOException handleRemoteException(Exception e) {
+  public static IOException handleRemoteException(Throwable e) {
 return makeIOExceptionOfException(e);
   }
 
-  private static IOException makeIOExceptionOfException(Exception e) {
+  private static IOException makeIOExceptionOfException(Throwable e) {
 Throwable t = e;
 if (e instanceof ServiceException ||
 e instanceof 
org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) {

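Widening the parameter from Exception to Throwable matters mainly for asynchronous callers: the cause of an ExecutionException is a Throwable, so it can now be passed straight through. A minimal sketch (the helper below is hypothetical, not part of this patch):

  import java.io.IOException;
  import java.io.InterruptedIOException;
  import java.util.concurrent.CompletableFuture;
  import java.util.concurrent.ExecutionException;
  import org.apache.hadoop.hbase.protobuf.ProtobufUtil;

  public final class FutureGetSketch {
    static <T> T get(CompletableFuture<T> future) throws IOException {
      try {
        return future.get();
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        InterruptedIOException ioe = new InterruptedIOException();
        ioe.initCause(e);
        throw ioe;
      } catch (ExecutionException e) {
        // getCause() is declared as Throwable, hence the widened handleRemoteException signature.
        throw ProtobufUtil.handleRemoteException(e.getCause());
      }
    }
  }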
http://git-wip-us.apache.org/repos/asf/hbase/blob/721af37c/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index fea81f1..de2fb7d 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -40,7 +40,6 @@ import java.util.concurrent.TimeUnit;
 import java.util.function.Function;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.ByteBufferExtendedCell;
@@ -123,6 +122,7 @@ import 
org.apache.hbase.thirdparty.com.google.protobuf.Service;
 import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
 import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;
 import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
+
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
@@ -343,13 +343,12 @@ public final class ProtobufUtil {
* just {@link ServiceException}. Prefer this method to
* {@link #getRemoteException(ServiceException)} because trying to
* contain direct protobuf references.
-   * @param e
*/
-  public static IOException handleRemoteException(Exception e) {
+  public static IOException handleRemoteException(Throwable e) {
 return makeIOExceptionOfException(e);
   }
 
-  private static IOException makeIOExceptionOfException(Exception e) {
+  private static IOException makeIOExceptionOfException(Throwable e) {
 Throwable t = e;
 if (e instanceof ServiceException) {
   t = e.getCause();

http://git-wip-us.apache.org/repos/asf/hbase/blob/721af37c/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
 

[06/15] hbase git commit: HBASE-17356 Add replica get support

2019-01-03 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/db66e6cc/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
index d705d7c..28db7e8 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.client;
 import static java.util.stream.Collectors.toList;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+import static org.apache.hadoop.hbase.util.FutureUtils.addListener;
 
 import com.google.protobuf.RpcChannel;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -32,11 +32,11 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Function;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.TableName;
 import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
@@ -45,9 +45,12 @@ import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
+
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
@@ -63,7 +66,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType
 
 /**
  * The implementation of RawAsyncTable.
- * 
+ * 
  * The word 'Raw' means that this is a low level class. The returned {@link 
CompletableFuture} will
  * be finished inside the rpc framework thread, which means that the callbacks 
registered to the
  * {@link CompletableFuture} will also be executed inside the rpc framework 
thread. So users who use
@@ -74,6 +77,8 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType
 @InterfaceAudience.Private
 class RawAsyncTableImpl implements AsyncTable {
 
+  private static final Logger LOG = 
LoggerFactory.getLogger(RawAsyncTableImpl.class);
+
   private final AsyncConnectionImpl conn;
 
   private final TableName tableName;
@@ -204,58 +209,126 @@ class RawAsyncTableImpl implements 
AsyncTable {
 
   private  SingleRequestCallerBuilder newCaller(byte[] row, long 
rpcTimeoutNs) {
 return conn.callerFactory. single().table(tableName).row(row)
-.rpcTimeout(rpcTimeoutNs, TimeUnit.NANOSECONDS)
-.operationTimeout(operationTimeoutNs, TimeUnit.NANOSECONDS)
-.pause(pauseNs, TimeUnit.NANOSECONDS).maxAttempts(maxAttempts)
-.startLogErrorsCnt(startLogErrorsCnt);
+  .rpcTimeout(rpcTimeoutNs, TimeUnit.NANOSECONDS)
+  .operationTimeout(operationTimeoutNs, TimeUnit.NANOSECONDS)
+  .pause(pauseNs, TimeUnit.NANOSECONDS).maxAttempts(maxAttempts)
+  .startLogErrorsCnt(startLogErrorsCnt);
   }
 
   private  SingleRequestCallerBuilder newCaller(Row row, long 
rpcTimeoutNs) {
 return newCaller(row.getRow(), rpcTimeoutNs);
   }
 
+  private CompletableFuture<Result> get(Get get, int replicaId, long timeoutNs) {
+    return this.<Result> newCaller(get, timeoutNs)
+      .action((controller, loc, stub) -> RawAsyncTableImpl
+        .<Get, GetRequest, GetResponse, Result> call(controller, loc, stub, get,
+          RequestConverter::buildGetRequest, (s, c, req, done) -> s.get(c, req, done),
+          (c, resp) -> ProtobufUtil.toResult(resp.getResult(), c.cellScanner())))
+      .replicaId(replicaId).call();
+  }
+
+  // Connect the two futures, if the src future is done, then mark the dst 
future as done. And if
+  // the dst future is done, then cancel the src future. This is used for 
timeline consistent read.
+  private <T> void connect(CompletableFuture<T> srcFuture, CompletableFuture<T> dstFuture) {
+addListener(srcFuture, (r, e) -> {
+  if (e != null) {
+dstFuture.completeExceptionally(e);
+  } else {
+dstFuture.complete(r);
+  }
+});
+// The cancellation may be a dummy one as the dstFuture may be completed 
by this srcFuture.
+// 

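From the user side, the replica support surfaces through timeline-consistent reads; a minimal sketch (illustrative only, assuming an AsyncTable obtained from an AsyncConnection):

  import java.util.concurrent.CompletableFuture;
  import org.apache.hadoop.hbase.client.AsyncTable;
  import org.apache.hadoop.hbase.client.Consistency;
  import org.apache.hadoop.hbase.client.Get;
  import org.apache.hadoop.hbase.client.Result;

  public final class TimelineGetSketch {
    static CompletableFuture<Result> timelineGet(AsyncTable<?> table, byte[] row) {
      // TIMELINE consistency lets the caller accept a possibly stale answer from a
      // secondary replica if the primary does not respond quickly enough.
      Get get = new Get(row);
      get.setConsistency(Consistency.TIMELINE);
      return table.get(get);
    }
  }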
[12/15] hbase git commit: HBASE-21515 Also initialize an AsyncClusterConnection in HRegionServer

2019-01-03 Thread zhangduo
HBASE-21515 Also initialize an AsyncClusterConnection in HRegionServer


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6fff6c81
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6fff6c81
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6fff6c81

Branch: refs/heads/HBASE-21512
Commit: 6fff6c81fe1658827159e85845c9d3e595797375
Parents: 3fbdd5b
Author: zhangduo 
Authored: Fri Nov 30 08:23:47 2018 +0800
Committer: Duo Zhang 
Committed: Fri Jan 4 14:31:41 2019 +0800

--
 .../hbase/client/AsyncClusterConnection.java| 38 
 .../hbase/client/AsyncConnectionImpl.java   | 39 ++--
 .../hbase/client/ClusterConnectionFactory.java  | 63 
 .../hadoop/hbase/client/ConnectionFactory.java  |  5 +-
 .../hadoop/hbase/util/ReflectionUtils.java  | 22 ---
 .../java/org/apache/hadoop/hbase/Server.java| 20 +++
 .../org/apache/hadoop/hbase/master/HMaster.java |  3 +
 .../hbase/regionserver/HRegionServer.java   | 56 -
 .../regionserver/ReplicationSyncUp.java |  6 ++
 .../hadoop/hbase/MockRegionServerServices.java  |  5 ++
 .../client/TestAsyncNonMetaRegionLocator.java   |  2 +-
 ...syncNonMetaRegionLocatorConcurrenyLimit.java |  2 +-
 .../client/TestAsyncRegionLocatorTimeout.java   |  2 +-
 ...TestAsyncSingleRequestRpcRetryingCaller.java |  4 +-
 .../hbase/client/TestAsyncTableNoncedRetry.java |  2 +-
 .../hbase/master/MockNoopMasterServices.java|  6 ++
 .../hadoop/hbase/master/MockRegionServer.java   |  5 ++
 .../hbase/master/TestActiveMasterManager.java   |  6 ++
 .../hbase/master/cleaner/TestHFileCleaner.java  |  6 ++
 .../master/cleaner/TestHFileLinkCleaner.java|  6 ++
 .../hbase/master/cleaner/TestLogsCleaner.java   |  6 ++
 .../cleaner/TestReplicationHFileCleaner.java|  6 ++
 .../regionserver/TestHeapMemoryManager.java |  6 ++
 .../hbase/regionserver/TestSplitLogWorker.java  |  6 ++
 .../hbase/regionserver/TestWALLockup.java   |  6 ++
 .../TestReplicationTrackerZKImpl.java   |  6 ++
 .../TestReplicationSourceManager.java   |  6 ++
 .../security/token/TestTokenAuthentication.java |  6 ++
 .../apache/hadoop/hbase/util/MockServer.java|  6 ++
 29 files changed, 302 insertions(+), 50 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6fff6c81/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncClusterConnection.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncClusterConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncClusterConnection.java
new file mode 100644
index 000..c7dea25
--- /dev/null
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncClusterConnection.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import org.apache.hadoop.hbase.ipc.RpcClient;
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * The asynchronous connection for internal usage.
+ */
+@InterfaceAudience.Private
+public interface AsyncClusterConnection extends AsyncConnection {
+
+  /**
+   * Get the nonce generator for this connection.
+   */
+  NonceGenerator getNonceGenerator();
+
+  /**
+   * Get the rpc client we used to communicate with other servers.
+   */
+  RpcClient getRpcClient();
+}

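A minimal sketch of what the new interface offers internal callers (illustrative; it uses only the two accessors declared above):

  import org.apache.hadoop.hbase.client.AsyncClusterConnection;
  import org.apache.hadoop.hbase.client.NonceGenerator;
  import org.apache.hadoop.hbase.ipc.RpcClient;

  public final class ClusterConnectionSketch {
    static void inspect(AsyncClusterConnection conn) {
      NonceGenerator nonces = conn.getNonceGenerator(); // nonce generator shared by this connection
      RpcClient rpcClient = conn.getRpcClient();        // rpc client used to reach other servers
      System.out.println(nonces + " / " + rpcClient);
    }
  }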
http://git-wip-us.apache.org/repos/asf/hbase/blob/6fff6c81/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java
index 361d5b2..188e830 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java
@@ 

[14/15] hbase git commit: HBASE-21526 Use AsyncClusterConnection in ServerManager for getRsAdmin

2019-01-03 Thread zhangduo
HBASE-21526 Use AsyncClusterConnection in ServerManager for getRsAdmin


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0d051abb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0d051abb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0d051abb

Branch: refs/heads/HBASE-21512
Commit: 0d051abb71d2f8b0846c078ce7ce452c4372aa33
Parents: 721af37
Author: zhangduo 
Authored: Thu Dec 6 21:25:34 2018 +0800
Committer: Duo Zhang 
Committed: Fri Jan 4 14:34:02 2019 +0800

--
 .../hbase/client/AsyncClusterConnection.java|   6 +
 .../hbase/client/AsyncConnectionImpl.java   |   5 +
 .../hbase/client/AsyncRegionServerAdmin.java| 210 +++
 .../apache/hadoop/hbase/util/FutureUtils.java   |   2 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |  15 +-
 .../hadoop/hbase/master/ServerManager.java  |  67 --
 .../master/procedure/RSProcedureDispatcher.java |  44 ++--
 7 files changed, 263 insertions(+), 86 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0d051abb/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncClusterConnection.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncClusterConnection.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncClusterConnection.java
index c7dea25..1327fd7 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncClusterConnection.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncClusterConnection.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.ipc.RpcClient;
 import org.apache.yetus.audience.InterfaceAudience;
 
@@ -27,6 +28,11 @@ import org.apache.yetus.audience.InterfaceAudience;
 public interface AsyncClusterConnection extends AsyncConnection {
 
   /**
+   * Get the admin service for the given region server.
+   */
+  AsyncRegionServerAdmin getRegionServerAdmin(ServerName serverName);
+
+  /**
* Get the nonce generator for this connection.
*/
   NonceGenerator getNonceGenerator();

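For example, master-side code holding an AsyncClusterConnection might reach a specific region server like this (a sketch only; the RequestConverter helper is the standard hbase-client one and is not part of this patch):

  import java.util.concurrent.CompletableFuture;
  import org.apache.hadoop.hbase.ServerName;
  import org.apache.hadoop.hbase.client.AsyncClusterConnection;
  import org.apache.hadoop.hbase.client.AsyncRegionServerAdmin;
  import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
  import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;

  public final class RsAdminSketch {
    static CompletableFuture<GetRegionInfoResponse> regionInfo(AsyncClusterConnection conn,
        ServerName server, byte[] regionName) {
      // Obtain an admin stub for the target region server and issue an async call.
      AsyncRegionServerAdmin admin = conn.getRegionServerAdmin(server);
      return admin.getRegionInfo(RequestConverter.buildGetRegionInfoRequest(regionName));
    }
  }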
http://git-wip-us.apache.org/repos/asf/hbase/blob/0d051abb/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java
index 188e830..4e7f421 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java
@@ -330,4 +330,9 @@ class AsyncConnectionImpl implements AsyncClusterConnection 
{
 return new AsyncBufferedMutatorBuilderImpl(connConf, 
getTableBuilder(tableName, pool),
   RETRY_TIMER);
   }
+
+  @Override
+  public AsyncRegionServerAdmin getRegionServerAdmin(ServerName serverName) {
+return new AsyncRegionServerAdmin(serverName, this);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/0d051abb/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionServerAdmin.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionServerAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionServerAdmin.java
new file mode 100644
index 000..9accd89
--- /dev/null
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionServerAdmin.java
@@ -0,0 +1,210 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import java.io.IOException;
+import java.util.concurrent.CompletableFuture;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.ipc.HBaseRpcController;
+import 

[04/15] hbase git commit: HBASE-21652 Refactor ThriftServer making thrift2 server inherited from thrift1 server

2019-01-03 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/e4b6b4af/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
index fc00327..6d11ac6 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
@@ -18,16 +18,132 @@
 
 package org.apache.hadoop.hbase.thrift;
 
+import static org.apache.hadoop.hbase.thrift.Constants.BACKLOG_CONF_DEAFULT;
+import static org.apache.hadoop.hbase.thrift.Constants.BACKLOG_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.BIND_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.BIND_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.COMPACT_CONF_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.COMPACT_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.COMPACT_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.DEFAULT_BIND_ADDR;
+import static 
org.apache.hadoop.hbase.thrift.Constants.DEFAULT_HTTP_MAX_HEADER_SIZE;
+import static org.apache.hadoop.hbase.thrift.Constants.DEFAULT_LISTEN_PORT;
+import static org.apache.hadoop.hbase.thrift.Constants.FRAMED_CONF_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.FRAMED_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.FRAMED_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.HTTP_MAX_THREADS_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.HTTP_MAX_THREADS_KEY_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.HTTP_MIN_THREADS_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.HTTP_MIN_THREADS_KEY_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.INFOPORT_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.KEEP_ALIVE_SEC_OPTION;
+import static 
org.apache.hadoop.hbase.thrift.Constants.MAX_FRAME_SIZE_CONF_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.MAX_FRAME_SIZE_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.MAX_QUEUE_SIZE_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.MAX_WORKERS_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.MIN_WORKERS_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.PORT_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.PORT_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.READ_TIMEOUT_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.SELECTOR_NUM_OPTION;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_DNS_INTERFACE_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_DNS_NAMESERVER_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_FILTERS;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_HTTP_ALLOW_OPTIONS_METHOD;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_HTTP_ALLOW_OPTIONS_METHOD_DEFAULT;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_INFO_SERVER_BINDING_ADDRESS;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_INFO_SERVER_BINDING_ADDRESS_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_INFO_SERVER_PORT;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_INFO_SERVER_PORT_DEFAULT;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_KERBEROS_PRINCIPAL_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_KEYTAB_FILE_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_QOP_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SELECTOR_NUM;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SERVER_SOCKET_READ_TIMEOUT_DEFAULT;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_ENABLED_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_EXCLUDE_CIPHER_SUITES_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_EXCLUDE_PROTOCOLS_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_INCLUDE_CIPHER_SUITES_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_INCLUDE_PROTOCOLS_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_KEYSTORE_KEYPASSWORD_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_KEYSTORE_PASSWORD_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_KEYSTORE_STORE_KEY;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_SUPPORT_PROXYUSER_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.USE_HTTP_CONF_KEY;
+
+import 

[10/15] hbase git commit: HBASE-21670 Add 2.0.4 to download page

2019-01-03 Thread zhangduo
HBASE-21670 Add 2.0.4 to download page


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c3005653
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c3005653
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c3005653

Branch: refs/heads/HBASE-21512
Commit: c300565311edfa056ddd5abd5480bc7f93fe07a2
Parents: 466fa92
Author: stack 
Authored: Thu Jan 3 21:35:14 2019 -0800
Committer: stack 
Committed: Thu Jan 3 21:35:14 2019 -0800

--
 src/site/xdoc/downloads.xml | 13 ++---
 1 file changed, 6 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c3005653/src/site/xdoc/downloads.xml
--
diff --git a/src/site/xdoc/downloads.xml b/src/site/xdoc/downloads.xml
index 4bb7f51..2859779 100644
--- a/src/site/xdoc/downloads.xml
+++ b/src/site/xdoc/downloads.xml
@@ -32,7 +32,6 @@ under the License.
   https://www.apache.org/dyn/closer.cgi#verify;>Verify The Integrity 
Of The Files for
   how to verify your mirrored downloads.
   
-  NOTE: 2.1.1 and 2.0.3 have a serious memory leak. See HBASE-21551. We are working on replacement releases.
   
   
 
@@ -67,23 +66,23 @@ under the License.
 
 
   
-2.0.3
+2.0.4
   
   
 2018/12/02
   
   
-https://apache.org/dist/hbase/2.0.3/compat-check-report-2.0.2-vs-2.0.3.html;>2.0.2
 vs 2.0.3
+https://apache.org/dist/hbase/2.0.4/compatibility_report_2.0.3vs2.0.4.html;>2.0.3
 vs 2.0.4
   
   
-https://apache.org/dist/hbase/2.0.3/CHANGES.md;>Changes
+https://apache.org/dist/hbase/2.0.4/CHANGES.md;>Changes
   
   
-https://apache.org/dist/hbase/2.0.3/RELEASENOTES.md;>Release 
Notes
+https://apache.org/dist/hbase/2.0.4/RELEASENOTES.md;>Release 
Notes
   
   
-https://apache.org/dist/hbase/2.0.3/hbase-2.0.3-src.tar.gz;>src (https://apache.org/dist/hbase/2.0.3/hbase-2.0.3-src.tar.gz.sha512;>sha512
 https://apache.org/dist/hbase/2.0.3/hbase-2.0.3-src.tar.gz.asc;>asc) 

-https://apache.org/dist/hbase/2.0.3/hbase-2.0.3-bin.tar.gz;>bin (https://apache.org/dist/hbase/2.0.3/hbase-2.0.3-src.tar.gz.sha512;>sha512
 https://apache.org/dist/hbase/2.0.3/hbase-2.0.3-src.tar.gz.asc;>asc)
+https://apache.org/dist/hbase/2.0.4/hbase-2.0.4-src.tar.gz;>src (https://apache.org/dist/hbase/2.0.4/hbase-2.0.4-src.tar.gz.sha512;>sha512
 https://apache.org/dist/hbase/2.0.4/hbase-2.0.4-src.tar.gz.asc;>asc) 

+https://apache.org/dist/hbase/2.0.4/hbase-2.0.4-bin.tar.gz;>bin (https://apache.org/dist/hbase/2.0.4/hbase-2.0.4-src.tar.gz.sha512;>sha512
 https://apache.org/dist/hbase/2.0.4/hbase-2.0.4-src.tar.gz.asc;>asc)
   
 
 



[02/15] hbase git commit: HBASE-21652 Refactor ThriftServer making thrift2 server inherited from thrift1 server

2019-01-03 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/e4b6b4af/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
index 5681569..fa3d39d 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
@@ -18,355 +18,86 @@
  */
 package org.apache.hadoop.hbase.thrift2;
 
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.UnknownHostException;
-import java.security.PrivilegedAction;
-import java.util.Map;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.SynchronousQueue;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
+import static org.apache.hadoop.hbase.thrift.Constants.READONLY_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_READONLY_ENABLED;
+import static 
org.apache.hadoop.hbase.thrift.Constants.THRIFT_READONLY_ENABLED_DEFAULT;
 
-import javax.security.auth.callback.Callback;
-import javax.security.auth.callback.UnsupportedCallbackException;
-import javax.security.sasl.AuthorizeCallback;
-import javax.security.sasl.SaslServer;
+import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.filter.ParseFilter;
-import org.apache.hadoop.hbase.http.InfoServer;
-import org.apache.hadoop.hbase.security.SaslUtil;
-import org.apache.hadoop.hbase.security.SecurityUtil;
 import org.apache.hadoop.hbase.security.UserProvider;
-import org.apache.hadoop.hbase.thrift.CallQueue;
-import org.apache.hadoop.hbase.thrift.THBaseThreadPoolExecutor;
-import org.apache.hadoop.hbase.thrift.ThriftMetrics;
+import org.apache.hadoop.hbase.thrift.HBaseServiceHandler;
+import org.apache.hadoop.hbase.thrift.HbaseHandlerMetricsProxy;
 import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
-import org.apache.hadoop.hbase.util.DNS;
-import org.apache.hadoop.hbase.util.JvmPauseMonitor;
-import org.apache.hadoop.hbase.util.Strings;
-import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.thrift.TException;
 import org.apache.thrift.TProcessor;
-import org.apache.thrift.protocol.TBinaryProtocol;
-import org.apache.thrift.protocol.TCompactProtocol;
-import org.apache.thrift.protocol.TProtocol;
-import org.apache.thrift.protocol.TProtocolFactory;
-import org.apache.thrift.server.THsHaServer;
-import org.apache.thrift.server.TNonblockingServer;
-import org.apache.thrift.server.TServer;
-import org.apache.thrift.server.TThreadPoolServer;
-import org.apache.thrift.server.TThreadedSelectorServer;
-import org.apache.thrift.transport.TFramedTransport;
-import org.apache.thrift.transport.TNonblockingServerSocket;
-import org.apache.thrift.transport.TNonblockingServerTransport;
-import org.apache.thrift.transport.TSaslServerTransport;
-import org.apache.thrift.transport.TServerSocket;
-import org.apache.thrift.transport.TServerTransport;
-import org.apache.thrift.transport.TTransportException;
-import org.apache.thrift.transport.TTransportFactory;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
-import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLineParser;
-import org.apache.hbase.thirdparty.org.apache.commons.cli.DefaultParser;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
-import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;
-import org.apache.hbase.thirdparty.org.apache.commons.cli.OptionGroup;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
-import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
 
 /**
  * ThriftServer - this class starts up a Thrift server which implements the 
HBase API specified in
  * the HbaseClient.thrift IDL file.
  */
+@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = 
"NM_SAME_SIMPLE_NAME_AS_SUPERCLASS",
+justification = "Change the name will be an incompatible change, will do 
it later")
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
 @SuppressWarnings({ "rawtypes", "unchecked" })
-public class 
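
The @SuppressWarnings added above targets findbugs' NM_SAME_SIMPLE_NAME_AS_SUPERCLASS
rule, which fires whenever a subclass reuses its superclass's simple name from another
package -- exactly the situation this commit creates once the thrift2 ThriftServer
extends the thrift1 ThriftServer. A minimal illustration of that naming pattern, with
hypothetical packages and methods (not the actual HBase class hierarchy):

  // File: demo/thrift/ThriftServer.java
  package demo.thrift;

  public class ThriftServer {
    protected String describe() {
      return "thrift1 server";
    }
  }

  // File: demo/thrift2/ThriftServer.java -- same simple name, different package.
  // The real HBase class additionally carries
  // @edu.umd.cs.findbugs.annotations.SuppressWarnings("NM_SAME_SIMPLE_NAME_AS_SUPERCLASS")
  // to silence the resulting findbugs warning.
  package demo.thrift2;

  public class ThriftServer extends demo.thrift.ThriftServer {
    @Override
    protected String describe() {
      return "thrift2 server, layered on " + super.describe();
    }
  }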

[07/15] hbase git commit: HBASE-17356 Add replica get support

2019-01-03 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/db66e6cc/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
index 4f73909..869a630 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.client;
 
 import static org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
+import static org.apache.hadoop.hbase.util.FutureUtils.addListener;
 
 import com.google.protobuf.Message;
 import com.google.protobuf.RpcChannel;
@@ -491,23 +492,23 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   @Override
   public CompletableFuture getDescriptor(TableName tableName) 
{
 CompletableFuture future = new CompletableFuture<>();
-this.> newMasterCaller()
-.action(
-  (controller, stub) -> this
-  .> call(
-controller, stub, 
RequestConverter.buildGetTableDescriptorsRequest(tableName), (s,
-c, req, done) -> s.getTableDescriptors(c, req, done), 
(resp) -> resp
-.getTableSchemaList())).call().whenComplete((tableSchemas, 
error) -> {
-  if (error != null) {
-future.completeExceptionally(error);
-return;
-  }
-  if (!tableSchemas.isEmpty()) {
-
future.complete(ProtobufUtil.toTableDescriptor(tableSchemas.get(0)));
-  } else {
-future.completeExceptionally(new 
TableNotFoundException(tableName.getNameAsString()));
-  }
-});
+addListener(this.> newMasterCaller()
+  .action((controller, stub) -> this
+.> call(
+  controller, stub, 
RequestConverter.buildGetTableDescriptorsRequest(tableName),
+  (s, c, req, done) -> s.getTableDescriptors(c, req, done),
+  (resp) -> resp.getTableSchemaList()))
+  .call(), (tableSchemas, error) -> {
+if (error != null) {
+  future.completeExceptionally(error);
+  return;
+}
+if (!tableSchemas.isEmpty()) {
+  future.complete(ProtobufUtil.toTableDescriptor(tableSchemas.get(0)));
+} else {
+  future.completeExceptionally(new 
TableNotFoundException(tableName.getNameAsString()));
+}
+  });
 return future;
   }
 
@@ -590,7 +591,7 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   @Override
   public CompletableFuture isTableEnabled(TableName tableName) {
 CompletableFuture future = new CompletableFuture<>();
-AsyncMetaTableAccessor.getTableState(metaTable, 
tableName).whenComplete((state, error) -> {
+addListener(AsyncMetaTableAccessor.getTableState(metaTable, tableName), 
(state, error) -> {
   if (error != null) {
 future.completeExceptionally(error);
 return;
@@ -607,7 +608,7 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   @Override
   public CompletableFuture isTableDisabled(TableName tableName) {
 CompletableFuture future = new CompletableFuture<>();
-AsyncMetaTableAccessor.getTableState(metaTable, 
tableName).whenComplete((state, error) -> {
+addListener(AsyncMetaTableAccessor.getTableState(metaTable, tableName), 
(state, error) -> {
   if (error != null) {
 future.completeExceptionally(error);
 return;
@@ -636,40 +637,37 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   private CompletableFuture isTableAvailable(TableName tableName,
   Optional splitKeys) {
 CompletableFuture future = new CompletableFuture<>();
-isTableEnabled(tableName).whenComplete(
-  (enabled, error) -> {
-if (error != null) {
-  future.completeExceptionally(error);
-  return;
-}
-if (!enabled) {
-  future.complete(false);
-} else {
-  AsyncMetaTableAccessor.getTableHRegionLocations(metaTable, 
Optional.of(tableName))
-  .whenComplete(
-(locations, error1) -> {
-  if (error1 != null) {
-future.completeExceptionally(error1);
-return;
-  }
-  List notDeployedRegions =
-  locations.stream().filter(loc -> loc.getServerName() == 
null)
-  .collect(Collectors.toList());
-  if (notDeployedRegions.size() > 0) {
-if (LOG.isDebugEnabled()) {
-  LOG.debug("Table " + tableName + " has " + 
notDeployedRegions.size()
-  + " regions");
-}
-future.complete(false);
-return;
-  }
+
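
The rewrite above replaces nested CompletableFuture.whenComplete chains with a flat
addListener(future, callback) helper, pulled in by the new static import of
org.apache.hadoop.hbase.util.FutureUtils.addListener at the top of the hunk. A
standalone sketch of that pattern, assuming only the JDK (the real FutureUtils helper
is more defensive than this):

  import java.util.concurrent.CompletableFuture;
  import java.util.function.BiConsumer;

  public final class FutureUtilsSketch {
    // Attach a completion callback without nesting further stages.
    public static <T> void addListener(CompletableFuture<T> future,
        BiConsumer<? super T, ? super Throwable> listener) {
      future.whenComplete(listener);
    }

    public static void main(String[] args) {
      CompletableFuture<String> result = new CompletableFuture<>();
      CompletableFuture<String> source =
          CompletableFuture.supplyAsync(() -> "tableDescriptor");
      // Same shape as the refactored getDescriptor(): forward the value or the error.
      addListener(source, (value, error) -> {
        if (error != null) {
          result.completeExceptionally(error);
          return;
        }
        result.complete(value);
      });
      System.out.println(result.join());
    }
  }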

hbase git commit: HBASE-21630 [shell] Define ENDKEY == STOPROW (we have ENDROW)

2019-01-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 a12bc94ac -> 2ba1da0ca


HBASE-21630 [shell] Define ENDKEY == STOPROW (we have ENDROW)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2ba1da0c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2ba1da0c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2ba1da0c

Branch: refs/heads/branch-2.0
Commit: 2ba1da0cac29700d76d2fa0ef16d19d30e2a2ce6
Parents: a12bc94
Author: Nihal Jain 
Authored: Wed Jan 2 01:24:08 2019 +0530
Committer: stack 
Committed: Thu Jan 3 22:00:02 2019 -0800

--
 hbase-shell/src/main/ruby/hbase_constants.rb  |  2 ++
 hbase-shell/src/test/ruby/hbase/table_test.rb | 36 --
 2 files changed, 35 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2ba1da0c/hbase-shell/src/main/ruby/hbase_constants.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase_constants.rb 
b/hbase-shell/src/main/ruby/hbase_constants.rb
index 28484cb..554e738 100644
--- a/hbase-shell/src/main/ruby/hbase_constants.rb
+++ b/hbase-shell/src/main/ruby/hbase_constants.rb
@@ -43,8 +43,10 @@ module HBaseConstants
   METADATA = org.apache.hadoop.hbase.HConstants::METADATA
   STOPROW = 'STOPROW'.freeze
   STARTROW = 'STARTROW'.freeze
+  STARTKEY = STARTROW
   ROWPREFIXFILTER = 'ROWPREFIXFILTER'.freeze
   ENDROW = STOPROW
+  ENDKEY = STOPROW
   RAW = 'RAW'.freeze
   LIMIT = 'LIMIT'.freeze
   METHOD = 'METHOD'.freeze

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ba1da0c/hbase-shell/src/test/ruby/hbase/table_test.rb
--
diff --git a/hbase-shell/src/test/ruby/hbase/table_test.rb 
b/hbase-shell/src/test/ruby/hbase/table_test.rb
index 5ec317a..b3343e2 100644
--- a/hbase-shell/src/test/ruby/hbase/table_test.rb
+++ b/hbase-shell/src/test/ruby/hbase/table_test.rb
@@ -476,6 +476,16 @@ module Hbase
   assert_not_nil(res['2']['x:b'])
 end
 
+define_test "scan should support STARTKEY parameter" do
+  res = @test_table._scan_internal STARTKEY => '2'
+  assert_not_nil(res)
+  assert_kind_of(Hash, res)
+  assert_nil(res['1'])
+  assert_not_nil(res['2'])
+  assert_not_nil(res['2']['x:a'])
+  assert_not_nil(res['2']['x:b'])
+end
+
 define_test "scan should support STOPROW parameter" do
   res = @test_table._scan_internal STOPROW => '2'
   assert_not_nil(res)
@@ -486,7 +496,27 @@ module Hbase
   assert_nil(res['2'])
 end
 
-define_test "scan should support ROWPREFIXFILTER parameter (test 1)" do
+define_test "scan should support ENDROW parameter" do
+  res = @test_table._scan_internal ENDROW => '2'
+  assert_not_nil(res)
+  assert_kind_of(Hash, res)
+  assert_not_nil(res['1'])
+  assert_not_nil(res['1']['x:a'])
+  assert_not_nil(res['1']['x:b'])
+  assert_nil(res['2'])
+end
+
+define_test "scan should support ENDKEY parameter" do
+  res = @test_table._scan_internal ENDKEY => '2'
+  assert_not_nil(res)
+  assert_kind_of(Hash, res)
+  assert_not_nil(res['1'])
+  assert_not_nil(res['1']['x:a'])
+  assert_not_nil(res['1']['x:b'])
+  assert_nil(res['2'])
+end
+
+define_test 'scan should support ROWPREFIXFILTER parameter (test 1)' do
   res = @test_table._scan_internal ROWPREFIXFILTER => '1'
   assert_not_nil(res)
   assert_kind_of(Hash, res)
@@ -496,7 +526,7 @@ module Hbase
   assert_nil(res['2'])
 end
 
-define_test "scan should support ROWPREFIXFILTER parameter (test 2)" do
+define_test 'scan should support ROWPREFIXFILTER parameter (test 2)' do
   res = @test_table._scan_internal ROWPREFIXFILTER => '2'
   assert_not_nil(res)
   assert_kind_of(Hash, res)
@@ -506,7 +536,7 @@ module Hbase
   assert_not_nil(res['2']['x:b'])
 end
 
-define_test "scan should support LIMIT parameter" do
+define_test 'scan should support LIMIT parameter' do
   res = @test_table._scan_internal LIMIT => 1
   assert_not_nil(res)
   assert_kind_of(Hash, res)
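
The STARTKEY and ENDKEY aliases added above are shell-level names for the same
start/stop-row bounds the Java client exposes on Scan. For comparison, a small sketch
against the public 2.x client API (illustration only, not part of this commit):

  import org.apache.hadoop.hbase.client.Scan;
  import org.apache.hadoop.hbase.util.Bytes;

  public class ScanBoundsExample {
    public static void main(String[] args) {
      // Roughly the Java equivalent of: scan 't1', { STARTKEY => '1', ENDKEY => '2' }
      Scan scan = new Scan()
          .withStartRow(Bytes.toBytes("1"))   // inclusive start row
          .withStopRow(Bytes.toBytes("2"));   // exclusive stop row
      System.out.println(scan);
    }
  }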



hbase git commit: HBASE-21630 [shell] Define ENDKEY == STOPROW (we have ENDROW)

2019-01-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.1 24c8fd02c -> 184cff0d4


HBASE-21630 [shell] Define ENDKEY == STOPROW (we have ENDROW)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/184cff0d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/184cff0d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/184cff0d

Branch: refs/heads/branch-2.1
Commit: 184cff0d4d7ba83aa0662ca5647fbd6b4974dfd0
Parents: 24c8fd0
Author: Nihal Jain 
Authored: Wed Jan 2 01:24:08 2019 +0530
Committer: stack 
Committed: Thu Jan 3 21:59:45 2019 -0800

--
 hbase-shell/src/main/ruby/hbase_constants.rb  |  2 ++
 hbase-shell/src/test/ruby/hbase/table_test.rb | 36 --
 2 files changed, 35 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/184cff0d/hbase-shell/src/main/ruby/hbase_constants.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase_constants.rb 
b/hbase-shell/src/main/ruby/hbase_constants.rb
index 1a2c732..9cfe0c1 100644
--- a/hbase-shell/src/main/ruby/hbase_constants.rb
+++ b/hbase-shell/src/main/ruby/hbase_constants.rb
@@ -43,8 +43,10 @@ module HBaseConstants
   METADATA = org.apache.hadoop.hbase.HConstants::METADATA
   STOPROW = 'STOPROW'.freeze
   STARTROW = 'STARTROW'.freeze
+  STARTKEY = STARTROW
   ROWPREFIXFILTER = 'ROWPREFIXFILTER'.freeze
   ENDROW = STOPROW
+  ENDKEY = STOPROW
   RAW = 'RAW'.freeze
   LIMIT = 'LIMIT'.freeze
   METHOD = 'METHOD'.freeze

http://git-wip-us.apache.org/repos/asf/hbase/blob/184cff0d/hbase-shell/src/test/ruby/hbase/table_test.rb
--
diff --git a/hbase-shell/src/test/ruby/hbase/table_test.rb 
b/hbase-shell/src/test/ruby/hbase/table_test.rb
index 5ec317a..b3343e2 100644
--- a/hbase-shell/src/test/ruby/hbase/table_test.rb
+++ b/hbase-shell/src/test/ruby/hbase/table_test.rb
@@ -476,6 +476,16 @@ module Hbase
   assert_not_nil(res['2']['x:b'])
 end
 
+define_test "scan should support STARTKEY parameter" do
+  res = @test_table._scan_internal STARTKEY => '2'
+  assert_not_nil(res)
+  assert_kind_of(Hash, res)
+  assert_nil(res['1'])
+  assert_not_nil(res['2'])
+  assert_not_nil(res['2']['x:a'])
+  assert_not_nil(res['2']['x:b'])
+end
+
 define_test "scan should support STOPROW parameter" do
   res = @test_table._scan_internal STOPROW => '2'
   assert_not_nil(res)
@@ -486,7 +496,27 @@ module Hbase
   assert_nil(res['2'])
 end
 
-define_test "scan should support ROWPREFIXFILTER parameter (test 1)" do
+define_test "scan should support ENDROW parameter" do
+  res = @test_table._scan_internal ENDROW => '2'
+  assert_not_nil(res)
+  assert_kind_of(Hash, res)
+  assert_not_nil(res['1'])
+  assert_not_nil(res['1']['x:a'])
+  assert_not_nil(res['1']['x:b'])
+  assert_nil(res['2'])
+end
+
+define_test "scan should support ENDKEY parameter" do
+  res = @test_table._scan_internal ENDKEY => '2'
+  assert_not_nil(res)
+  assert_kind_of(Hash, res)
+  assert_not_nil(res['1'])
+  assert_not_nil(res['1']['x:a'])
+  assert_not_nil(res['1']['x:b'])
+  assert_nil(res['2'])
+end
+
+define_test 'scan should support ROWPREFIXFILTER parameter (test 1)' do
   res = @test_table._scan_internal ROWPREFIXFILTER => '1'
   assert_not_nil(res)
   assert_kind_of(Hash, res)
@@ -496,7 +526,7 @@ module Hbase
   assert_nil(res['2'])
 end
 
-define_test "scan should support ROWPREFIXFILTER parameter (test 2)" do
+define_test 'scan should support ROWPREFIXFILTER parameter (test 2)' do
   res = @test_table._scan_internal ROWPREFIXFILTER => '2'
   assert_not_nil(res)
   assert_kind_of(Hash, res)
@@ -506,7 +536,7 @@ module Hbase
   assert_not_nil(res['2']['x:b'])
 end
 
-define_test "scan should support LIMIT parameter" do
+define_test 'scan should support LIMIT parameter' do
   res = @test_table._scan_internal LIMIT => 1
   assert_not_nil(res)
   assert_kind_of(Hash, res)



hbase git commit: HBASE-21630 [shell] Define ENDKEY == STOPROW (we have ENDROW)

2019-01-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 90b9ed152 -> 97219f1ac


HBASE-21630 [shell] Define ENDKEY == STOPROW (we have ENDROW)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/97219f1a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/97219f1a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/97219f1a

Branch: refs/heads/branch-2
Commit: 97219f1aceed4c48c77e28b1d1fbf36814e59539
Parents: 90b9ed1
Author: Nihal Jain 
Authored: Wed Jan 2 01:24:08 2019 +0530
Committer: stack 
Committed: Thu Jan 3 21:59:18 2019 -0800

--
 hbase-shell/src/main/ruby/hbase_constants.rb  |  2 ++
 hbase-shell/src/test/ruby/hbase/table_test.rb | 36 --
 2 files changed, 35 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/97219f1a/hbase-shell/src/main/ruby/hbase_constants.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase_constants.rb 
b/hbase-shell/src/main/ruby/hbase_constants.rb
index 1a2c732..9cfe0c1 100644
--- a/hbase-shell/src/main/ruby/hbase_constants.rb
+++ b/hbase-shell/src/main/ruby/hbase_constants.rb
@@ -43,8 +43,10 @@ module HBaseConstants
   METADATA = org.apache.hadoop.hbase.HConstants::METADATA
   STOPROW = 'STOPROW'.freeze
   STARTROW = 'STARTROW'.freeze
+  STARTKEY = STARTROW
   ROWPREFIXFILTER = 'ROWPREFIXFILTER'.freeze
   ENDROW = STOPROW
+  ENDKEY = STOPROW
   RAW = 'RAW'.freeze
   LIMIT = 'LIMIT'.freeze
   METHOD = 'METHOD'.freeze

http://git-wip-us.apache.org/repos/asf/hbase/blob/97219f1a/hbase-shell/src/test/ruby/hbase/table_test.rb
--
diff --git a/hbase-shell/src/test/ruby/hbase/table_test.rb 
b/hbase-shell/src/test/ruby/hbase/table_test.rb
index 5ec317a..b3343e2 100644
--- a/hbase-shell/src/test/ruby/hbase/table_test.rb
+++ b/hbase-shell/src/test/ruby/hbase/table_test.rb
@@ -476,6 +476,16 @@ module Hbase
   assert_not_nil(res['2']['x:b'])
 end
 
+define_test "scan should support STARTKEY parameter" do
+  res = @test_table._scan_internal STARTKEY => '2'
+  assert_not_nil(res)
+  assert_kind_of(Hash, res)
+  assert_nil(res['1'])
+  assert_not_nil(res['2'])
+  assert_not_nil(res['2']['x:a'])
+  assert_not_nil(res['2']['x:b'])
+end
+
 define_test "scan should support STOPROW parameter" do
   res = @test_table._scan_internal STOPROW => '2'
   assert_not_nil(res)
@@ -486,7 +496,27 @@ module Hbase
   assert_nil(res['2'])
 end
 
-define_test "scan should support ROWPREFIXFILTER parameter (test 1)" do
+define_test "scan should support ENDROW parameter" do
+  res = @test_table._scan_internal ENDROW => '2'
+  assert_not_nil(res)
+  assert_kind_of(Hash, res)
+  assert_not_nil(res['1'])
+  assert_not_nil(res['1']['x:a'])
+  assert_not_nil(res['1']['x:b'])
+  assert_nil(res['2'])
+end
+
+define_test "scan should support ENDKEY parameter" do
+  res = @test_table._scan_internal ENDKEY => '2'
+  assert_not_nil(res)
+  assert_kind_of(Hash, res)
+  assert_not_nil(res['1'])
+  assert_not_nil(res['1']['x:a'])
+  assert_not_nil(res['1']['x:b'])
+  assert_nil(res['2'])
+end
+
+define_test 'scan should support ROWPREFIXFILTER parameter (test 1)' do
   res = @test_table._scan_internal ROWPREFIXFILTER => '1'
   assert_not_nil(res)
   assert_kind_of(Hash, res)
@@ -496,7 +526,7 @@ module Hbase
   assert_nil(res['2'])
 end
 
-define_test "scan should support ROWPREFIXFILTER parameter (test 2)" do
+define_test 'scan should support ROWPREFIXFILTER parameter (test 2)' do
   res = @test_table._scan_internal ROWPREFIXFILTER => '2'
   assert_not_nil(res)
   assert_kind_of(Hash, res)
@@ -506,7 +536,7 @@ module Hbase
   assert_not_nil(res['2']['x:b'])
 end
 
-define_test "scan should support LIMIT parameter" do
+define_test 'scan should support LIMIT parameter' do
   res = @test_table._scan_internal LIMIT => 1
   assert_not_nil(res)
   assert_kind_of(Hash, res)



hbase git commit: HBASE-21630 [shell] Define ENDKEY == STOPROW (we have ENDROW)

2019-01-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master c30056531 -> 3fbdd5bbe


HBASE-21630 [shell] Define ENDKEY == STOPROW (we have ENDROW)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3fbdd5bb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3fbdd5bb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3fbdd5bb

Branch: refs/heads/master
Commit: 3fbdd5bbe9937bdb12984275dafced9cb6746a63
Parents: c300565
Author: Nihal Jain 
Authored: Wed Jan 2 01:24:08 2019 +0530
Committer: stack 
Committed: Thu Jan 3 21:58:28 2019 -0800

--
 hbase-shell/src/main/ruby/hbase_constants.rb  |  2 ++
 hbase-shell/src/test/ruby/hbase/table_test.rb | 36 --
 2 files changed, 35 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3fbdd5bb/hbase-shell/src/main/ruby/hbase_constants.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase_constants.rb 
b/hbase-shell/src/main/ruby/hbase_constants.rb
index 4c1ad22..9871685 100644
--- a/hbase-shell/src/main/ruby/hbase_constants.rb
+++ b/hbase-shell/src/main/ruby/hbase_constants.rb
@@ -43,8 +43,10 @@ module HBaseConstants
   METADATA = org.apache.hadoop.hbase.HConstants::METADATA
   STOPROW = 'STOPROW'.freeze
   STARTROW = 'STARTROW'.freeze
+  STARTKEY = STARTROW
   ROWPREFIXFILTER = 'ROWPREFIXFILTER'.freeze
   ENDROW = STOPROW
+  ENDKEY = STOPROW
   RAW = 'RAW'.freeze
   LIMIT = 'LIMIT'.freeze
   METHOD = 'METHOD'.freeze

http://git-wip-us.apache.org/repos/asf/hbase/blob/3fbdd5bb/hbase-shell/src/test/ruby/hbase/table_test.rb
--
diff --git a/hbase-shell/src/test/ruby/hbase/table_test.rb 
b/hbase-shell/src/test/ruby/hbase/table_test.rb
index 5ec317a..b3343e2 100644
--- a/hbase-shell/src/test/ruby/hbase/table_test.rb
+++ b/hbase-shell/src/test/ruby/hbase/table_test.rb
@@ -476,6 +476,16 @@ module Hbase
   assert_not_nil(res['2']['x:b'])
 end
 
+define_test "scan should support STARTKEY parameter" do
+  res = @test_table._scan_internal STARTKEY => '2'
+  assert_not_nil(res)
+  assert_kind_of(Hash, res)
+  assert_nil(res['1'])
+  assert_not_nil(res['2'])
+  assert_not_nil(res['2']['x:a'])
+  assert_not_nil(res['2']['x:b'])
+end
+
 define_test "scan should support STOPROW parameter" do
   res = @test_table._scan_internal STOPROW => '2'
   assert_not_nil(res)
@@ -486,7 +496,27 @@ module Hbase
   assert_nil(res['2'])
 end
 
-define_test "scan should support ROWPREFIXFILTER parameter (test 1)" do
+define_test "scan should support ENDROW parameter" do
+  res = @test_table._scan_internal ENDROW => '2'
+  assert_not_nil(res)
+  assert_kind_of(Hash, res)
+  assert_not_nil(res['1'])
+  assert_not_nil(res['1']['x:a'])
+  assert_not_nil(res['1']['x:b'])
+  assert_nil(res['2'])
+end
+
+define_test "scan should support ENDKEY parameter" do
+  res = @test_table._scan_internal ENDKEY => '2'
+  assert_not_nil(res)
+  assert_kind_of(Hash, res)
+  assert_not_nil(res['1'])
+  assert_not_nil(res['1']['x:a'])
+  assert_not_nil(res['1']['x:b'])
+  assert_nil(res['2'])
+end
+
+define_test 'scan should support ROWPREFIXFILTER parameter (test 1)' do
   res = @test_table._scan_internal ROWPREFIXFILTER => '1'
   assert_not_nil(res)
   assert_kind_of(Hash, res)
@@ -496,7 +526,7 @@ module Hbase
   assert_nil(res['2'])
 end
 
-define_test "scan should support ROWPREFIXFILTER parameter (test 2)" do
+define_test 'scan should support ROWPREFIXFILTER parameter (test 2)' do
   res = @test_table._scan_internal ROWPREFIXFILTER => '2'
   assert_not_nil(res)
   assert_kind_of(Hash, res)
@@ -506,7 +536,7 @@ module Hbase
   assert_not_nil(res['2']['x:b'])
 end
 
-define_test "scan should support LIMIT parameter" do
+define_test 'scan should support LIMIT parameter' do
   res = @test_table._scan_internal LIMIT => 1
   assert_not_nil(res)
   assert_kind_of(Hash, res)



hbase git commit: HBASE-21670 Add 2.0.4 to download page

2019-01-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 466fa920f -> c30056531


HBASE-21670 Add 2.0.4 to download page


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c3005653
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c3005653
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c3005653

Branch: refs/heads/master
Commit: c300565311edfa056ddd5abd5480bc7f93fe07a2
Parents: 466fa92
Author: stack 
Authored: Thu Jan 3 21:35:14 2019 -0800
Committer: stack 
Committed: Thu Jan 3 21:35:14 2019 -0800

--
 src/site/xdoc/downloads.xml | 13 ++---
 1 file changed, 6 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c3005653/src/site/xdoc/downloads.xml
--
diff --git a/src/site/xdoc/downloads.xml b/src/site/xdoc/downloads.xml
index 4bb7f51..2859779 100644
--- a/src/site/xdoc/downloads.xml
+++ b/src/site/xdoc/downloads.xml
@@ -32,7 +32,6 @@ under the License.
   https://www.apache.org/dyn/closer.cgi#verify;>Verify The Integrity 
Of The Files for
   how to verify your mirrored downloads.
   
-  NOTE: 2.1.1 and 2.0.3 have a serious memory 
leak. See HBASE-21551. We are working on replacement releases.
   
   
 
@@ -67,23 +66,23 @@ under the License.
 
 
   
-2.0.3
+2.0.4
   
   
 2018/12/02
   
   
-https://apache.org/dist/hbase/2.0.3/compat-check-report-2.0.2-vs-2.0.3.html;>2.0.2
 vs 2.0.3
+https://apache.org/dist/hbase/2.0.4/compatibility_report_2.0.3vs2.0.4.html;>2.0.3
 vs 2.0.4
   
   
-https://apache.org/dist/hbase/2.0.3/CHANGES.md;>Changes
+https://apache.org/dist/hbase/2.0.4/CHANGES.md;>Changes
   
   
-https://apache.org/dist/hbase/2.0.3/RELEASENOTES.md;>Release 
Notes
+https://apache.org/dist/hbase/2.0.4/RELEASENOTES.md;>Release 
Notes
   
   
-https://apache.org/dist/hbase/2.0.3/hbase-2.0.3-src.tar.gz;>src (https://apache.org/dist/hbase/2.0.3/hbase-2.0.3-src.tar.gz.sha512;>sha512
 https://apache.org/dist/hbase/2.0.3/hbase-2.0.3-src.tar.gz.asc;>asc) 

-https://apache.org/dist/hbase/2.0.3/hbase-2.0.3-bin.tar.gz;>bin (https://apache.org/dist/hbase/2.0.3/hbase-2.0.3-src.tar.gz.sha512;>sha512
 https://apache.org/dist/hbase/2.0.3/hbase-2.0.3-src.tar.gz.asc;>asc)
+https://apache.org/dist/hbase/2.0.4/hbase-2.0.4-src.tar.gz;>src (https://apache.org/dist/hbase/2.0.4/hbase-2.0.4-src.tar.gz.sha512;>sha512
 https://apache.org/dist/hbase/2.0.4/hbase-2.0.4-src.tar.gz.asc;>asc) 

+https://apache.org/dist/hbase/2.0.4/hbase-2.0.4-bin.tar.gz;>bin (https://apache.org/dist/hbase/2.0.4/hbase-2.0.4-src.tar.gz.sha512;>sha512
 https://apache.org/dist/hbase/2.0.4/hbase-2.0.4-src.tar.gz.asc;>asc)
   
 
 



svn commit: r31757 - /dev/hbase/hbase-2.0.4RC1/ /release/hbase/2.0.3/ /release/hbase/2.0.4/

2019-01-03 Thread stack
Author: stack
Date: Fri Jan  4 05:27:58 2019
New Revision: 31757

Log:
Add 2.0.4RC1 as 2.0.4 release and remove 2.0.3

Added:
release/hbase/2.0.4/
  - copied from r31756, dev/hbase/hbase-2.0.4RC1/
Removed:
dev/hbase/hbase-2.0.4RC1/
release/hbase/2.0.3/



hbase git commit: HBASE-21669 Move branch-2.0 to 2.0.5-SNAPSHOT version after release of 2.0.4

2019-01-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 ff23c0221 -> a12bc94ac


HBASE-21669 Move branch-2.0 to 2.0.5-SNAPSHOT version after release of 2.0.4


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a12bc94a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a12bc94a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a12bc94a

Branch: refs/heads/branch-2.0
Commit: a12bc94ac234d146ffd4b9da07c2c7b1001dd38a
Parents: ff23c02
Author: stack 
Authored: Thu Jan 3 21:24:00 2019 -0800
Committer: stack 
Committed: Thu Jan 3 21:24:00 2019 -0800

--
 hbase-annotations/pom.xml| 2 +-
 hbase-archetypes/hbase-archetype-builder/pom.xml | 2 +-
 hbase-archetypes/hbase-client-project/pom.xml| 2 +-
 hbase-archetypes/hbase-shaded-client-project/pom.xml | 2 +-
 hbase-archetypes/pom.xml | 2 +-
 hbase-assembly/pom.xml   | 2 +-
 hbase-build-configuration/pom.xml| 2 +-
 hbase-build-support/hbase-error-prone/pom.xml| 4 ++--
 hbase-build-support/pom.xml  | 2 +-
 hbase-checkstyle/pom.xml | 4 ++--
 hbase-client/pom.xml | 2 +-
 hbase-common/pom.xml | 2 +-
 hbase-endpoint/pom.xml   | 2 +-
 hbase-examples/pom.xml   | 2 +-
 hbase-external-blockcache/pom.xml| 2 +-
 hbase-hadoop-compat/pom.xml  | 2 +-
 hbase-hadoop2-compat/pom.xml | 2 +-
 hbase-http/pom.xml   | 2 +-
 hbase-it/pom.xml | 2 +-
 hbase-mapreduce/pom.xml  | 2 +-
 hbase-metrics-api/pom.xml| 2 +-
 hbase-metrics/pom.xml| 2 +-
 hbase-procedure/pom.xml  | 2 +-
 hbase-protocol-shaded/pom.xml| 2 +-
 hbase-protocol/pom.xml   | 2 +-
 hbase-replication/pom.xml| 2 +-
 hbase-resource-bundle/pom.xml| 2 +-
 hbase-rest/pom.xml   | 2 +-
 hbase-rsgroup/pom.xml| 2 +-
 hbase-server/pom.xml | 2 +-
 hbase-shaded/hbase-shaded-check-invariants/pom.xml   | 2 +-
 hbase-shaded/hbase-shaded-client/pom.xml | 2 +-
 hbase-shaded/hbase-shaded-mapreduce/pom.xml  | 2 +-
 hbase-shaded/pom.xml | 2 +-
 hbase-shell/pom.xml  | 2 +-
 hbase-testing-util/pom.xml   | 2 +-
 hbase-thrift/pom.xml | 2 +-
 hbase-zookeeper/pom.xml  | 2 +-
 pom.xml  | 2 +-
 39 files changed, 41 insertions(+), 41 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a12bc94a/hbase-annotations/pom.xml
--
diff --git a/hbase-annotations/pom.xml b/hbase-annotations/pom.xml
index 5703752..d6fc408 100644
--- a/hbase-annotations/pom.xml
+++ b/hbase-annotations/pom.xml
@@ -23,7 +23,7 @@
   
 hbase
 org.apache.hbase
-2.0.4
+2.0.5-SNAPSHOT
 ..
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a12bc94a/hbase-archetypes/hbase-archetype-builder/pom.xml
--
diff --git a/hbase-archetypes/hbase-archetype-builder/pom.xml 
b/hbase-archetypes/hbase-archetype-builder/pom.xml
index bb54287..d4209da 100644
--- a/hbase-archetypes/hbase-archetype-builder/pom.xml
+++ b/hbase-archetypes/hbase-archetype-builder/pom.xml
@@ -25,7 +25,7 @@
   
 hbase-archetypes
 org.apache.hbase
-2.0.4
+2.0.5-SNAPSHOT
 ..
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a12bc94a/hbase-archetypes/hbase-client-project/pom.xml
--
diff --git a/hbase-archetypes/hbase-client-project/pom.xml 
b/hbase-archetypes/hbase-client-project/pom.xml
index e219515..c810ba9 100644
--- a/hbase-archetypes/hbase-client-project/pom.xml
+++ b/hbase-archetypes/hbase-client-project/pom.xml
@@ -26,7 +26,7 @@
   
 hbase-archetypes
 org.apache.hbase
-2.0.4
+2.0.5-SNAPSHOT
 ..
   
   hbase-client-project

http://git-wip-us.apache.org/repos/asf/hbase/blob/a12bc94a/hbase-archetypes/hbase-shaded-client-project/pom.xml
--
diff --git a/hbase-archetypes/hbase-shaded-client-project/pom.xml 

[hbase] Git Push Summary

2019-01-03 Thread stack
Repository: hbase
Updated Tags:  refs/tags/rel/2.0.4 [created] e8f5dd5a5


hbase git commit: HBASE-21599 Fix a findbugs issue and some incorrect javadoc links

2019-01-03 Thread elserj
Repository: hbase
Updated Branches:
  refs/heads/HBASE-20952 c738e1575 -> 6e709cf16


HBASE-21599 Fix a findbugs issue and some incorrect javadoc links

Signed-off-by: Ankit Singhal 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6e709cf1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6e709cf1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6e709cf1

Branch: refs/heads/HBASE-20952
Commit: 6e709cf16498855038fdcc5371b816d468e308ec
Parents: c738e15
Author: Josh Elser 
Authored: Thu Dec 13 17:31:48 2018 -0500
Committer: Josh Elser 
Committed: Thu Jan 3 12:52:49 2019 -0500

--
 .../regionserver/ReplicationSourceManager.java  |  5 +-
 .../hadoop/hbase/wal/DisabledWALProvider.java   | 52 ++--
 2 files changed, 30 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6e709cf1/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
index b948d7e..dd31a01 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
@@ -94,7 +94,8 @@ import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFacto
  * operations.
  * Need synchronized on {@link #walsById}. There are four methods which 
modify it,
  * {@link #addPeer(String)}, {@link #removePeer(String)},
- * {@link #cleanOldLogs(String, boolean, ReplicationSourceInterface)} and 
{@link #preLogRoll(Path)}.
+ * {@link #cleanOldLogs(String, boolean, ReplicationSourceInterface)} and
+ * {@link #preLogRoll(WALIdentity)}.
  * {@link #walsById} is a ConcurrentHashMap and there is a Lock for peer id in
  * {@link PeerProcedureHandlerImpl}. So there is no race between {@link 
#addPeer(String)} and
  * {@link #removePeer(String)}. {@link #cleanOldLogs(String, boolean, 
ReplicationSourceInterface)}
@@ -102,7 +103,7 @@ import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFacto
  * {@link #removePeer(String)} will terminate the {@link 
ReplicationSourceInterface} firstly, then
  * remove the wals from {@link #walsById}. So no race with {@link 
#removePeer(String)}. The only
  * case need synchronized is {@link #cleanOldLogs(String, boolean, 
ReplicationSourceInterface)} and
- * {@link #preLogRoll(Path)}.
+ * {@link #preLogRoll(WALIdentity)}.
  * No need synchronized on {@link #walsByIdRecoveredQueues}. There are 
three methods which
  * modify it, {@link #removePeer(String)} ,
  * {@link #cleanOldLogs(String, boolean, ReplicationSourceInterface)} and

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e709cf1/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
index 8dee012..8822f29 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
@@ -27,7 +27,6 @@ import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.PrivateCellUtil;
@@ -35,7 +34,6 @@ import org.apache.hadoop.hbase.client.RegionInfo;
 import 
org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;
 import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
 import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
-import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -53,6 +51,32 @@ class DisabledWALProvider implements WALProvider {
 
   private static final Logger LOG = 
LoggerFactory.getLogger(DisabledWALProvider.class);
 
+  private static final WALIdentity DISABLED_WAL_IDENTITY = new WALIdentity() {
+@Override
+public int compareTo(WALIdentity o) {
+  if (equals(o)) {
+return 0;
+  }
+  return 1;
+}
+
+@Override
+public String getName() {
+  return "disabled-wal-id";
+
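
The (truncated) hunk above introduces a constant WALIdentity for the disabled provider,
alongside the removal of the org.apache.hadoop.fs.Path import. A self-contained sketch
of that shape -- the WALIdentity interface is stubbed here, since it only exists on the
HBASE-20952 branch:

  // Stub of the branch-only interface, just enough to mirror the hunk above.
  interface WALIdentity extends Comparable<WALIdentity> {
    String getName();
  }

  class DisabledWalIdentityDemo {
    static final WALIdentity DISABLED_WAL_IDENTITY = new WALIdentity() {
      @Override
      public int compareTo(WALIdentity o) {
        // As in the diff: equal identities compare as 0, anything else sorts after.
        return equals(o) ? 0 : 1;
      }

      @Override
      public String getName() {
        return "disabled-wal-id";
      }
    };

    public static void main(String[] args) {
      System.out.println(DISABLED_WAL_IDENTITY.getName());
    }
  }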

[23/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.html
index 9aa9b59..ac7e0ea 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.html
@@ -28,520 +28,565 @@
 020import static 
org.apache.hadoop.hbase.HConstants.NINES;
 021import static 
org.apache.hadoop.hbase.HConstants.ZEROES;
 022import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-023import static 
org.apache.hadoop.hbase.client.ConnectionUtils.createClosestRowAfter;
-024import static 
org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
-025import static 
org.apache.hadoop.hbase.client.RegionInfo.createRegionName;
-026import static 
org.apache.hadoop.hbase.util.Bytes.BYTES_COMPARATOR;
-027import static 
org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
-028
-029import java.io.IOException;
-030import java.util.Arrays;
-031import java.util.HashSet;
-032import java.util.Iterator;
-033import java.util.LinkedHashMap;
-034import java.util.Map;
-035import java.util.Optional;
-036import java.util.Set;
-037import 
java.util.concurrent.CompletableFuture;
-038import 
java.util.concurrent.ConcurrentHashMap;
-039import 
java.util.concurrent.ConcurrentMap;
-040import 
java.util.concurrent.ConcurrentNavigableMap;
-041import 
java.util.concurrent.ConcurrentSkipListMap;
-042import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-043import 
org.apache.hadoop.hbase.HConstants;
-044import 
org.apache.hadoop.hbase.HRegionLocation;
-045import 
org.apache.hadoop.hbase.MetaTableAccessor;
-046import 
org.apache.hadoop.hbase.RegionLocations;
-047import 
org.apache.hadoop.hbase.TableName;
-048import 
org.apache.hadoop.hbase.TableNotFoundException;
-049import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-050import 
org.apache.hadoop.hbase.util.Bytes;
-051import 
org.apache.yetus.audience.InterfaceAudience;
-052import org.slf4j.Logger;
-053import org.slf4j.LoggerFactory;
-054
-055import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-056
-057/**
-058 * The asynchronous locator for regions 
other than meta.
-059 */
-060@InterfaceAudience.Private
-061class AsyncNonMetaRegionLocator {
-062
-063  private static final Logger LOG = 
LoggerFactory.getLogger(AsyncNonMetaRegionLocator.class);
+023import static 
org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper.canUpdateOnError;
+024import static 
org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper.createRegionLocations;
+025import static 
org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper.isGood;
+026import static 
org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper.mergeRegionLocations;
+027import static 
org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper.removeRegionLocation;
+028import static 
org.apache.hadoop.hbase.client.ConnectionUtils.createClosestRowAfter;
+029import static 
org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+030import static 
org.apache.hadoop.hbase.client.RegionInfo.createRegionName;
+031import static 
org.apache.hadoop.hbase.util.Bytes.BYTES_COMPARATOR;
+032import static 
org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
+033
+034import java.io.IOException;
+035import java.util.Arrays;
+036import java.util.HashSet;
+037import java.util.Iterator;
+038import java.util.LinkedHashMap;
+039import java.util.Map;
+040import java.util.Optional;
+041import java.util.Set;
+042import 
java.util.concurrent.CompletableFuture;
+043import 
java.util.concurrent.ConcurrentHashMap;
+044import 
java.util.concurrent.ConcurrentMap;
+045import 
java.util.concurrent.ConcurrentNavigableMap;
+046import 
java.util.concurrent.ConcurrentSkipListMap;
+047import 
org.apache.commons.lang3.ObjectUtils;
+048import 
org.apache.hadoop.hbase.DoNotRetryIOException;
+049import 
org.apache.hadoop.hbase.HBaseIOException;
+050import 
org.apache.hadoop.hbase.HConstants;
+051import 
org.apache.hadoop.hbase.HRegionLocation;
+052import 
org.apache.hadoop.hbase.MetaTableAccessor;
+053import 
org.apache.hadoop.hbase.RegionLocations;
+054import 
org.apache.hadoop.hbase.TableName;
+055import 
org.apache.hadoop.hbase.TableNotFoundException;
+056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
+057import 
org.apache.hadoop.hbase.util.Bytes;
+058import 
org.apache.yetus.audience.InterfaceAudience;
+059import org.slf4j.Logger;
+060import org.slf4j.LoggerFactory;
+061
+062import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+063import 
org.apache.hbase.thirdparty.com.google.common.base.Objects;
 064
-065  @VisibleForTesting
-066  static final 

[34/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.html 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.html
index 6a2d4c7..1912a82 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":9,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":9,"i23":9,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":9,"i36":9,"i37":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":9,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":9,"i25":9,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":9,"i39":9,"i40":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -114,11 +114,11 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-class RawAsyncTableImpl
+class RawAsyncTableImpl
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements AsyncTableAdvancedScanResultConsumer
 The implementation of RawAsyncTable.
- 
+ 
  The word 'Raw' means that this is a low level class. The returned https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in 
java.util.concurrent">CompletableFuture will
  be finished inside the rpc framework thread, which means that the callbacks 
registered to the
  https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in 
java.util.concurrent">CompletableFuture will also be executed 
inside the rpc framework thread. So users who use
@@ -202,38 +202,42 @@ implements defaultScannerMaxResultSize
 
 
+private static org.slf4j.Logger
+LOG
+
+
 private int
 maxAttempts
 
-
+
 private long
 operationTimeoutNs
 
-
+
 private long
 pauseNs
 
-
+
 private long
 readRpcTimeoutNs
 
-
+
 private long
 rpcTimeoutNs
 
-
+
 private long
 scanTimeoutNs
 
-
+
 private int
 startLogErrorsCnt
 
-
+
 private TableName
 tableName
 
-
+
 private long
 writeRpcTimeoutNs
 
@@ -305,6 +309,11 @@ implements 
+private Tvoid
+connect(https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in 
java.util.concurrent">CompletableFutureTsrcFuture,
+   https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in 
java.util.concurrent">CompletableFutureTdstFuture)
+
+
 S,RAsyncTable.CoprocessorServiceBuilderS,R
 coprocessorService(https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true;
 title="class or interface in 
java.util.function">Functioncom.google.protobuf.RpcChannel,SstubMaker,
   ServiceCallerS,Rcallable,
@@ -312,7 +321,7 @@ implements Execute a coprocessor call on the regions which are covered 
by a range.
 
 
-
+
 S,Rhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in 
java.util.concurrent">CompletableFutureR
 coprocessorService(https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true;
 title="class or interface in 
java.util.function">Functioncom.google.protobuf.RpcChannel,SstubMaker,
   ServiceCallerS,Rcallable,
@@ -320,98 +329,104 @@ implements Execute the given coprocessor call on the region which 
contains the given row.
 
 
-
+
 private S,Rhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in 
java.util.concurrent">CompletableFutureR
 coprocessorService(https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true;
 title="class or interface in 
java.util.function">Functioncom.google.protobuf.RpcChannel,SstubMaker,
   ServiceCallerS,Rcallable,
   RegionInforegion,
   byte[]row)
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in 

[11/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
index 736388b..197b99d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
@@ -26,3624 +26,3599 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-021
-022import com.google.protobuf.Message;
-023import com.google.protobuf.RpcChannel;
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.Collections;
-028import java.util.EnumSet;
-029import java.util.HashMap;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Optional;
-033import java.util.Set;
-034import 
java.util.concurrent.CompletableFuture;
-035import 
java.util.concurrent.ConcurrentHashMap;
-036import java.util.concurrent.TimeUnit;
-037import 
java.util.concurrent.atomic.AtomicReference;
-038import java.util.function.BiConsumer;
-039import java.util.function.Function;
-040import java.util.function.Supplier;
-041import java.util.regex.Pattern;
-042import java.util.stream.Collectors;
-043import java.util.stream.Stream;
-044import org.apache.commons.io.IOUtils;
-045import 
org.apache.hadoop.conf.Configuration;
-046import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-047import 
org.apache.hadoop.hbase.CacheEvictionStats;
-048import 
org.apache.hadoop.hbase.CacheEvictionStatsAggregator;
-049import 
org.apache.hadoop.hbase.ClusterMetrics;
-050import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-051import 
org.apache.hadoop.hbase.ClusterMetricsBuilder;
-052import 
org.apache.hadoop.hbase.HConstants;
-053import 
org.apache.hadoop.hbase.HRegionLocation;
-054import 
org.apache.hadoop.hbase.MetaTableAccessor;
-055import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-056import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.RegionMetrics;
-059import 
org.apache.hadoop.hbase.RegionMetricsBuilder;
-060import 
org.apache.hadoop.hbase.ServerName;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotDisabledException;
-064import 
org.apache.hadoop.hbase.TableNotEnabledException;
-065import 
org.apache.hadoop.hbase.TableNotFoundException;
-066import 
org.apache.hadoop.hbase.UnknownRegionException;
-067import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-068import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-071import 
org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-072import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-073import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.replication.SyncReplicationState;
-083import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-084import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-085import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-088import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-089import 
org.apache.yetus.audience.InterfaceAudience;
-090import org.slf4j.Logger;
-091import org.slf4j.LoggerFactory;
-092
-093import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-094import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-095import 
org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
-096import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-097import 
org.apache.hbase.thirdparty.io.netty.util.Timeout;
-098import 

[24/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.TableCache.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.TableCache.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.TableCache.html
index 9aa9b59..ac7e0ea 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.TableCache.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.TableCache.html
@@ -28,520 +28,565 @@
 020import static 
org.apache.hadoop.hbase.HConstants.NINES;
 021import static 
org.apache.hadoop.hbase.HConstants.ZEROES;
 022import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-023import static 
org.apache.hadoop.hbase.client.ConnectionUtils.createClosestRowAfter;
-024import static 
org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
-025import static 
org.apache.hadoop.hbase.client.RegionInfo.createRegionName;
-026import static 
org.apache.hadoop.hbase.util.Bytes.BYTES_COMPARATOR;
-027import static 
org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
-028
-029import java.io.IOException;
-030import java.util.Arrays;
-031import java.util.HashSet;
-032import java.util.Iterator;
-033import java.util.LinkedHashMap;
-034import java.util.Map;
-035import java.util.Optional;
-036import java.util.Set;
-037import 
java.util.concurrent.CompletableFuture;
-038import 
java.util.concurrent.ConcurrentHashMap;
-039import 
java.util.concurrent.ConcurrentMap;
-040import 
java.util.concurrent.ConcurrentNavigableMap;
-041import 
java.util.concurrent.ConcurrentSkipListMap;
-042import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-043import 
org.apache.hadoop.hbase.HConstants;
-044import 
org.apache.hadoop.hbase.HRegionLocation;
-045import 
org.apache.hadoop.hbase.MetaTableAccessor;
-046import 
org.apache.hadoop.hbase.RegionLocations;
-047import 
org.apache.hadoop.hbase.TableName;
-048import 
org.apache.hadoop.hbase.TableNotFoundException;
-049import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-050import 
org.apache.hadoop.hbase.util.Bytes;
-051import 
org.apache.yetus.audience.InterfaceAudience;
-052import org.slf4j.Logger;
-053import org.slf4j.LoggerFactory;
-054
-055import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-056
-057/**
-058 * The asynchronous locator for regions 
other than meta.
-059 */
-060@InterfaceAudience.Private
-061class AsyncNonMetaRegionLocator {
-062
-063  private static final Logger LOG = 
LoggerFactory.getLogger(AsyncNonMetaRegionLocator.class);
+023import static 
org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper.canUpdateOnError;
+024import static 
org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper.createRegionLocations;
+025import static 
org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper.isGood;
+026import static 
org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper.mergeRegionLocations;
+027import static 
org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper.removeRegionLocation;
+028import static 
org.apache.hadoop.hbase.client.ConnectionUtils.createClosestRowAfter;
+029import static 
org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+030import static 
org.apache.hadoop.hbase.client.RegionInfo.createRegionName;
+031import static 
org.apache.hadoop.hbase.util.Bytes.BYTES_COMPARATOR;
+032import static 
org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
+033
+034import java.io.IOException;
+035import java.util.Arrays;
+036import java.util.HashSet;
+037import java.util.Iterator;
+038import java.util.LinkedHashMap;
+039import java.util.Map;
+040import java.util.Optional;
+041import java.util.Set;
+042import 
java.util.concurrent.CompletableFuture;
+043import 
java.util.concurrent.ConcurrentHashMap;
+044import 
java.util.concurrent.ConcurrentMap;
+045import 
java.util.concurrent.ConcurrentNavigableMap;
+046import 
java.util.concurrent.ConcurrentSkipListMap;
+047import 
org.apache.commons.lang3.ObjectUtils;
+048import 
org.apache.hadoop.hbase.DoNotRetryIOException;
+049import 
org.apache.hadoop.hbase.HBaseIOException;
+050import 
org.apache.hadoop.hbase.HConstants;
+051import 
org.apache.hadoop.hbase.HRegionLocation;
+052import 
org.apache.hadoop.hbase.MetaTableAccessor;
+053import 
org.apache.hadoop.hbase.RegionLocations;
+054import 
org.apache.hadoop.hbase.TableName;
+055import 
org.apache.hadoop.hbase.TableNotFoundException;
+056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
+057import 
org.apache.hadoop.hbase.util.Bytes;
+058import 
org.apache.yetus.audience.InterfaceAudience;
+059import org.slf4j.Logger;
+060import org.slf4j.LoggerFactory;
+061
+062import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+063import 
org.apache.hbase.thirdparty.com.google.common.base.Objects;

[48/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 9bedc3e..9456ee3 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -1978,6 +1978,10 @@
 
 Listen for failures to a given process.
 
+addListener(CompletableFutureT,
 BiConsumer? super T, ? super Throwable) - Static method in 
class org.apache.hadoop.hbase.util.FutureUtils
+
+This is method is used when you just want to add a listener 
to the given future.
+
 addLiveNode(String,
 long, int, int) - Method in class 
org.apache.hadoop.hbase.rest.model.StorageClusterStatusModel
 
 Add a live node to the cluster representation.
@@ -1991,6 +1995,10 @@
 Add an interceptor on the calls to the 
namenode#getBlockLocations from the DFSClient
  linked to this FileSystem.
 
+addLocationToCache(HRegionLocation)
 - Method in class org.apache.hadoop.hbase.client.AsyncMetaRegionLocator
+
+addLocationToCache(HRegionLocation)
 - Method in class org.apache.hadoop.hbase.client.AsyncNonMetaRegionLocator
+
 addLogFile(String,
 String) - Method in class org.apache.hadoop.hbase.snapshot.SnapshotInfo.SnapshotStats
 
 Add the specified log file to the stats
@@ -2587,9 +2595,7 @@
 
 Add backup set (list of tables)
 
-addToCache(AsyncNonMetaRegionLocator.TableCache,
 HRegionLocation) - Method in class 
org.apache.hadoop.hbase.client.AsyncNonMetaRegionLocator
-
-addToCache(HRegionLocation)
 - Method in class org.apache.hadoop.hbase.client.AsyncNonMetaRegionLocator
+addToCache(AsyncNonMetaRegionLocator.TableCache,
 RegionLocations) - Method in class 
org.apache.hadoop.hbase.client.AsyncNonMetaRegionLocator
 
 addToCache(Result)
 - Method in class org.apache.hadoop.hbase.client.AsyncTableResultScanner
 
@@ -4228,6 +4234,12 @@
 
 AsyncRegionLocator(AsyncConnectionImpl,
 HashedWheelTimer) - Constructor for class 
org.apache.hadoop.hbase.client.AsyncRegionLocator
 
+AsyncRegionLocatorHelper - Class in org.apache.hadoop.hbase.client
+
+Helper class for asynchronous region locator.
+
+AsyncRegionLocatorHelper()
 - Constructor for class org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper
+
 AsyncRegistry - Interface in org.apache.hadoop.hbase.client
 
 Implementations hold cluster information such as this 
cluster's id, location of hbase:meta, etc..
@@ -4317,7 +4329,7 @@
 
 Retry caller for a single request, such as get, put, 
delete, etc.
 
-AsyncSingleRequestRpcRetryingCaller(HashedWheelTimer, AsyncConnectionImpl, TableName, byte[], RegionLocateType, AsyncSingleRequestRpcRetryingCaller.Callable<T>, long, int, long, long, int) - Constructor for class org.apache.hadoop.hbase.client.AsyncSingleRequestRpcRetryingCaller
+AsyncSingleRequestRpcRetryingCaller(HashedWheelTimer, AsyncConnectionImpl, TableName, byte[], int, RegionLocateType, AsyncSingleRequestRpcRetryingCaller.Callable<T>, long, int, long, long, int) - Constructor for class org.apache.hadoop.hbase.client.AsyncSingleRequestRpcRetryingCaller
 
 AsyncSingleRequestRpcRetryingCaller.Callable<T> - Interface in org.apache.hadoop.hbase.client
 
@@ -9346,7 +9358,7 @@
 
 We stop to retry when we have exhausted BOTH the number of 
tries and the time allocated.
 
-canUpdate(HRegionLocation,
 HRegionLocation) - Static method in class 
org.apache.hadoop.hbase.client.AsyncRegionLocator
+canUpdateOnError(HRegionLocation,
 HRegionLocation) - Static method in class 
org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper
 
 canUpdateTableDescriptor()
 - Method in class org.apache.hadoop.hbase.master.HMaster
 
@@ -12280,7 +12292,7 @@
 
 clearCompactionQueues(RpcController,
 AdminProtos.ClearCompactionQueuesRequest) - Method in class 
org.apache.hadoop.hbase.regionserver.RSRpcServices
 
-clearCompletedRequests(Optional<HRegionLocation>) - Method in class org.apache.hadoop.hbase.client.AsyncNonMetaRegionLocator.TableCache
+clearCompletedRequests(Optional<RegionLocations>) - Method in class org.apache.hadoop.hbase.client.AsyncNonMetaRegionLocator.TableCache
 
 clearCurrentRow()
 - Method in class org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher
 
@@ -15824,7 +15836,7 @@
 
 CompatStateSerializer(InputStream)
 - Constructor for class org.apache.hadoop.hbase.procedure2.ProcedureUtil.CompatStateSerializer
 
-complete(TableName,
 AsyncNonMetaRegionLocator.LocateRequest, HRegionLocation, 
Throwable) - Method in class org.apache.hadoop.hbase.client.AsyncNonMetaRegionLocator
+complete(TableName,
 AsyncNonMetaRegionLocator.LocateRequest, RegionLocations, 
Throwable) - Method in class org.apache.hadoop.hbase.client.AsyncNonMetaRegionLocator
 
 complete
 - Variable in class org.apache.hadoop.hbase.errorhandling.TimeoutExceptionInjector
 
@@ -17085,6 +17097,8 @@
 
 connect(Configuration)
 - Method in class org.apache.hadoop.hbase.client.ClusterStatusListener.MulticastListener
 
+connect(CompletableFuture<T>, CompletableFuture<T>) - Method in class 

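The addListener(CompletableFuture<T>, BiConsumer<? super T, ? super Throwable>) helper indexed above simply attaches a completion callback to a future without changing its result. A minimal, self-contained sketch of that idea, not the actual org.apache.hadoop.hbase.util.FutureUtils code, could look like this:

import java.util.concurrent.CompletableFuture;
import java.util.function.BiConsumer;

// Illustrative stand-in for the addListener(CompletableFuture, BiConsumer) helper
// described in the index entry above; the real implementation lives in
// org.apache.hadoop.hbase.util.FutureUtils.
public final class FutureUtilsSketch {

  // Attach a listener without altering the future's completion value.
  public static <T> void addListener(CompletableFuture<T> future,
      BiConsumer<? super T, ? super Throwable> listener) {
    future.whenComplete((value, error) -> {
      try {
        listener.accept(value, error);
      } catch (Throwable t) {
        // A misbehaving listener should not propagate back into the future's pipeline.
        t.printStackTrace();
      }
    });
  }

  public static void main(String[] args) {
    CompletableFuture<String> f = new CompletableFuture<>();
    addListener(f, (v, err) -> System.out.println(err == null ? "got " + v : "failed: " + err));
    f.complete("region-location");
  }
}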
[29/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.RegionRequest.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.RegionRequest.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.RegionRequest.html
index cd0ff28..4f9947f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.RegionRequest.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.RegionRequest.html
@@ -31,161 +31,161 @@
 023import static 
org.apache.hadoop.hbase.client.ConnectionUtils.resetController;
 024import static 
org.apache.hadoop.hbase.client.ConnectionUtils.translateException;
 025import static 
org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
-026
-027import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-028
-029import java.io.IOException;
-030import java.util.ArrayList;
-031import java.util.Collections;
-032import java.util.HashMap;
-033import java.util.IdentityHashMap;
-034import java.util.List;
-035import java.util.Map;
-036import java.util.Optional;
-037import 
java.util.concurrent.CompletableFuture;
-038import 
java.util.concurrent.ConcurrentHashMap;
-039import 
java.util.concurrent.ConcurrentLinkedQueue;
-040import 
java.util.concurrent.ConcurrentMap;
-041import 
java.util.concurrent.ConcurrentSkipListMap;
-042import java.util.concurrent.TimeUnit;
-043import java.util.function.Supplier;
-044import java.util.stream.Collectors;
-045import java.util.stream.Stream;
-046
-047import 
org.apache.hadoop.hbase.CellScannable;
-048import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-049import 
org.apache.hadoop.hbase.HRegionLocation;
-050import 
org.apache.hadoop.hbase.ServerName;
-051import 
org.apache.hadoop.hbase.TableName;
-052import 
org.apache.yetus.audience.InterfaceAudience;
-053import org.slf4j.Logger;
-054import org.slf4j.LoggerFactory;
-055import 
org.apache.hadoop.hbase.client.MultiResponse.RegionResult;
-056import 
org.apache.hadoop.hbase.client.RetriesExhaustedException.ThrowableWithExtraContext;
-057import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-058import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-059import 
org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
-060import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-061import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
-062import 
org.apache.hadoop.hbase.util.Bytes;
-063import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-064
-065/**
-066 * Retry caller for batch.
-067 * <p>
-068 * Notice that, the {@link #operationTimeoutNs} is the total time limit now which is the same with
-069 * other single operations
-070 * <p>
-071 * And the {@link #maxAttempts} is a limit for each single operation in the batch logically. In the
-072 * implementation, we will record a {@code tries} parameter for each operation group, and if it is
-073 * split to several groups when retrying, the sub groups will inherit the {@code tries}. You can
-074 * imagine that the whole retrying process is a tree, and the {@link #maxAttempts} is the limit of
-075 * the depth of the tree.
-076 */
-077@InterfaceAudience.Private
-078class AsyncBatchRpcRetryingCaller<T> {
-079
-080  private static final Logger LOG = 
LoggerFactory.getLogger(AsyncBatchRpcRetryingCaller.class);
-081
-082  private final HashedWheelTimer 
retryTimer;
-083
-084  private final AsyncConnectionImpl 
conn;
-085
-086  private final TableName tableName;
-087
-088  private final List<Action> actions;
-089
-090  private final List<CompletableFuture<T>> futures;
-091
-092  private final IdentityHashMap<Action, CompletableFuture<T>> action2Future;
-093
-094  private final IdentityHashMap<Action, List<ThrowableWithExtraContext>> action2Errors;
-095
-096  private final long pauseNs;
-097
-098  private final int maxAttempts;
-099
-100  private final long 
operationTimeoutNs;
-101
-102  private final long rpcTimeoutNs;
-103
-104  private final int startLogErrorsCnt;
-105
-106  private final long startNs;
-107
-108  // we can not use HRegionLocation as the map key because the hashCode and equals method of
-109  // HRegionLocation only consider serverName.
-110  private static final class RegionRequest {
-111
-112public final HRegionLocation loc;
-113
-114public final ConcurrentLinkedQueue<Action> actions = new ConcurrentLinkedQueue<>();
-115
-116public RegionRequest(HRegionLocation loc) {
-117  this.loc = loc;
-118}
-119  }
-120
-121  private static final class ServerRequest {
-122
-123public final ConcurrentMap<byte[], RegionRequest> actionsByRegion =
-124new ConcurrentSkipListMap<>(Bytes.BYTES_COMPARATOR);
-125
-126public void addAction(HRegionLocation loc, Action action) {
-127 

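The javadoc removed in the hunk above describes maxAttempts as a per-operation limit: each group of actions carries a tries counter, and when a failed group is split up for the next attempt, every sub-group inherits that counter, so maxAttempts bounds the depth of the whole retry tree. A toy sketch of that bookkeeping, illustrative only and not the HBase implementation, assuming failed actions are simply regrouped by region:

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

// Toy illustration of the "retry tree" idea: sub-groups inherit the tries counter.
public class RetryTreeSketch {

  static final class Action {
    final String row;
    final String region;
    Action(String row, String region) { this.row = row; this.region = region; }
  }

  static void sendOrRetry(List<Action> group, int tries, int maxAttempts) {
    if (tries > maxAttempts) {
      System.out.println("giving up after " + maxAttempts + " attempts");
      return;
    }
    System.out.println("attempt " + tries + " for " + group.size() + " action(s)");
    boolean failed = tries < 2; // pretend the first attempt always fails
    if (!failed) {
      return;
    }
    // Regroup the failed actions by region and retry each sub-group,
    // passing down (inheriting) the current tries counter.
    Map<String, List<Action>> byRegion =
        group.stream().collect(Collectors.groupingBy(a -> a.region));
    byRegion.values().forEach(sub -> sendOrRetry(sub, tries + 1, maxAttempts));
  }

  public static void main(String[] args) {
    sendOrRetry(List.of(new Action("r1", "regionA"), new Action("r2", "regionB")), 1, 3);
  }
}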
[22/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRegionLocator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRegionLocator.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRegionLocator.html
index dc4399b..a628974 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRegionLocator.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRegionLocator.html
@@ -26,148 +26,147 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-021import static 
org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil.findException;
-022import static 
org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil.isMetaClearingException;
-023
-024import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-025import 
org.apache.hbase.thirdparty.io.netty.util.Timeout;
-026
-027import 
java.util.concurrent.CompletableFuture;
-028import java.util.concurrent.TimeUnit;
-029import java.util.function.Consumer;
-030import java.util.function.Function;
-031import java.util.function.Supplier;
-032
-033import 
org.apache.hadoop.hbase.HRegionLocation;
-034import 
org.apache.hadoop.hbase.TableName;
-035import 
org.apache.yetus.audience.InterfaceAudience;
-036import org.slf4j.Logger;
-037import org.slf4j.LoggerFactory;
-038import 
org.apache.hadoop.hbase.exceptions.RegionMovedException;
-039import 
org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-040import 
org.apache.hadoop.hbase.util.Bytes;
-041
-042/**
-043 * The asynchronous region locator.
-044 */
-045@InterfaceAudience.Private
-046class AsyncRegionLocator {
+021import static 
org.apache.hadoop.hbase.util.FutureUtils.addListener;
+022
+023import 
java.util.concurrent.CompletableFuture;
+024import java.util.concurrent.TimeUnit;
+025import java.util.function.Supplier;
+026import 
org.apache.hadoop.hbase.HBaseIOException;
+027import 
org.apache.hadoop.hbase.HRegionLocation;
+028import 
org.apache.hadoop.hbase.RegionException;
+029import 
org.apache.hadoop.hbase.RegionLocations;
+030import 
org.apache.hadoop.hbase.TableName;
+031import 
org.apache.hadoop.hbase.exceptions.TimeoutIOException;
+032import 
org.apache.hadoop.hbase.util.Bytes;
+033import 
org.apache.yetus.audience.InterfaceAudience;
+034import org.slf4j.Logger;
+035import org.slf4j.LoggerFactory;
+036
+037import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
+038import 
org.apache.hbase.thirdparty.io.netty.util.Timeout;
+039
+040/**
+041 * The asynchronous region locator.
+042 */
+043@InterfaceAudience.Private
+044class AsyncRegionLocator {
+045
+046  private static final Logger LOG = 
LoggerFactory.getLogger(AsyncRegionLocator.class);
 047
-048  private static final Logger LOG = 
LoggerFactory.getLogger(AsyncRegionLocator.class);
+048  private final HashedWheelTimer 
retryTimer;
 049
-050  private final HashedWheelTimer 
retryTimer;
+050  private final AsyncMetaRegionLocator 
metaRegionLocator;
 051
-052  private final AsyncMetaRegionLocator 
metaRegionLocator;
+052  private final AsyncNonMetaRegionLocator 
nonMetaRegionLocator;
 053
-054  private final AsyncNonMetaRegionLocator 
nonMetaRegionLocator;
-055
-056  AsyncRegionLocator(AsyncConnectionImpl 
conn, HashedWheelTimer retryTimer) {
-057this.metaRegionLocator = new 
AsyncMetaRegionLocator(conn.registry);
-058this.nonMetaRegionLocator = new 
AsyncNonMetaRegionLocator(conn);
-059this.retryTimer = retryTimer;
-060  }
-061
-062  private CompletableFuture<HRegionLocation> withTimeout(CompletableFuture<HRegionLocation> future,
-063  long timeoutNs, Supplier<String> timeoutMsg) {
-064if (future.isDone() || timeoutNs <= 0) {
-065  return future;
-066}
-067Timeout timeoutTask = retryTimer.newTimeout(t -> {
-068  if (future.isDone()) {
-069return;
-070  }
-071  future.completeExceptionally(new TimeoutIOException(timeoutMsg.get()));
-072}, timeoutNs, TimeUnit.NANOSECONDS);
-073return future.whenComplete((loc, error) -> {
-074  if (error != null && error.getClass() != TimeoutIOException.class) {
-075// cancel timeout task if we are not completed by it.
-076timeoutTask.cancel();
-077  }
-078});
-079  }
-080
-081  CompletableFuture<HRegionLocation> getRegionLocation(TableName tableName, byte[] row,
-082  RegionLocateType type, boolean reload, long timeoutNs) {
-083// meta region can not be split right now so we always call the same method.
-084// Change it later if the meta table can have more than one regions.
-085CompletableFuture<HRegionLocation> future =
-086tableName.equals(META_TABLE_NAME) ? metaRegionLocator.getRegionLocation(reload)
-087: nonMetaRegionLocator.getRegionLocation(tableName, row, type, reload);
-088return withTimeout(future, timeoutNs,

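The withTimeout(...) method shown in the removed lines above races the location future against a timer task and cancels the task when the future wins. A dependency-free sketch of the same pattern, substituting a ScheduledExecutorService and TimeoutException for netty's HashedWheelTimer and HBase's TimeoutIOException, might look like this:

import java.util.concurrent.*;
import java.util.function.Supplier;

// Sketch of the timeout-race pattern; not HBase code.
public class WithTimeoutSketch {

  private static final ScheduledExecutorService TIMER =
      Executors.newSingleThreadScheduledExecutor();

  static <T> CompletableFuture<T> withTimeout(CompletableFuture<T> future, long timeoutNs,
      Supplier<String> timeoutMsg) {
    if (future.isDone() || timeoutNs <= 0) {
      return future;
    }
    // Schedule a task that fails the future once the deadline passes.
    ScheduledFuture<?> timeoutTask = TIMER.schedule(() -> {
      if (!future.isDone()) {
        future.completeExceptionally(new TimeoutException(timeoutMsg.get()));
      }
    }, timeoutNs, TimeUnit.NANOSECONDS);
    return future.whenComplete((v, error) -> {
      // Cancel the timeout task if we were not completed by it.
      if (!(error instanceof TimeoutException)) {
        timeoutTask.cancel(false);
      }
    });
  }

  public static void main(String[] args) throws Exception {
    CompletableFuture<String> slow = new CompletableFuture<>();
    withTimeout(slow, TimeUnit.MILLISECONDS.toNanos(100), () -> "locate timed out")
        .whenComplete((v, e) -> System.out.println(e != null ? e.toString() : v));
    Thread.sleep(300);
    TIMER.shutdown();
  }
}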
[15/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html
index 2e150bc..0b315b8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html
@@ -25,22 +25,22 @@
 017 */
 018package org.apache.hadoop.hbase.client;
 019
-020import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
-021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
-022import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+020import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
+022import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
 023
-024import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-025
-026import java.util.List;
-027import 
java.util.concurrent.CompletableFuture;
-028import java.util.concurrent.TimeUnit;
-029
-030import 
org.apache.hadoop.hbase.HRegionLocation;
-031import 
org.apache.hadoop.hbase.ServerName;
-032import 
org.apache.hadoop.hbase.TableName;
-033import 
org.apache.yetus.audience.InterfaceAudience;
-034import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
-035import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+024import java.util.List;
+025import 
java.util.concurrent.CompletableFuture;
+026import java.util.concurrent.TimeUnit;
+027import 
org.apache.hadoop.hbase.HRegionLocation;
+028import 
org.apache.hadoop.hbase.ServerName;
+029import 
org.apache.hadoop.hbase.TableName;
+030import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+031import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+032import 
org.apache.yetus.audience.InterfaceAudience;
+033
+034import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
+035
 036import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
 037import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse;
 038
@@ -83,432 +83,441 @@
 075
 076private RegionLocateType locateType = 
RegionLocateType.CURRENT;
 077
-078public 
SingleRequestCallerBuilderT table(TableName tableName) {
-079  this.tableName = tableName;
-080  return this;
-081}
-082
-083public 
SingleRequestCallerBuilderT row(byte[] row) {
-084  this.row = row;
-085  return this;
-086}
-087
-088public 
SingleRequestCallerBuilderT action(
-089
AsyncSingleRequestRpcRetryingCaller.CallableT callable) {
-090  this.callable = callable;
-091  return this;
-092}
-093
-094public 
SingleRequestCallerBuilderT operationTimeout(long operationTimeout, 
TimeUnit unit) {
-095  this.operationTimeoutNs = 
unit.toNanos(operationTimeout);
-096  return this;
-097}
-098
-099public 
SingleRequestCallerBuilderT rpcTimeout(long rpcTimeout, TimeUnit unit) 
{
-100  this.rpcTimeoutNs = 
unit.toNanos(rpcTimeout);
-101  return this;
-102}
-103
-104public 
SingleRequestCallerBuilderT locateType(RegionLocateType locateType) {
-105  this.locateType = locateType;
-106  return this;
-107}
-108
-109public 
SingleRequestCallerBuilderT pause(long pause, TimeUnit unit) {
-110  this.pauseNs = 
unit.toNanos(pause);
-111  return this;
-112}
-113
-114public 
SingleRequestCallerBuilderT maxAttempts(int maxAttempts) {
-115  this.maxAttempts = maxAttempts;
-116  return this;
-117}
-118
-119public 
SingleRequestCallerBuilderT startLogErrorsCnt(int startLogErrorsCnt) 
{
-120  this.startLogErrorsCnt = 
startLogErrorsCnt;
-121  return this;
-122}
-123
-124public 
AsyncSingleRequestRpcRetryingCallerT build() {
-125  return new 
AsyncSingleRequestRpcRetryingCaller(retryTimer, conn,
-126  checkNotNull(tableName, 
"tableName is null"), checkNotNull(row, "row is null"),
-127  checkNotNull(locateType, 
"locateType is null"), checkNotNull(callable, "action is null"),
-128  pauseNs, maxAttempts, 
operationTimeoutNs, rpcTimeoutNs, startLogErrorsCnt);
+078private int replicaId = 
RegionReplicaUtil.DEFAULT_REPLICA_ID;
+079
+080public 
SingleRequestCallerBuilderT table(TableName tableName) {
+081  this.tableName = tableName;
+082  return this;
+083}
+084
+085public 
SingleRequestCallerBuilderT row(byte[] 

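The SingleRequestCallerBuilder in the hunk above follows the usual fluent-builder shape: each setter returns this, and build() validates the mandatory pieces (table, row, locate type, action) before constructing the caller. A stripped-down, self-contained sketch of that pattern, illustrative only and with assumed field names rather than HBase code:

import java.util.Objects;
import java.util.concurrent.TimeUnit;

// Toy fluent builder with checkNotNull-style validation in build().
public class CallerBuilderSketch {

  static final class Caller {
    final String table;
    final byte[] row;
    final long operationTimeoutNs;

    Caller(String table, byte[] row, long operationTimeoutNs) {
      this.table = table;
      this.row = row;
      this.operationTimeoutNs = operationTimeoutNs;
    }
  }

  static final class Builder {
    private String table;
    private byte[] row;
    private long operationTimeoutNs;

    Builder table(String table) { this.table = table; return this; }

    Builder row(byte[] row) { this.row = row; return this; }

    Builder operationTimeout(long timeout, TimeUnit unit) {
      this.operationTimeoutNs = unit.toNanos(timeout);
      return this;
    }

    Caller build() {
      // Mirrors the checkNotNull(tableName, "tableName is null") checks in the diff above.
      return new Caller(Objects.requireNonNull(table, "table is null"),
          Objects.requireNonNull(row, "row is null"), operationTimeoutNs);
    }
  }

  public static void main(String[] args) {
    Caller caller = new Builder().table("t1").row(new byte[] { 0 })
        .operationTimeout(30, TimeUnit.SECONDS).build();
    System.out.println(caller.table + " timeoutNs=" + caller.operationTimeoutNs);
  }
}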
[38/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/ConnectionConfiguration.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/ConnectionConfiguration.html 
b/devapidocs/org/apache/hadoop/hbase/client/ConnectionConfiguration.html
index cb2cf06..6b8596d 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/ConnectionConfiguration.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/ConnectionConfiguration.html
@@ -165,6 +165,14 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 operationTimeout
 
 
+static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+PRIMARY_CALL_TIMEOUT_MICROSECOND
+
+
+static int
+PRIMARY_CALL_TIMEOUT_MICROSECOND_DEFAULT
+
+
 private int
 primaryCallTimeoutMicroSecond
 
@@ -465,13 +473,39 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 
+
+
+
+
+
+PRIMARY_CALL_TIMEOUT_MICROSECOND
+public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String PRIMARY_CALL_TIMEOUT_MICROSECOND
+
+See Also:
+Constant
 Field Values
+
+
+
+
+
+
+
+
+PRIMARY_CALL_TIMEOUT_MICROSECOND_DEFAULT
+public static finalint PRIMARY_CALL_TIMEOUT_MICROSECOND_DEFAULT
+
+See Also:
+Constant
 Field Values
+
+
+
 
 
 
 
 
 writeBufferSize
-private finallong writeBufferSize
+private finallong writeBufferSize
 
 
 
@@ -480,7 +514,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 writeBufferPeriodicFlushTimeoutMs
-private finallong writeBufferPeriodicFlushTimeoutMs
+private finallong writeBufferPeriodicFlushTimeoutMs
 
 
 
@@ -489,7 +523,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 writeBufferPeriodicFlushTimerTickMs
-private finallong writeBufferPeriodicFlushTimerTickMs
+private finallong writeBufferPeriodicFlushTimerTickMs
 
 
 
@@ -498,7 +532,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 metaOperationTimeout
-private finalint metaOperationTimeout
+private finalint metaOperationTimeout
 
 
 
@@ -507,7 +541,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 operationTimeout
-private finalint operationTimeout
+private finalint operationTimeout
 
 
 
@@ -516,7 +550,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 scannerCaching
-private finalint scannerCaching
+private finalint scannerCaching
 
 
 
@@ -525,7 +559,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 scannerMaxResultSize
-private finallong scannerMaxResultSize
+private finallong scannerMaxResultSize
 
 
 
@@ -534,7 +568,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 primaryCallTimeoutMicroSecond
-private finalint primaryCallTimeoutMicroSecond
+private finalint primaryCallTimeoutMicroSecond
 
 
 
@@ -543,7 +577,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 replicaCallTimeoutMicroSecondScan
-private finalint replicaCallTimeoutMicroSecondScan
+private finalint replicaCallTimeoutMicroSecondScan
 
 
 
@@ -552,7 +586,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 metaReplicaCallTimeoutMicroSecondScan
-private finalint metaReplicaCallTimeoutMicroSecondScan
+private finalint metaReplicaCallTimeoutMicroSecondScan
 
 
 
@@ -561,7 +595,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 retries
-private finalint retries
+private finalint retries
 
 
 
@@ -570,7 +604,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 maxKeyValueSize
-private finalint maxKeyValueSize
+private finalint maxKeyValueSize
 
 
 
@@ -579,7 +613,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 rpcTimeout
-private finalint rpcTimeout
+private finalint rpcTimeout
 
 
 
@@ -588,7 +622,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 readRpcTimeout
-private finalint readRpcTimeout
+private finalint readRpcTimeout
 
 
 
@@ -597,7 +631,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 writeRpcTimeout
-private finalint writeRpcTimeout
+private finalint writeRpcTimeout
 
 
 
@@ -606,7 +640,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 clientScannerAsyncPrefetch
-private finalboolean clientScannerAsyncPrefetch
+private finalboolean clientScannerAsyncPrefetch
 
 
 
@@ -623,7 +657,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 ConnectionConfiguration
-ConnectionConfiguration(org.apache.hadoop.conf.Configurationconf)
+ConnectionConfiguration(org.apache.hadoop.conf.Configurationconf)
 Constructor
 
 Parameters:
@@ -637,7 +671,7 @@ 

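The new PRIMARY_CALL_TIMEOUT_MICROSECOND constant above names the configuration key behind the existing primaryCallTimeoutMicroSecond field. As a hedged illustration of how such a client-side knob is typically read when ConnectionConfiguration is built; the key string below is a placeholder, not the real constant:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

// Sketch only: the real key name is the value of PRIMARY_CALL_TIMEOUT_MICROSECOND.
public class PrimaryCallTimeoutExample {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    conf.setInt("hbase.client.primaryCallTimeout.placeholder", 10000); // microseconds
    System.out.println(conf.getInt("hbase.client.primaryCallTimeout.placeholder", 10000));
  }
}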
[45/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index e8886e1..b7af077 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -2936,9 +2936,9 @@ service.
 
 
 private void
-AsyncNonMetaRegionLocator.complete(TableNametableName,
+AsyncNonMetaRegionLocator.complete(TableNametableName,
 AsyncNonMetaRegionLocator.LocateRequestreq,
-HRegionLocationloc,
+RegionLocationslocs,
 https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in 
java.lang">Throwableerror)
 
 
@@ -3491,38 +3491,41 @@ service.
 
 
 (package private) https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation
-AsyncNonMetaRegionLocator.getRegionLocation(TableNametableName,
+AsyncRegionLocator.getRegionLocation(TableNametableName,
  byte[]row,
- RegionLocateTypelocateType,
- booleanreload)
+ intreplicaId,
+ RegionLocateTypetype,
+ booleanreload,
+ longtimeoutNs)
 
 
 (package private) https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation
+AsyncRegionLocator.getRegionLocation(TableNametableName,
+ byte[]row,
+ intreplicaId,
+ RegionLocateTypetype,
+ longtimeoutNs)
+
+
+(package private) https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation
 AsyncRegionLocator.getRegionLocation(TableNametableName,
  byte[]row,
  RegionLocateTypetype,
  booleanreload,
  longtimeoutNs)
 
-
+
 (package private) https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation
 AsyncRegionLocator.getRegionLocation(TableNametableName,
  byte[]row,
  RegionLocateTypetype,
  longtimeoutNs)
 
-
-(package private) HRegionLocation
+
+(package private) RegionLocations
 AsyncNonMetaRegionLocator.getRegionLocationInCache(TableNametableName,
 byte[]row)
 
-
-private https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation
-AsyncNonMetaRegionLocator.getRegionLocationInternal(TableNametableName,
- byte[]row,
- RegionLocateTypelocateType,
- booleanreload)
-
 
 (package private) static RegionLocations
 RpcRetryingCallerWithReadReplicas.getRegionLocations(booleanuseCache,
@@ -3540,147 +3543,171 @@ service.
   intreplicaId)
 
 
+(package private) https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureRegionLocations
+AsyncNonMetaRegionLocator.getRegionLocations(TableNametableName,
+  byte[]row,
+  intreplicaId,
+  RegionLocateTypelocateType,
+  booleanreload)
+
+
+(package private) https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureRegionLocations
+AsyncRegionLocator.getRegionLocations(TableNametableName,
+  byte[]row,
+  RegionLocateTypetype,
+  booleanreload,
+  longtimeoutNs)
+
+
+private https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureRegionLocations
+AsyncNonMetaRegionLocator.getRegionLocationsInternal(TableNametableName,
+  byte[]row,
+  intreplicaId,
+  RegionLocateTypelocateType,
+  booleanreload)
+
+
 AsyncTableRegionLocator
 AsyncConnection.getRegionLocator(TableNametableName)
 Retrieve a AsyncRegionLocator implementation to inspect 
region information on a table.
 
 
-
+
 RegionLocator
 

[17/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.html
index 2e150bc..0b315b8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.html
@@ -25,22 +25,22 @@
 017 */
 018package org.apache.hadoop.hbase.client;
 019
-020import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
-021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
-022import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+020import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
+022import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
 023
-024import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-025
-026import java.util.List;
-027import 
java.util.concurrent.CompletableFuture;
-028import java.util.concurrent.TimeUnit;
-029
-030import 
org.apache.hadoop.hbase.HRegionLocation;
-031import 
org.apache.hadoop.hbase.ServerName;
-032import 
org.apache.hadoop.hbase.TableName;
-033import 
org.apache.yetus.audience.InterfaceAudience;
-034import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
-035import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+024import java.util.List;
+025import 
java.util.concurrent.CompletableFuture;
+026import java.util.concurrent.TimeUnit;
+027import 
org.apache.hadoop.hbase.HRegionLocation;
+028import 
org.apache.hadoop.hbase.ServerName;
+029import 
org.apache.hadoop.hbase.TableName;
+030import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+031import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+032import 
org.apache.yetus.audience.InterfaceAudience;
+033
+034import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
+035
 036import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
 037import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse;
 038
@@ -83,432 +83,441 @@
 075
 076private RegionLocateType locateType = 
RegionLocateType.CURRENT;
 077
-078public 
SingleRequestCallerBuilderT table(TableName tableName) {
-079  this.tableName = tableName;
-080  return this;
-081}
-082
-083public 
SingleRequestCallerBuilderT row(byte[] row) {
-084  this.row = row;
-085  return this;
-086}
-087
-088public 
SingleRequestCallerBuilderT action(
-089
AsyncSingleRequestRpcRetryingCaller.CallableT callable) {
-090  this.callable = callable;
-091  return this;
-092}
-093
-094public 
SingleRequestCallerBuilderT operationTimeout(long operationTimeout, 
TimeUnit unit) {
-095  this.operationTimeoutNs = 
unit.toNanos(operationTimeout);
-096  return this;
-097}
-098
-099public 
SingleRequestCallerBuilderT rpcTimeout(long rpcTimeout, TimeUnit unit) 
{
-100  this.rpcTimeoutNs = 
unit.toNanos(rpcTimeout);
-101  return this;
-102}
-103
-104public 
SingleRequestCallerBuilderT locateType(RegionLocateType locateType) {
-105  this.locateType = locateType;
-106  return this;
-107}
-108
-109public 
SingleRequestCallerBuilderT pause(long pause, TimeUnit unit) {
-110  this.pauseNs = 
unit.toNanos(pause);
-111  return this;
-112}
-113
-114public 
SingleRequestCallerBuilderT maxAttempts(int maxAttempts) {
-115  this.maxAttempts = maxAttempts;
-116  return this;
-117}
-118
-119public 
SingleRequestCallerBuilderT startLogErrorsCnt(int startLogErrorsCnt) 
{
-120  this.startLogErrorsCnt = 
startLogErrorsCnt;
-121  return this;
-122}
-123
-124public 
AsyncSingleRequestRpcRetryingCallerT build() {
-125  return new 
AsyncSingleRequestRpcRetryingCaller(retryTimer, conn,
-126  checkNotNull(tableName, 
"tableName is null"), checkNotNull(row, "row is null"),
-127  checkNotNull(locateType, 
"locateType is null"), checkNotNull(callable, "action is null"),
-128  pauseNs, maxAttempts, 
operationTimeoutNs, rpcTimeoutNs, startLogErrorsCnt);
+078private int replicaId = 
RegionReplicaUtil.DEFAULT_REPLICA_ID;
+079
+080public 
SingleRequestCallerBuilderT table(TableName tableName) {
+081  this.tableName = tableName;
+082  return this;
+083}
+084
+085public 

[47/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
index 6bcb01c..e950510 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
@@ -347,22 +347,6 @@ service.
 
 
 
-https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapAsyncNonMetaRegionLocator.LocateRequest,https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation
-AsyncNonMetaRegionLocator.TableCache.allRequests
-
-
-https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentNavigableMap.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ConcurrentNavigableMapbyte[],HRegionLocation
-AsyncNonMetaRegionLocator.TableCache.cache
-
-
-private https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicReferenceHRegionLocation
-AsyncMetaRegionLocator.metaRegionLocation
-
-
-private https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicReferencehttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation
-AsyncMetaRegionLocator.metaRelocateFuture
-
-
 private https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapHRegionLocation,HTableMultiplexer.FlushWorker
 HTableMultiplexer.serverToFlushWorkerMap
 The map between each region server to its flush worker
@@ -378,6 +362,14 @@ service.
 
 
 
+private HRegionLocation
+AsyncNonMetaRegionLocator.getCachedLocation(HRegionLocationloc)
+
+
+private HRegionLocation
+AsyncMetaRegionLocator.getCacheLocation(HRegionLocationloc)
+
+
 protected HRegionLocation
 MultiServerCallable.getLocation()
 
@@ -430,21 +422,9 @@ service.
  booleanreload)
 
 
-(package private) HRegionLocation
-AsyncNonMetaRegionLocator.getRegionLocationInCache(TableNametableName,
-byte[]row)
-
-
 private HRegionLocation
 AsyncRequestFutureImpl.getReplicaLocationOrFail(Actionaction)
 
-
-private HRegionLocation
-AsyncNonMetaRegionLocator.locateInCache(AsyncNonMetaRegionLocator.TableCachetableCache,
- TableNametableName,
- byte[]row,
- RegionLocateTypelocateType)
-
 
 HRegionLocation
 ClusterConnection.locateRegion(byte[]regionName)
@@ -469,18 +449,6 @@ service.
 byte[]row)
 
 
-private HRegionLocation
-AsyncNonMetaRegionLocator.locateRowBeforeInCache(AsyncNonMetaRegionLocator.TableCachetableCache,
-  TableNametableName,
-  byte[]row)
-
-
-private HRegionLocation
-AsyncNonMetaRegionLocator.locateRowInCache(AsyncNonMetaRegionLocator.TableCachetableCache,
-TableNametableName,
-byte[]row)
-
-
 HRegionLocation
 ClusterConnection.relocateRegion(TableNametableName,
   byte[]row)
@@ -533,38 +501,60 @@ service.
 
 
 (package private) https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation
-AsyncMetaRegionLocator.getRegionLocation(booleanreload)
-
-
-(package private) https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation
 RawAsyncHBaseAdmin.getRegionLocation(byte[]regionNameOrEncodedRegionName)
 Get the region location for the passed region name.
 
 
-
+
 default https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation
 AsyncTableRegionLocator.getRegionLocation(byte[]row)
 Finds the region on which the given row is being 
served.
 
 
+
+default https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation
+AsyncTableRegionLocator.getRegionLocation(byte[]row,
+ booleanreload)
+Finds the region on which the given row is being 
served.
+
+
 
+default 

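The AsyncTableRegionLocator.getRegionLocation(row) entries above are the public face of these locator changes. Assuming the standard async client entry points (ConnectionFactory.createAsyncConnection and AsyncConnection.getRegionLocator), a lookup might be sketched like this; the table name and row key are illustrative:

import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.AsyncTableRegionLocator;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.util.Bytes;

// Sketch only: locate the region serving a row through the async client.
public class LocateRowExample {
  public static void main(String[] args) throws Exception {
    CompletableFuture<AsyncConnection> connFuture =
        ConnectionFactory.createAsyncConnection(HBaseConfiguration.create());
    try (AsyncConnection conn = connFuture.get()) {
      AsyncTableRegionLocator locator = conn.getRegionLocator(TableName.valueOf("t1"));
      HRegionLocation loc = locator.getRegionLocation(Bytes.toBytes("row-0")).get();
      System.out.println("row-0 is on " + loc.getServerName());
    }
  }
}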
[05/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
index 736388b..197b99d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
@@ -26,3624 +26,3599 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-021
-022import com.google.protobuf.Message;
-023import com.google.protobuf.RpcChannel;
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.Collections;
-028import java.util.EnumSet;
-029import java.util.HashMap;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Optional;
-033import java.util.Set;
-034import 
java.util.concurrent.CompletableFuture;
-035import 
java.util.concurrent.ConcurrentHashMap;
-036import java.util.concurrent.TimeUnit;
-037import 
java.util.concurrent.atomic.AtomicReference;
-038import java.util.function.BiConsumer;
-039import java.util.function.Function;
-040import java.util.function.Supplier;
-041import java.util.regex.Pattern;
-042import java.util.stream.Collectors;
-043import java.util.stream.Stream;
-044import org.apache.commons.io.IOUtils;
-045import 
org.apache.hadoop.conf.Configuration;
-046import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-047import 
org.apache.hadoop.hbase.CacheEvictionStats;
-048import 
org.apache.hadoop.hbase.CacheEvictionStatsAggregator;
-049import 
org.apache.hadoop.hbase.ClusterMetrics;
-050import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-051import 
org.apache.hadoop.hbase.ClusterMetricsBuilder;
-052import 
org.apache.hadoop.hbase.HConstants;
-053import 
org.apache.hadoop.hbase.HRegionLocation;
-054import 
org.apache.hadoop.hbase.MetaTableAccessor;
-055import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-056import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.RegionMetrics;
-059import 
org.apache.hadoop.hbase.RegionMetricsBuilder;
-060import 
org.apache.hadoop.hbase.ServerName;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotDisabledException;
-064import 
org.apache.hadoop.hbase.TableNotEnabledException;
-065import 
org.apache.hadoop.hbase.TableNotFoundException;
-066import 
org.apache.hadoop.hbase.UnknownRegionException;
-067import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-068import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-071import 
org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-072import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-073import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.replication.SyncReplicationState;
-083import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-084import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-085import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-088import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-089import 
org.apache.yetus.audience.InterfaceAudience;
-090import org.slf4j.Logger;
-091import org.slf4j.LoggerFactory;
-092
-093import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-094import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-095import 
org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
-096import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-097import 

[02/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MasterRpcCall.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MasterRpcCall.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MasterRpcCall.html
index 736388b..197b99d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MasterRpcCall.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MasterRpcCall.html
@@ -26,3624 +26,3599 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-021
-022import com.google.protobuf.Message;
-023import com.google.protobuf.RpcChannel;
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.Collections;
-028import java.util.EnumSet;
-029import java.util.HashMap;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Optional;
-033import java.util.Set;
-034import 
java.util.concurrent.CompletableFuture;
-035import 
java.util.concurrent.ConcurrentHashMap;
-036import java.util.concurrent.TimeUnit;
-037import 
java.util.concurrent.atomic.AtomicReference;
-038import java.util.function.BiConsumer;
-039import java.util.function.Function;
-040import java.util.function.Supplier;
-041import java.util.regex.Pattern;
-042import java.util.stream.Collectors;
-043import java.util.stream.Stream;
-044import org.apache.commons.io.IOUtils;
-045import 
org.apache.hadoop.conf.Configuration;
-046import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-047import 
org.apache.hadoop.hbase.CacheEvictionStats;
-048import 
org.apache.hadoop.hbase.CacheEvictionStatsAggregator;
-049import 
org.apache.hadoop.hbase.ClusterMetrics;
-050import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-051import 
org.apache.hadoop.hbase.ClusterMetricsBuilder;
-052import 
org.apache.hadoop.hbase.HConstants;
-053import 
org.apache.hadoop.hbase.HRegionLocation;
-054import 
org.apache.hadoop.hbase.MetaTableAccessor;
-055import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-056import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.RegionMetrics;
-059import 
org.apache.hadoop.hbase.RegionMetricsBuilder;
-060import 
org.apache.hadoop.hbase.ServerName;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotDisabledException;
-064import 
org.apache.hadoop.hbase.TableNotEnabledException;
-065import 
org.apache.hadoop.hbase.TableNotFoundException;
-066import 
org.apache.hadoop.hbase.UnknownRegionException;
-067import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-068import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-071import 
org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-072import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-073import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.replication.SyncReplicationState;
-083import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-084import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-085import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-088import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-089import 
org.apache.yetus.audience.InterfaceAudience;
-090import org.slf4j.Logger;
-091import org.slf4j.LoggerFactory;
-092
-093import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-094import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-095import 
org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
-096import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-097import 
org.apache.hbase.thirdparty.io.netty.util.Timeout;
-098import 

[35/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CheckAndMutateBuilderImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CheckAndMutateBuilderImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CheckAndMutateBuilderImpl.html
index 7ab3056..27e1af4 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CheckAndMutateBuilderImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CheckAndMutateBuilderImpl.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private final class RawAsyncTableImpl.CheckAndMutateBuilderImpl
+private final class RawAsyncTableImpl.CheckAndMutateBuilderImpl
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements AsyncTable.CheckAndMutateBuilder
 
@@ -265,7 +265,7 @@ implements 
 
 row
-private finalbyte[] row
+private finalbyte[] row
 
 
 
@@ -274,7 +274,7 @@ implements 
 
 family
-private finalbyte[] family
+private finalbyte[] family
 
 
 
@@ -283,7 +283,7 @@ implements 
 
 qualifier
-privatebyte[] qualifier
+privatebyte[] qualifier
 
 
 
@@ -292,7 +292,7 @@ implements 
 
 timeRange
-privateTimeRange timeRange
+privateTimeRange timeRange
 
 
 
@@ -301,7 +301,7 @@ implements 
 
 op
-privateCompareOperator op
+privateCompareOperator op
 
 
 
@@ -310,7 +310,7 @@ implements 
 
 value
-privatebyte[] value
+privatebyte[] value
 
 
 
@@ -327,7 +327,7 @@ implements 
 
 CheckAndMutateBuilderImpl
-publicCheckAndMutateBuilderImpl(byte[]row,
+publicCheckAndMutateBuilderImpl(byte[]row,
  byte[]family)
 
 
@@ -345,7 +345,7 @@ implements 
 
 qualifier
-publicAsyncTable.CheckAndMutateBuilderqualifier(byte[]qualifier)
+publicAsyncTable.CheckAndMutateBuilderqualifier(byte[]qualifier)
 
 Specified by:
 qualifierin
 interfaceAsyncTable.CheckAndMutateBuilder
@@ -360,7 +360,7 @@ implements 
 
 timeRange
-publicAsyncTable.CheckAndMutateBuildertimeRange(TimeRangetimeRange)
+publicAsyncTable.CheckAndMutateBuildertimeRange(TimeRangetimeRange)
 
 Specified by:
 timeRangein
 interfaceAsyncTable.CheckAndMutateBuilder
@@ -375,7 +375,7 @@ implements 
 
 ifNotExists
-publicAsyncTable.CheckAndMutateBuilderifNotExists()
+publicAsyncTable.CheckAndMutateBuilderifNotExists()
 Description copied from 
interface:AsyncTable.CheckAndMutateBuilder
 Check for lack of column.
 
@@ -390,7 +390,7 @@ implements 
 
 ifMatches
-publicAsyncTable.CheckAndMutateBuilderifMatches(CompareOperatorcompareOp,
+publicAsyncTable.CheckAndMutateBuilderifMatches(CompareOperatorcompareOp,
   byte[]value)
 
 Specified by:
@@ -407,7 +407,7 @@ implements 
 
 preCheck
-privatevoidpreCheck()
+privatevoidpreCheck()
 
 
 
@@ -416,7 +416,7 @@ implements 
 
 thenPut
-publichttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">BooleanthenPut(Putput)
+publichttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">BooleanthenPut(Putput)
 
 Specified by:
 thenPutin
 interfaceAsyncTable.CheckAndMutateBuilder
@@ -434,7 +434,7 @@ implements 
 
 thenDelete
-publichttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">BooleanthenDelete(Deletedelete)
+publichttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">BooleanthenDelete(Deletedelete)
 
 Specified by:
 thenDeletein
 interfaceAsyncTable.CheckAndMutateBuilder
@@ -452,7 +452,7 @@ implements 
 
 thenMutate
-publichttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">BooleanthenMutate(RowMutationsmutation)

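The CheckAndMutateBuilderImpl members above back AsyncTable's fluent check-and-mutate API. A sketch of how a caller might drive it, assuming an AsyncTable handle and illustrative row, family and qualifier names:

import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.hbase.client.AsyncTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.ScanResultConsumerBase;
import org.apache.hadoop.hbase.util.Bytes;

// Sketch only: apply a Put when the target cell does not exist yet.
public class CheckAndMutateExample {
  static CompletableFuture<Boolean> putIfAbsent(AsyncTable<? extends ScanResultConsumerBase> table) {
    byte[] row = Bytes.toBytes("row-0");
    byte[] family = Bytes.toBytes("cf");
    byte[] qualifier = Bytes.toBytes("q");
    Put put = new Put(row).addColumn(family, qualifier, Bytes.toBytes("value"));
    return table.checkAndMutate(row, family)
        .qualifier(qualifier)
        .ifNotExists()
        .thenPut(put);
  }
}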
[27/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.html
index cd0ff28..4f9947f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.html
@@ -31,161 +31,161 @@
 023import static 
org.apache.hadoop.hbase.client.ConnectionUtils.resetController;
 024import static 
org.apache.hadoop.hbase.client.ConnectionUtils.translateException;
 025import static 
org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
-026
-027import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-028
-029import java.io.IOException;
-030import java.util.ArrayList;
-031import java.util.Collections;
-032import java.util.HashMap;
-033import java.util.IdentityHashMap;
-034import java.util.List;
-035import java.util.Map;
-036import java.util.Optional;
-037import 
java.util.concurrent.CompletableFuture;
-038import 
java.util.concurrent.ConcurrentHashMap;
-039import 
java.util.concurrent.ConcurrentLinkedQueue;
-040import 
java.util.concurrent.ConcurrentMap;
-041import 
java.util.concurrent.ConcurrentSkipListMap;
-042import java.util.concurrent.TimeUnit;
-043import java.util.function.Supplier;
-044import java.util.stream.Collectors;
-045import java.util.stream.Stream;
-046
-047import 
org.apache.hadoop.hbase.CellScannable;
-048import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-049import 
org.apache.hadoop.hbase.HRegionLocation;
-050import 
org.apache.hadoop.hbase.ServerName;
-051import 
org.apache.hadoop.hbase.TableName;
-052import 
org.apache.yetus.audience.InterfaceAudience;
-053import org.slf4j.Logger;
-054import org.slf4j.LoggerFactory;
-055import 
org.apache.hadoop.hbase.client.MultiResponse.RegionResult;
-056import 
org.apache.hadoop.hbase.client.RetriesExhaustedException.ThrowableWithExtraContext;
-057import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-058import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-059import 
org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
-060import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-061import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
-062import 
org.apache.hadoop.hbase.util.Bytes;
-063import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-064
-065/**
-066 * Retry caller for batch.
-067 * p
-068 * Notice that, the {@link 
#operationTimeoutNs} is the total time limit now which is the same with
-069 * other single operations
-070 * p
-071 * And the {@link #maxAttempts} is a 
limit for each single operation in the batch logically. In the
-072 * implementation, we will record a 
{@code tries} parameter for each operation group, and if it is
-073 * split to several groups when retrying, 
the sub groups will inherit the {@code tries}. You can
-074 * imagine that the whole retrying 
process is a tree, and the {@link #maxAttempts} is the limit of
-075 * the depth of the tree.
-076 */
-077@InterfaceAudience.Private
-078class 
AsyncBatchRpcRetryingCallerT {
-079
-080  private static final Logger LOG = 
LoggerFactory.getLogger(AsyncBatchRpcRetryingCaller.class);
-081
-082  private final HashedWheelTimer 
retryTimer;
-083
-084  private final AsyncConnectionImpl 
conn;
-085
-086  private final TableName tableName;
-087
-088  private final ListAction 
actions;
-089
-090  private final 
ListCompletableFutureT futures;
-091
-092  private final 
IdentityHashMapAction, CompletableFutureT action2Future;
-093
-094  private final 
IdentityHashMapAction, ListThrowableWithExtraContext 
action2Errors;
-095
-096  private final long pauseNs;
-097
-098  private final int maxAttempts;
-099
-100  private final long 
operationTimeoutNs;
-101
-102  private final long rpcTimeoutNs;
-103
-104  private final int startLogErrorsCnt;
-105
-106  private final long startNs;
-107
-108  // we can not use HRegionLocation as 
the map key because the hashCode and equals method of
-109  // HRegionLocation only consider 
serverName.
-110  private static final class 
RegionRequest {
-111
-112public final HRegionLocation loc;
-113
-114public final 
ConcurrentLinkedQueueAction actions = new 
ConcurrentLinkedQueue();
-115
-116public RegionRequest(HRegionLocation 
loc) {
-117  this.loc = loc;
-118}
-119  }
-120
-121  private static final class 
ServerRequest {
-122
-123public final ConcurrentMapbyte[], 
RegionRequest actionsByRegion =
-124new 
ConcurrentSkipListMap(Bytes.BYTES_COMPARATOR);
-125
-126public void addAction(HRegionLocation 
loc, Action action) {
-127  computeIfAbsent(actionsByRegion, 

[07/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
index 736388b..197b99d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
@@ -26,3624 +26,3599 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-021
-022import com.google.protobuf.Message;
-023import com.google.protobuf.RpcChannel;
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.Collections;
-028import java.util.EnumSet;
-029import java.util.HashMap;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Optional;
-033import java.util.Set;
-034import 
java.util.concurrent.CompletableFuture;
-035import 
java.util.concurrent.ConcurrentHashMap;
-036import java.util.concurrent.TimeUnit;
-037import 
java.util.concurrent.atomic.AtomicReference;
-038import java.util.function.BiConsumer;
-039import java.util.function.Function;
-040import java.util.function.Supplier;
-041import java.util.regex.Pattern;
-042import java.util.stream.Collectors;
-043import java.util.stream.Stream;
-044import org.apache.commons.io.IOUtils;
-045import 
org.apache.hadoop.conf.Configuration;
-046import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-047import 
org.apache.hadoop.hbase.CacheEvictionStats;
-048import 
org.apache.hadoop.hbase.CacheEvictionStatsAggregator;
-049import 
org.apache.hadoop.hbase.ClusterMetrics;
-050import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-051import 
org.apache.hadoop.hbase.ClusterMetricsBuilder;
-052import 
org.apache.hadoop.hbase.HConstants;
-053import 
org.apache.hadoop.hbase.HRegionLocation;
-054import 
org.apache.hadoop.hbase.MetaTableAccessor;
-055import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-056import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.RegionMetrics;
-059import 
org.apache.hadoop.hbase.RegionMetricsBuilder;
-060import 
org.apache.hadoop.hbase.ServerName;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotDisabledException;
-064import 
org.apache.hadoop.hbase.TableNotEnabledException;
-065import 
org.apache.hadoop.hbase.TableNotFoundException;
-066import 
org.apache.hadoop.hbase.UnknownRegionException;
-067import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-068import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-071import 
org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-072import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-073import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.replication.SyncReplicationState;
-083import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-084import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-085import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-088import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-089import 
org.apache.yetus.audience.InterfaceAudience;
-090import org.slf4j.Logger;
-091import org.slf4j.LoggerFactory;
-092
-093import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-094import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-095import 
org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
-096import 

[25/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.LocateRequest.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.LocateRequest.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.LocateRequest.html
index 9aa9b59..ac7e0ea 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.LocateRequest.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.LocateRequest.html
@@ -28,520 +28,565 @@
 020import static 
org.apache.hadoop.hbase.HConstants.NINES;
 021import static 
org.apache.hadoop.hbase.HConstants.ZEROES;
 022import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-023import static 
org.apache.hadoop.hbase.client.ConnectionUtils.createClosestRowAfter;
-024import static 
org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
-025import static 
org.apache.hadoop.hbase.client.RegionInfo.createRegionName;
-026import static 
org.apache.hadoop.hbase.util.Bytes.BYTES_COMPARATOR;
-027import static 
org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
-028
-029import java.io.IOException;
-030import java.util.Arrays;
-031import java.util.HashSet;
-032import java.util.Iterator;
-033import java.util.LinkedHashMap;
-034import java.util.Map;
-035import java.util.Optional;
-036import java.util.Set;
-037import 
java.util.concurrent.CompletableFuture;
-038import 
java.util.concurrent.ConcurrentHashMap;
-039import 
java.util.concurrent.ConcurrentMap;
-040import 
java.util.concurrent.ConcurrentNavigableMap;
-041import 
java.util.concurrent.ConcurrentSkipListMap;
-042import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-043import 
org.apache.hadoop.hbase.HConstants;
-044import 
org.apache.hadoop.hbase.HRegionLocation;
-045import 
org.apache.hadoop.hbase.MetaTableAccessor;
-046import 
org.apache.hadoop.hbase.RegionLocations;
-047import 
org.apache.hadoop.hbase.TableName;
-048import 
org.apache.hadoop.hbase.TableNotFoundException;
-049import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-050import 
org.apache.hadoop.hbase.util.Bytes;
-051import 
org.apache.yetus.audience.InterfaceAudience;
-052import org.slf4j.Logger;
-053import org.slf4j.LoggerFactory;
-054
-055import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-056
-057/**
-058 * The asynchronous locator for regions 
other than meta.
-059 */
-060@InterfaceAudience.Private
-061class AsyncNonMetaRegionLocator {
-062
-063  private static final Logger LOG = 
LoggerFactory.getLogger(AsyncNonMetaRegionLocator.class);
+023import static 
org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper.canUpdateOnError;
+024import static 
org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper.createRegionLocations;
+025import static 
org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper.isGood;
+026import static 
org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper.mergeRegionLocations;
+027import static 
org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper.removeRegionLocation;
+028import static 
org.apache.hadoop.hbase.client.ConnectionUtils.createClosestRowAfter;
+029import static 
org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+030import static 
org.apache.hadoop.hbase.client.RegionInfo.createRegionName;
+031import static 
org.apache.hadoop.hbase.util.Bytes.BYTES_COMPARATOR;
+032import static 
org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
+033
+034import java.io.IOException;
+035import java.util.Arrays;
+036import java.util.HashSet;
+037import java.util.Iterator;
+038import java.util.LinkedHashMap;
+039import java.util.Map;
+040import java.util.Optional;
+041import java.util.Set;
+042import 
java.util.concurrent.CompletableFuture;
+043import 
java.util.concurrent.ConcurrentHashMap;
+044import 
java.util.concurrent.ConcurrentMap;
+045import 
java.util.concurrent.ConcurrentNavigableMap;
+046import 
java.util.concurrent.ConcurrentSkipListMap;
+047import 
org.apache.commons.lang3.ObjectUtils;
+048import 
org.apache.hadoop.hbase.DoNotRetryIOException;
+049import 
org.apache.hadoop.hbase.HBaseIOException;
+050import 
org.apache.hadoop.hbase.HConstants;
+051import 
org.apache.hadoop.hbase.HRegionLocation;
+052import 
org.apache.hadoop.hbase.MetaTableAccessor;
+053import 
org.apache.hadoop.hbase.RegionLocations;
+054import 
org.apache.hadoop.hbase.TableName;
+055import 
org.apache.hadoop.hbase.TableNotFoundException;
+056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
+057import 
org.apache.hadoop.hbase.util.Bytes;
+058import 
org.apache.yetus.audience.InterfaceAudience;
+059import org.slf4j.Logger;
+060import org.slf4j.LoggerFactory;
+061
+062import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+063import 

[16/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder.html
index 2e150bc..0b315b8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder.html
@@ -25,22 +25,22 @@
 017 */
 018package org.apache.hadoop.hbase.client;
 019
-020import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
-021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
-022import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+020import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
+022import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
 023
-024import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-025
-026import java.util.List;
-027import 
java.util.concurrent.CompletableFuture;
-028import java.util.concurrent.TimeUnit;
-029
-030import 
org.apache.hadoop.hbase.HRegionLocation;
-031import 
org.apache.hadoop.hbase.ServerName;
-032import 
org.apache.hadoop.hbase.TableName;
-033import 
org.apache.yetus.audience.InterfaceAudience;
-034import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
-035import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+024import java.util.List;
+025import 
java.util.concurrent.CompletableFuture;
+026import java.util.concurrent.TimeUnit;
+027import 
org.apache.hadoop.hbase.HRegionLocation;
+028import 
org.apache.hadoop.hbase.ServerName;
+029import 
org.apache.hadoop.hbase.TableName;
+030import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+031import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+032import 
org.apache.yetus.audience.InterfaceAudience;
+033
+034import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
+035
 036import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
 037import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse;
 038
@@ -83,432 +83,441 @@
 075
 076private RegionLocateType locateType = 
RegionLocateType.CURRENT;
 077
-078public 
SingleRequestCallerBuilderT table(TableName tableName) {
-079  this.tableName = tableName;
-080  return this;
-081}
-082
-083public 
SingleRequestCallerBuilderT row(byte[] row) {
-084  this.row = row;
-085  return this;
-086}
-087
-088public 
SingleRequestCallerBuilderT action(
-089
AsyncSingleRequestRpcRetryingCaller.CallableT callable) {
-090  this.callable = callable;
-091  return this;
-092}
-093
-094public 
SingleRequestCallerBuilderT operationTimeout(long operationTimeout, 
TimeUnit unit) {
-095  this.operationTimeoutNs = 
unit.toNanos(operationTimeout);
-096  return this;
-097}
-098
-099public 
SingleRequestCallerBuilderT rpcTimeout(long rpcTimeout, TimeUnit unit) 
{
-100  this.rpcTimeoutNs = 
unit.toNanos(rpcTimeout);
-101  return this;
-102}
-103
-104public 
SingleRequestCallerBuilderT locateType(RegionLocateType locateType) {
-105  this.locateType = locateType;
-106  return this;
-107}
-108
-109public 
SingleRequestCallerBuilderT pause(long pause, TimeUnit unit) {
-110  this.pauseNs = 
unit.toNanos(pause);
-111  return this;
-112}
-113
-114public 
SingleRequestCallerBuilderT maxAttempts(int maxAttempts) {
-115  this.maxAttempts = maxAttempts;
-116  return this;
-117}
-118
-119public 
SingleRequestCallerBuilderT startLogErrorsCnt(int startLogErrorsCnt) 
{
-120  this.startLogErrorsCnt = 
startLogErrorsCnt;
-121  return this;
-122}
-123
-124public 
AsyncSingleRequestRpcRetryingCallerT build() {
-125  return new 
AsyncSingleRequestRpcRetryingCaller(retryTimer, conn,
-126  checkNotNull(tableName, 
"tableName is null"), checkNotNull(row, "row is null"),
-127  checkNotNull(locateType, 
"locateType is null"), checkNotNull(callable, "action is null"),
-128  pauseNs, maxAttempts, 
operationTimeoutNs, rpcTimeoutNs, startLogErrorsCnt);
+078private int replicaId = 
RegionReplicaUtil.DEFAULT_REPLICA_ID;
+079
+080public 
SingleRequestCallerBuilderT table(TableName tableName) {
+081  this.tableName = tableName;
+082  return this;
+083}
+084
+085public 
SingleRequestCallerBuilderT row(byte[] 

[44/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.html
index d442d56..b228f8d 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-class AsyncBatchRpcRetryingCallerT
+class AsyncBatchRpcRetryingCallerT
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 Retry caller for batch.
  
@@ -387,7 +387,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 LOG
-private static finalorg.slf4j.Logger LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -396,7 +396,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 retryTimer
-private 
finalorg.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer retryTimer
+private 
finalorg.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer retryTimer
 
 
 
@@ -405,7 +405,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 conn
-private finalAsyncConnectionImpl conn
+private finalAsyncConnectionImpl conn
 
 
 
@@ -414,7 +414,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 tableName
-private finalTableName tableName
+private finalTableName tableName
 
 
 
@@ -423,7 +423,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 actions
-private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListAction actions
+private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListAction actions
 
 
 
@@ -432,7 +432,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 futures
-private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureT futures
+private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureT futures
 
 
 
@@ -441,7 +441,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 action2Future
-private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/IdentityHashMap.html?is-external=true;
 title="class or interface in java.util">IdentityHashMapAction,https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureT action2Future
+private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/IdentityHashMap.html?is-external=true;
 title="class or interface in java.util">IdentityHashMapAction,https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureT action2Future
 
 
 
@@ -450,7 +450,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 action2Errors
-private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/IdentityHashMap.html?is-external=true;
 title="class or interface in java.util">IdentityHashMapAction,https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRetriesExhaustedException.ThrowableWithExtraContext
 action2Errors
+private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/IdentityHashMap.html?is-external=true;
 title="class or interface in java.util">IdentityHashMapAction,https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRetriesExhaustedException.ThrowableWithExtraContext
 action2Errors
 
 
 
@@ -459,7 +459,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 pauseNs
-private finallong pauseNs
+private finallong pauseNs
 
 
 
@@ -468,7 +468,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 maxAttempts
-private finalint maxAttempts
+private finalint maxAttempts
 
 
 
@@ -477,7 +477,7 @@ extends 

[41/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/AsyncRegionLocator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncRegionLocator.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRegionLocator.html
index 196bc95..5937d40 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncRegionLocator.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncRegionLocator.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":9,"i1":10,"i2":10,"i3":10,"i4":10,"i5":9,"i6":10};
-var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-class AsyncRegionLocator
+class AsyncRegionLocator
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 The asynchronous region locator.
 
@@ -175,51 +175,67 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 Method Summary
 
-All MethodsStatic MethodsInstance MethodsConcrete Methods
+All MethodsInstance MethodsConcrete Methods
 
 Modifier and Type
 Method and Description
 
 
-(package private) static boolean
-canUpdate(HRegionLocationloc,
- HRegionLocationoldLoc)
-
-
 (package private) void
 clearCache(TableNametableName)
 
-
+
 (package private) https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation
-getRegionLocation(TableNametableName,
+getRegionLocation(TableNametableName,
  byte[]row,
+ intreplicaId,
  RegionLocateTypetype,
  booleanreload,
  longtimeoutNs)
 
+
+(package private) https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation
+getRegionLocation(TableNametableName,
+ byte[]row,
+ intreplicaId,
+ RegionLocateTypetype,
+ longtimeoutNs)
+
 
 (package private) https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation
-getRegionLocation(TableNametableName,
+getRegionLocation(TableNametableName,
  byte[]row,
  RegionLocateTypetype,
+ booleanreload,
  longtimeoutNs)
 
 
-(package private) void
-updateCachedLocation(HRegionLocationloc,
-https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in 
java.lang">Throwableexception)
+(package private) https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation
+getRegionLocation(TableNametableName,
+ byte[]row,
+ RegionLocateTypetype,
+ longtimeoutNs)
 
 
-(package private) static void
-updateCachedLocation(HRegionLocationloc,
-https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">Throwableexception,
-https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true;
 title="class or interface in java.util.function">FunctionHRegionLocation,HRegionLocationcachedLocationSupplier,
-https://docs.oracle.com/javase/8/docs/api/java/util/function/Consumer.html?is-external=true;
 title="class or interface in java.util.function">ConsumerHRegionLocationaddToCache,
-https://docs.oracle.com/javase/8/docs/api/java/util/function/Consumer.html?is-external=true;
 title="class or interface in java.util.function">ConsumerHRegionLocationremoveFromCache)
+(package private) https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureRegionLocations
+getRegionLocations(TableNametableName,
+  byte[]row,
+  RegionLocateTypetype,
+  booleanreload,
+  

[14/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.html
index 2e150bc..0b315b8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.html
@@ -25,22 +25,22 @@
 017 */
 018package org.apache.hadoop.hbase.client;
 019
-020import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
-021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
-022import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+020import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
+022import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
 023
-024import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-025
-026import java.util.List;
-027import 
java.util.concurrent.CompletableFuture;
-028import java.util.concurrent.TimeUnit;
-029
-030import 
org.apache.hadoop.hbase.HRegionLocation;
-031import 
org.apache.hadoop.hbase.ServerName;
-032import 
org.apache.hadoop.hbase.TableName;
-033import 
org.apache.yetus.audience.InterfaceAudience;
-034import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
-035import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+024import java.util.List;
+025import 
java.util.concurrent.CompletableFuture;
+026import java.util.concurrent.TimeUnit;
+027import 
org.apache.hadoop.hbase.HRegionLocation;
+028import 
org.apache.hadoop.hbase.ServerName;
+029import 
org.apache.hadoop.hbase.TableName;
+030import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+031import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+032import 
org.apache.yetus.audience.InterfaceAudience;
+033
+034import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
+035
 036import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
 037import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse;
 038
@@ -83,432 +83,441 @@
 075
 076private RegionLocateType locateType = 
RegionLocateType.CURRENT;
 077
-078public 
SingleRequestCallerBuilderT table(TableName tableName) {
-079  this.tableName = tableName;
-080  return this;
-081}
-082
-083public 
SingleRequestCallerBuilderT row(byte[] row) {
-084  this.row = row;
-085  return this;
-086}
-087
-088public 
SingleRequestCallerBuilderT action(
-089
AsyncSingleRequestRpcRetryingCaller.CallableT callable) {
-090  this.callable = callable;
-091  return this;
-092}
-093
-094public 
SingleRequestCallerBuilderT operationTimeout(long operationTimeout, 
TimeUnit unit) {
-095  this.operationTimeoutNs = 
unit.toNanos(operationTimeout);
-096  return this;
-097}
-098
-099public 
SingleRequestCallerBuilderT rpcTimeout(long rpcTimeout, TimeUnit unit) 
{
-100  this.rpcTimeoutNs = 
unit.toNanos(rpcTimeout);
-101  return this;
-102}
-103
-104public 
SingleRequestCallerBuilderT locateType(RegionLocateType locateType) {
-105  this.locateType = locateType;
-106  return this;
-107}
-108
-109public 
SingleRequestCallerBuilderT pause(long pause, TimeUnit unit) {
-110  this.pauseNs = 
unit.toNanos(pause);
-111  return this;
-112}
-113
-114public 
SingleRequestCallerBuilderT maxAttempts(int maxAttempts) {
-115  this.maxAttempts = maxAttempts;
-116  return this;
-117}
-118
-119public 
SingleRequestCallerBuilderT startLogErrorsCnt(int startLogErrorsCnt) 
{
-120  this.startLogErrorsCnt = 
startLogErrorsCnt;
-121  return this;
-122}
-123
-124public 
AsyncSingleRequestRpcRetryingCallerT build() {
-125  return new 
AsyncSingleRequestRpcRetryingCaller(retryTimer, conn,
-126  checkNotNull(tableName, 
"tableName is null"), checkNotNull(row, "row is null"),
-127  checkNotNull(locateType, 
"locateType is null"), checkNotNull(callable, "action is null"),
-128  pauseNs, maxAttempts, 
operationTimeoutNs, rpcTimeoutNs, startLogErrorsCnt);
+078private int replicaId = 
RegionReplicaUtil.DEFAULT_REPLICA_ID;
+079
+080public 
SingleRequestCallerBuilderT table(TableName tableName) {
+081  this.tableName = tableName;
+082  return this;
+083}
+084
+085public 
SingleRequestCallerBuilderT row(byte[] row) {
+086  this.row = row;
+087  return this;
+088}
+089
+090public 
SingleRequestCallerBuilderT action(
+091

[09/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
index 736388b..197b99d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
@@ -26,3624 +26,3599 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-021
-022import com.google.protobuf.Message;
-023import com.google.protobuf.RpcChannel;
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.Collections;
-028import java.util.EnumSet;
-029import java.util.HashMap;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Optional;
-033import java.util.Set;
-034import 
java.util.concurrent.CompletableFuture;
-035import 
java.util.concurrent.ConcurrentHashMap;
-036import java.util.concurrent.TimeUnit;
-037import 
java.util.concurrent.atomic.AtomicReference;
-038import java.util.function.BiConsumer;
-039import java.util.function.Function;
-040import java.util.function.Supplier;
-041import java.util.regex.Pattern;
-042import java.util.stream.Collectors;
-043import java.util.stream.Stream;
-044import org.apache.commons.io.IOUtils;
-045import 
org.apache.hadoop.conf.Configuration;
-046import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-047import 
org.apache.hadoop.hbase.CacheEvictionStats;
-048import 
org.apache.hadoop.hbase.CacheEvictionStatsAggregator;
-049import 
org.apache.hadoop.hbase.ClusterMetrics;
-050import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-051import 
org.apache.hadoop.hbase.ClusterMetricsBuilder;
-052import 
org.apache.hadoop.hbase.HConstants;
-053import 
org.apache.hadoop.hbase.HRegionLocation;
-054import 
org.apache.hadoop.hbase.MetaTableAccessor;
-055import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-056import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.RegionMetrics;
-059import 
org.apache.hadoop.hbase.RegionMetricsBuilder;
-060import 
org.apache.hadoop.hbase.ServerName;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotDisabledException;
-064import 
org.apache.hadoop.hbase.TableNotEnabledException;
-065import 
org.apache.hadoop.hbase.TableNotFoundException;
-066import 
org.apache.hadoop.hbase.UnknownRegionException;
-067import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-068import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-071import 
org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-072import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-073import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.replication.SyncReplicationState;
-083import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-084import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-085import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-088import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-089import 
org.apache.yetus.audience.InterfaceAudience;
-090import org.slf4j.Logger;
-091import org.slf4j.LoggerFactory;
-092
-093import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-094import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-095import 
org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
-096import 

[01/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site b5ab7a738 -> 2bf59208a


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
index 736388b..197b99d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
@@ -26,3624 +26,3599 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-021
-022import com.google.protobuf.Message;
-023import com.google.protobuf.RpcChannel;
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.Collections;
-028import java.util.EnumSet;
-029import java.util.HashMap;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Optional;
-033import java.util.Set;
-034import 
java.util.concurrent.CompletableFuture;
-035import 
java.util.concurrent.ConcurrentHashMap;
-036import java.util.concurrent.TimeUnit;
-037import 
java.util.concurrent.atomic.AtomicReference;
-038import java.util.function.BiConsumer;
-039import java.util.function.Function;
-040import java.util.function.Supplier;
-041import java.util.regex.Pattern;
-042import java.util.stream.Collectors;
-043import java.util.stream.Stream;
-044import org.apache.commons.io.IOUtils;
-045import 
org.apache.hadoop.conf.Configuration;
-046import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-047import 
org.apache.hadoop.hbase.CacheEvictionStats;
-048import 
org.apache.hadoop.hbase.CacheEvictionStatsAggregator;
-049import 
org.apache.hadoop.hbase.ClusterMetrics;
-050import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-051import 
org.apache.hadoop.hbase.ClusterMetricsBuilder;
-052import 
org.apache.hadoop.hbase.HConstants;
-053import 
org.apache.hadoop.hbase.HRegionLocation;
-054import 
org.apache.hadoop.hbase.MetaTableAccessor;
-055import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-056import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.RegionMetrics;
-059import 
org.apache.hadoop.hbase.RegionMetricsBuilder;
-060import 
org.apache.hadoop.hbase.ServerName;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotDisabledException;
-064import 
org.apache.hadoop.hbase.TableNotEnabledException;
-065import 
org.apache.hadoop.hbase.TableNotFoundException;
-066import 
org.apache.hadoop.hbase.UnknownRegionException;
-067import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-068import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-071import 
org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-072import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-073import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.replication.SyncReplicationState;
-083import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-084import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-085import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-088import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-089import 
org.apache.yetus.audience.InterfaceAudience;
-090import org.slf4j.Logger;
-091import org.slf4j.LoggerFactory;
-092
-093import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-094import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-095import 

[43/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.html
index 0c27135..6fef27f 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-class AsyncMetaRegionLocator
+class AsyncMetaRegionLocator
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 The asynchronous locator for meta region.
 
@@ -136,11 +136,11 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 LOG
 
 
-private https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicReferenceHRegionLocation
-metaRegionLocation
+private https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicReferenceRegionLocations
+metaRegionLocations
 
 
-private https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicReferencehttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation
+private https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicReferencehttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureRegionLocations
 metaRelocateFuture
 
 
@@ -180,17 +180,32 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 Method and Description
 
 
-(package private) void
-clearCache()
+private void
+addLocationToCache(HRegionLocationloc)
 
 
-(package private) https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation
-getRegionLocation(booleanreload)
+(package private) void
+clearCache()
 
 
+private HRegionLocation
+getCacheLocation(HRegionLocationloc)
+
+
+(package private) https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureRegionLocations
+getRegionLocations(intreplicaId,
+  booleanreload)
+Get the region locations for meta region.
+
+
+
+private void
+removeLocationFromCache(HRegionLocationloc)
+
+
 (package private) void
-updateCachedLocation(HRegionLocationloc,
-https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in 
java.lang">Throwableexception)
+updateCachedLocationOnError(HRegionLocationloc,
+   https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in 
java.lang">Throwableexception)
 
 
 
@@ -220,7 +235,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 LOG
-private static finalorg.slf4j.Logger LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -229,16 +244,16 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 registry
-private finalAsyncRegistry registry
+private finalAsyncRegistry registry
 
 
-
+
 
 
 
 
-metaRegionLocation
-private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicReferenceHRegionLocation metaRegionLocation
+metaRegionLocations
+private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicReferenceRegionLocations metaRegionLocations
 
 
 
@@ -247,7 +262,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 metaRelocateFuture
-private 

[32/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/package-frame.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-frame.html 
b/devapidocs/org/apache/hadoop/hbase/client/package-frame.html
index 2b10b8f..bdc9077 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-frame.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-frame.html
@@ -106,6 +106,7 @@
 AsyncProcessTask.Builder
 AsyncProcessTask.ListRowAccess
 AsyncRegionLocator
+AsyncRegionLocatorHelper
 AsyncRegistryFactory
 AsyncRequestFutureImpl
 AsyncRequestFutureImpl.ReplicaResultState
@@ -210,6 +211,23 @@
 Query
 QuotaStatusCalls
 RawAsyncHBaseAdmin
+RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer
+RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer
+RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer
+RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer
+RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer
+RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer
+RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer
+RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer
+RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer
+RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer
+RawAsyncHBaseAdmin.ModifyTableProcedureBiConsumer
+RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer
+RawAsyncHBaseAdmin.ProcedureBiConsumer
+RawAsyncHBaseAdmin.ReplicationProcedureBiConsumer
+RawAsyncHBaseAdmin.SplitTableRegionProcedureBiConsumer
+RawAsyncHBaseAdmin.TableProcedureBiConsumer
+RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer
 RawAsyncTableImpl
 RegionAdminServiceCallable
 RegionCoprocessorRpcChannel

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/package-summary.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-summary.html 
b/devapidocs/org/apache/hadoop/hbase/client/package-summary.html
index 917a200..f594746 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-summary.html
@@ -602,87 +602,93 @@
 
 
 
+AsyncRegionLocatorHelper
+
+Helper class for asynchronous region locator.
+
+
+
 AsyncRegistryFactory
 
 Get instance of configured Registry.
 
 
-
+
 AsyncRequestFutureImplCResult
 
 The context, and return value, for a single 
submit/submitAll call.
 
 
-
+
 AsyncRequestFutureImpl.ReplicaResultState
 
 Sync point for calls to multiple replicas for the same user 
request (Get).
 
 
-
+
 AsyncRpcRetryingCallerT
 
 
-
+
 AsyncRpcRetryingCallerFactory
 
 Factory to create an AsyncRpcRetryCaller.
 
 
-
+
 AsyncScanSingleRegionRpcRetryingCaller
 
 Retry caller for scanning a region.
 
 
-
+
 AsyncServerRequestRpcRetryingCallerT
 
 Retry caller for a request call to region server.
 
 
-
+
 AsyncSingleRequestRpcRetryingCallerT
 
 Retry caller for a single request, such as get, put, 
delete, etc.
 
 
-
+
 AsyncTableBuilderBaseC 
extends ScanResultConsumerBase
 
 Base class for all asynchronous table builders.
 
 
-
+
 AsyncTableImpl
 
 Just a wrapper of RawAsyncTableImpl.
 
 
-
+
 AsyncTableRegionLocatorImpl
 
 The implementation of AsyncRegionLocator.
 
 
-
+
 AsyncTableResultScanner
 
 The ResultScanner implementation 
for AsyncTable.
 
 
-
+
 BatchErrors
 
 
-
+
 BatchScanResultCache
 
 A scan result cache for batched scan, i.e,
  scan.getBatch()  0  
!scan.getAllowPartialResults().
 
 
-
+
 BufferedMutatorImpl
 
 
@@ -690,137 +696,137 @@
  but meant for batched, potentially asynchronous puts.
 
 
-
+
 BufferedMutatorParams
 
 Parameters for instantiating a BufferedMutator.
 
 
-
+
 CancellableRegionServerCallableT
 
 This class is used to unify HTable calls with AsyncProcess 
Framework.
 
 
-
+
 ClientAsyncPrefetchScanner
 
 ClientAsyncPrefetchScanner implements async scanner 
behaviour.
 
 
-
+
 ClientCoprocessorRpcController
 
 Client side rpc controller for coprocessor 
implementation.
 
 
-
+
 ClientIdGenerator
 
 The class that is able to determine some unique strings for 
the client,
  such as an IP address, PID, and composite deterministic ID.
 
 
-
+
 ClientScanner
 
 Implements the scanner interface for the HBase client.
 
 
-
+
 ClientServiceCallableT
 
 A RegionServerCallable set to use the Client protocol.
 
 
-
+
 ClientSideRegionScanner
 
 A client scanner for a region opened for read-only on the 
client side.
 
 
-
+
 ClientSimpleScanner
 
 ClientSimpleScanner implements a sync scanner 
behaviour.
 
 
-
+
 ClientUtil
 
 
-
+
 ClusterStatusListener
 
 A class that receives the cluster status, and provide it as 
a set of service to the client.
 
 
-
+
 ColumnFamilyDescriptorBuilder
 
 
-
+
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor
 
 An ModifyableFamilyDescriptor contains information about a 
column family such as the
  number of 

[36/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
index 4527b5b..5060801 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-class RawAsyncHBaseAdmin
+class RawAsyncHBaseAdmin
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements AsyncAdmin
 The implementation of AsyncAdmin.
@@ -150,7 +150,7 @@ implements Class and Description
 
 
-private class
+private static class
 RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer
 
 
@@ -162,19 +162,19 @@ implements RawAsyncHBaseAdmin.ConverterD,S
 
 
-private class
+private static class
 RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer
 
 
-private class
+private static class
 RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer
 
 
-private class
+private static class
 RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer
 
 
-private class
+private static class
 RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer
 
 
@@ -182,11 +182,11 @@ implements RawAsyncHBaseAdmin.DeleteTableProcedureBiConsumer
 
 
-private class
+private static class
 RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer
 
 
-private class
+private static class
 RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer
 
 
@@ -194,35 +194,35 @@ implements RawAsyncHBaseAdmin.MasterRpcCallRESP,REQ
 
 
-private class
+private static class
 RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer
 
 
-private class
+private static class
 RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer
 
 
-private class
+private static class
 RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer
 
 
-private class
+private static class
 RawAsyncHBaseAdmin.ModifyTableProcedureBiConsumer
 
 
-private class
+private static class
 RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer
 
 
-private class
+private static class
 RawAsyncHBaseAdmin.ProcedureBiConsumer
 
 
-private class
+private static class
 RawAsyncHBaseAdmin.ReplicationProcedureBiConsumer
 
 
-private class
+private static class
 RawAsyncHBaseAdmin.SplitTableRegionProcedureBiConsumer
 
 
@@ -230,11 +230,11 @@ implements RawAsyncHBaseAdmin.TableOperator
 
 
-private class
+private static class
 RawAsyncHBaseAdmin.TableProcedureBiConsumer
 
 
-private class
+private static class
 RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer
 
 
@@ -1460,7 +1460,7 @@ implements 
 
 FLUSH_TABLE_PROCEDURE_SIGNATURE
-public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String FLUSH_TABLE_PROCEDURE_SIGNATURE
+public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String FLUSH_TABLE_PROCEDURE_SIGNATURE
 
 See Also:
 Constant
 Field Values
@@ -1473,7 +1473,7 @@ implements 
 
 LOG
-private static finalorg.slf4j.Logger LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -1482,7 +1482,7 @@ implements 
 
 connection
-private finalAsyncConnectionImpl connection
+private finalAsyncConnectionImpl connection
 
 
 
@@ -1491,7 +1491,7 @@ implements 
 
 retryTimer
-private 
finalorg.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer retryTimer
+private 
finalorg.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer retryTimer
 
 
 
@@ -1500,7 +1500,7 @@ implements 
 
 metaTable
-private finalAsyncTableAdvancedScanResultConsumer metaTable
+private finalAsyncTableAdvancedScanResultConsumer metaTable
 
 
 
@@ -1509,7 +1509,7 @@ implements 
 
 rpcTimeoutNs
-private finallong rpcTimeoutNs
+private finallong rpcTimeoutNs
 
 
 
@@ -1518,7 +1518,7 @@ implements 
 
 operationTimeoutNs
-private finallong operationTimeoutNs
+private finallong operationTimeoutNs
 
 
 
@@ -1527,7 +1527,7 @@ implements 
 
 pauseNs
-private finallong pauseNs
+private finallong pauseNs
 
 
 
@@ -1536,7 +1536,7 @@ implements 
 
 maxAttempts
-private finalint maxAttempts
+private finalint maxAttempts
 
 
 
@@ -1545,7 +1545,7 @@ implements 
 
 startLogErrorsCnt
-private finalint startLogErrorsCnt
+private finalint startLogErrorsCnt
 
 
 
@@ -1554,7 +1554,7 @@ implements 
 
 ng
-private finalNonceGenerator ng
+private finalNonceGenerator ng
 
 
 
@@ -1571,7 +1571,7 @@ implements 
 
 RawAsyncHBaseAdmin
-RawAsyncHBaseAdmin(AsyncConnectionImplconnection,
+RawAsyncHBaseAdmin(AsyncConnectionImplconnection,

org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimerretryTimer,
AsyncAdminBuilderBasebuilder)
 
@@ -1590,7 +1590,7 

[30/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-summary.html 
b/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
index 98ae775..1ce7bf0 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
@@ -711,182 +711,188 @@
 
 
 
+FutureUtils
+
+Helper class for processing futures.
+
+
+
 GetJavaProperty
 
 A generic way for querying Java properties.
 
 
-
+
 Hash
 
 This class represents a common API for hashing 
functions.
 
 
-
+
 HashedBytes
 
 This class encapsulates a byte array and overrides hashCode 
and equals so
  that it's identity is based on the data rather than the array instance.
 
 
-
+
 HashKeyT
 
 Used to calculate the hash Hash algorithms for 
Bloomfilters.
 
 
-
+
 HasThread
 
 Abstract class which contains a Thread and delegates the 
common Thread
  methods to that instance.
 
 
-
+
 HBaseConfTool
 
 Tool that prints out a configuration.
 
 
-
+
 HBaseFsck
 
 HBaseFsck (hbck) is a tool for checking and repairing 
region consistency and
  table integrity problems in a corrupted HBase.
 
 
-
+
 HBaseFsck.FileLockCallable
 
 
-
+
 HBaseFsck.HBaseFsckTool
 
 This is a Tool wrapper that gathers -Dxxx=yyy configuration 
settings from the command line.
 
 
-
+
 HBaseFsck.HbckInfo
 
 Maintain information about a particular region.
 
 
-
+
 HBaseFsck.HdfsEntry
 
 Stores the regioninfo entries from HDFS
 
 
-
+
 HBaseFsck.MetaEntry
 
 Stores the regioninfo entries scanned from META
 
 
-
+
 HBaseFsck.OnlineEntry
 
 Stores the regioninfo retrieved from Online region 
servers.
 
 
-
+
 HBaseFsck.PrintingErrorReporter
 
 
-
+
 HBaseFsck.RegionBoundariesInformation
 
 
-
+
 HBaseFsck.WorkItemHdfsRegionInfo
 
 Contact hdfs and get all information about specified table 
directory into
  regioninfo list.
 
 
-
+
 HBaseFsck.WorkItemOverlapMerge
 
 
-
+
 HBaseFsck.WorkItemRegion
 
 Contact a region server and get all information from 
it
 
 
-
+
 HBaseFsckRepair
 
 This class contains helper methods that repair parts of 
hbase's filesystem
  contents.
 
 
-
+
 HFileArchiveUtil
 
 Helper class for all utilities related to 
archival/retrieval of HFiles
 
 
-
+
 IdLock
 
 Allows multiple concurrent clients to lock on a numeric id 
with a minimal
  memory overhead.
 
 
-
+
 IdLock.Entry
 
 An entry returned to the client as a lock object
 
 
-
+
 IdReadWriteLock<T>
 
 Allows multiple concurrent clients to lock on a numeric id 
with ReentrantReadWriteLock.
 
 
-
+
 ImmutableByteArray
 
 Mainly used as keys for HashMap.
 
 
-
+
 IncrementingEnvironmentEdge
 
 Uses an incrementing algorithm instead of the default.
 
 
-
+
 JenkinsHash
 
 Produces 32-bit hash for hash table lookup.
 
 
-
+
 JRubyFormat
 
 Utility class for converting objects to JRuby.
 
 
-
+
 JSONBean
 
 Utility for doing JSON and MBeans.
 
 
-
+
 JsonMapper
 
 Utility class for converting objects to JSON
 
 
-
+
 JSONMetricUtil
 
 
-
+
 JVM
 
 This class is a wrapper for the implementation of
@@ -895,97 +901,97 @@
  depending on the runtime (vendor) used.
 
 
-
+
 JVMClusterUtil
 
 Utility used when running a cluster all in one JVM.
 
 
-
+
 JVMClusterUtil.MasterThread
 
 Datastructure to hold Master Thread and Master 
instance
 
 
-
+
 JVMClusterUtil.RegionServerThread
 
 Datastructure to hold RegionServer Thread and RegionServer 
instance
 
 
-
+
 JvmPauseMonitor
 
 Class which sets up a simple thread which runs in a loop 
sleeping
  for a short interval of time.
 
 
-
+
 JvmPauseMonitor.GcTimes
 
 
-
+
 JvmVersion
 
 Certain JVM versions are known to be unstable with 
HBase.
 
 
-
+
 KeyLocker<K>
 
 A utility class to manage a set of locks.
 
 
-
+
 LossyCounting
 
 LossyCounting utility, a bounded data structure that maintains approximate
  high-frequency elements in a data stream.
 
 
-
+
 ManualEnvironmentEdge
 
 An environment edge that uses a manually set value.
 
 
-
+
 MapreduceDependencyClasspathTool
 
 Generate a classpath string containing any jars required by 
mapreduce jobs.
 
 
-
+
 MapReduceExtendedCell
 
 A wrapper for a cell to be used with mapreduce, as the 
output value class for mappers/reducers.
 
 
-
+
 MD5Hash
 
 Utility class for MD5.
  MD5 hash produces a 128-bit digest.
 
 
-
+
 Methods
 
 
-
+
 ModifyRegionUtils
 
 Utility methods for interacting with the regions.
 
 
-
+
 MultiHConnection
 
 Provides ability to create multiple Connection instances 
and allows to process a batch of
  actions using CHTable.doBatchWithCallback()
 
 
-
+
 MunkresAssignment
 
 Computes the optimal (minimal cost) assignment of jobs to 
workers (or other
@@ -996,126 +1002,126 @@
  Problem: An Improved Version of Munkres' Algorithm".
 
 
-
+
 MurmurHash
 
 This is a very fast, non-cryptographic hash suitable for 
general hash-based
  lookup.
 
 
-
+
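FutureUtils is new in this publish and is summarised above only as "Helper class for processing futures." Purely as an illustration of what such a helper buys callers of the async client, here is a minimal, self-contained Java sketch; the addListener method name and the error-unwrapping behaviour are assumptions for this sketch, not the actual HBase class.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ExecutionException;
import java.util.function.BiConsumer;

// Illustrative only: a tiny helper in the spirit of the FutureUtils entry above.
public final class FutureHelperSketch {

  private FutureHelperSketch() {
  }

  // Attach a callback without making every caller repeat the whenComplete plumbing.
  public static <T> void addListener(CompletableFuture<T> future,
      BiConsumer<? super T, ? super Throwable> listener) {
    future.whenComplete((result, error) -> listener.accept(result, unwrap(error)));
  }

  // CompletableFuture wraps failures in CompletionException/ExecutionException; unwrap
  // once so listeners see the original cause.
  private static Throwable unwrap(Throwable error) {
    if ((error instanceof CompletionException || error instanceof ExecutionException)
        && error.getCause() != null) {
      return error.getCause();
    }
    return error;
  }

  public static void main(String[] args) {
    // Already-completed future: the listener runs synchronously on this thread.
    CompletableFuture<String> ok = CompletableFuture.completedFuture("row-contents");
    addListener(ok, (value, error) ->
        System.out.println(error != null ? "failed: " + error : "got: " + value));
  }
}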
 

[20/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BatchCallerBuilder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BatchCallerBuilder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BatchCallerBuilder.html
index 2e150bc..0b315b8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BatchCallerBuilder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BatchCallerBuilder.html
@@ -25,22 +25,22 @@
 017 */
 018package org.apache.hadoop.hbase.client;
 019
-020import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
-021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
-022import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+020import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
+022import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
 023
-024import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-025
-026import java.util.List;
-027import 
java.util.concurrent.CompletableFuture;
-028import java.util.concurrent.TimeUnit;
-029
-030import 
org.apache.hadoop.hbase.HRegionLocation;
-031import 
org.apache.hadoop.hbase.ServerName;
-032import 
org.apache.hadoop.hbase.TableName;
-033import 
org.apache.yetus.audience.InterfaceAudience;
-034import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
-035import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+024import java.util.List;
+025import 
java.util.concurrent.CompletableFuture;
+026import java.util.concurrent.TimeUnit;
+027import 
org.apache.hadoop.hbase.HRegionLocation;
+028import 
org.apache.hadoop.hbase.ServerName;
+029import 
org.apache.hadoop.hbase.TableName;
+030import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+031import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+032import 
org.apache.yetus.audience.InterfaceAudience;
+033
+034import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
+035
 036import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
 037import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse;
 038
@@ -83,432 +83,441 @@
 075
 076private RegionLocateType locateType = 
RegionLocateType.CURRENT;
 077
-078public 
SingleRequestCallerBuilderT table(TableName tableName) {
-079  this.tableName = tableName;
-080  return this;
-081}
-082
-083public 
SingleRequestCallerBuilderT row(byte[] row) {
-084  this.row = row;
-085  return this;
-086}
-087
-088public 
SingleRequestCallerBuilderT action(
-089
AsyncSingleRequestRpcRetryingCaller.CallableT callable) {
-090  this.callable = callable;
-091  return this;
-092}
-093
-094public 
SingleRequestCallerBuilderT operationTimeout(long operationTimeout, 
TimeUnit unit) {
-095  this.operationTimeoutNs = 
unit.toNanos(operationTimeout);
-096  return this;
-097}
-098
-099public 
SingleRequestCallerBuilderT rpcTimeout(long rpcTimeout, TimeUnit unit) 
{
-100  this.rpcTimeoutNs = 
unit.toNanos(rpcTimeout);
-101  return this;
-102}
-103
-104public 
SingleRequestCallerBuilderT locateType(RegionLocateType locateType) {
-105  this.locateType = locateType;
-106  return this;
-107}
-108
-109public 
SingleRequestCallerBuilderT pause(long pause, TimeUnit unit) {
-110  this.pauseNs = 
unit.toNanos(pause);
-111  return this;
-112}
-113
-114public 
SingleRequestCallerBuilderT maxAttempts(int maxAttempts) {
-115  this.maxAttempts = maxAttempts;
-116  return this;
-117}
-118
-119public 
SingleRequestCallerBuilderT startLogErrorsCnt(int startLogErrorsCnt) 
{
-120  this.startLogErrorsCnt = 
startLogErrorsCnt;
-121  return this;
-122}
-123
-124public 
AsyncSingleRequestRpcRetryingCallerT build() {
-125  return new 
AsyncSingleRequestRpcRetryingCaller(retryTimer, conn,
-126  checkNotNull(tableName, 
"tableName is null"), checkNotNull(row, "row is null"),
-127  checkNotNull(locateType, 
"locateType is null"), checkNotNull(callable, "action is null"),
-128  pauseNs, maxAttempts, 
operationTimeoutNs, rpcTimeoutNs, startLogErrorsCnt);
+078private int replicaId = 
RegionReplicaUtil.DEFAULT_REPLICA_ID;
+079
+080public 
SingleRequestCallerBuilderT table(TableName tableName) {
+081  this.tableName = tableName;
+082  return this;
+083}
+084
+085public 
SingleRequestCallerBuilderT row(byte[] row) {
+086  this.row = row;
+087   
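The hunk above adds a replicaId field (defaulting to RegionReplicaUtil.DEFAULT_REPLICA_ID) alongside the existing table/row/locateType/timeout setters of the single-request caller builder. As a rough sketch of that fluent pattern, using simplified stand-in types and defaults rather than the real AsyncRpcRetryingCallerFactory, the shape is roughly:

import java.nio.charset.StandardCharsets;
import java.util.Objects;
import java.util.concurrent.TimeUnit;

// Simplified model of the builder shown in the diff above; names and defaults are
// stand-ins for illustration, not the actual HBase classes.
public class SingleRequestCallerBuilderSketch {

  static final int DEFAULT_REPLICA_ID = 0; // assumed stand-in for RegionReplicaUtil.DEFAULT_REPLICA_ID

  private String tableName;
  private byte[] row;
  private int replicaId = DEFAULT_REPLICA_ID; // the newly introduced field
  private long operationTimeoutNs;
  private long rpcTimeoutNs;

  public SingleRequestCallerBuilderSketch table(String tableName) {
    this.tableName = tableName;
    return this;
  }

  public SingleRequestCallerBuilderSketch row(byte[] row) {
    this.row = row;
    return this;
  }

  public SingleRequestCallerBuilderSketch replicaId(int replicaId) {
    this.replicaId = replicaId;
    return this;
  }

  // Callers pass a human-friendly unit; the builder normalises to nanos once, as above.
  public SingleRequestCallerBuilderSketch operationTimeout(long timeout, TimeUnit unit) {
    this.operationTimeoutNs = unit.toNanos(timeout);
    return this;
  }

  public SingleRequestCallerBuilderSketch rpcTimeout(long timeout, TimeUnit unit) {
    this.rpcTimeoutNs = unit.toNanos(timeout);
    return this;
  }

  public String build() {
    Objects.requireNonNull(tableName, "tableName is null");
    Objects.requireNonNull(row, "row is null");
    return String.format("call %s/replica=%d opTimeout=%dns rpcTimeout=%dns",
        tableName, replicaId, operationTimeoutNs, rpcTimeoutNs);
  }

  public static void main(String[] args) {
    System.out.println(new SingleRequestCallerBuilderSketch()
        .table("test-table")
        .row("row-1".getBytes(StandardCharsets.UTF_8))
        .replicaId(1)
        .operationTimeout(30, TimeUnit.SECONDS)
        .rpcTimeout(10, TimeUnit.SECONDS)
        .build());
  }
}

Keeping timeouts in nanoseconds internally means the retry loop never has to convert units again, which is also why the real builder converts in the setter.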

[31/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
index 243c9e4..19b5553 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
@@ -293,9 +293,9 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
 
-org.apache.hadoop.hbase.mapreduce.RowCounter.RowCounterMapper.Counters
 org.apache.hadoop.hbase.mapreduce.CellCounter.CellCounterMapper.Counters
 org.apache.hadoop.hbase.mapreduce.SyncTable.SyncMapper.Counter
+org.apache.hadoop.hbase.mapreduce.RowCounter.RowCounterMapper.Counters
 org.apache.hadoop.hbase.mapreduce.TableSplit.Version
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
index c49c655..848d41f 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
@@ -198,8 +198,8 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
 
-org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.LocalityType
 org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type
+org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.LocalityType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
index 625c97b..bff0e70 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
@@ -347,10 +347,10 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
 
-org.apache.hadoop.hbase.master.MetricsMasterSourceFactoryImpl.FactoryStorage
-org.apache.hadoop.hbase.master.SplitLogManager.TerminationStatus
 org.apache.hadoop.hbase.master.RegionState.State
 org.apache.hadoop.hbase.master.SplitLogManager.ResubmitDirective
+org.apache.hadoop.hbase.master.SplitLogManager.TerminationStatus
+org.apache.hadoop.hbase.master.MetricsMasterSourceFactoryImpl.FactoryStorage
 org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
index 5fd1c3a..653e33c 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
@@ -216,9 +216,9 @@
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)

[10/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
index 736388b..197b99d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
@@ -26,3624 +26,3599 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-021
-022import com.google.protobuf.Message;
-023import com.google.protobuf.RpcChannel;
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.Collections;
-028import java.util.EnumSet;
-029import java.util.HashMap;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Optional;
-033import java.util.Set;
-034import 
java.util.concurrent.CompletableFuture;
-035import 
java.util.concurrent.ConcurrentHashMap;
-036import java.util.concurrent.TimeUnit;
-037import 
java.util.concurrent.atomic.AtomicReference;
-038import java.util.function.BiConsumer;
-039import java.util.function.Function;
-040import java.util.function.Supplier;
-041import java.util.regex.Pattern;
-042import java.util.stream.Collectors;
-043import java.util.stream.Stream;
-044import org.apache.commons.io.IOUtils;
-045import 
org.apache.hadoop.conf.Configuration;
-046import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-047import 
org.apache.hadoop.hbase.CacheEvictionStats;
-048import 
org.apache.hadoop.hbase.CacheEvictionStatsAggregator;
-049import 
org.apache.hadoop.hbase.ClusterMetrics;
-050import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-051import 
org.apache.hadoop.hbase.ClusterMetricsBuilder;
-052import 
org.apache.hadoop.hbase.HConstants;
-053import 
org.apache.hadoop.hbase.HRegionLocation;
-054import 
org.apache.hadoop.hbase.MetaTableAccessor;
-055import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-056import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.RegionMetrics;
-059import 
org.apache.hadoop.hbase.RegionMetricsBuilder;
-060import 
org.apache.hadoop.hbase.ServerName;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotDisabledException;
-064import 
org.apache.hadoop.hbase.TableNotEnabledException;
-065import 
org.apache.hadoop.hbase.TableNotFoundException;
-066import 
org.apache.hadoop.hbase.UnknownRegionException;
-067import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-068import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-071import 
org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-072import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-073import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.replication.SyncReplicationState;
-083import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-084import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-085import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-088import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-089import 
org.apache.yetus.audience.InterfaceAudience;
-090import org.slf4j.Logger;
-091import org.slf4j.LoggerFactory;
-092
-093import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-094import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-095import 
org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
-096import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-097import 
org.apache.hbase.thirdparty.io.netty.util.Timeout;
-098import 

[46/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/class-use/RegionLocations.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/RegionLocations.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/RegionLocations.html
index c504a6d..cbf2d81 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/RegionLocations.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/RegionLocations.html
@@ -197,11 +197,27 @@
 
 
 
+private Map<AsyncNonMetaRegionLocator.LocateRequest,CompletableFuture<RegionLocations>>
+AsyncNonMetaRegionLocator.TableCache.allRequests
+
+
+private ConcurrentNavigableMap<byte[],RegionLocations>
+AsyncNonMetaRegionLocator.TableCache.cache
+
+
 private ConcurrentMap<TableName,ConcurrentNavigableMap<byte[],RegionLocations>>
 MetaCache.cachedRegionLocations
 Map of table to table HRegionLocations.
 
 
+
+private AtomicReference<RegionLocations>
+AsyncMetaRegionLocator.metaRegionLocations
+
+
+private AtomicReference<CompletableFuture<RegionLocations>>
+AsyncMetaRegionLocator.metaRelocateFuture
+
 
 
 
@@ -212,24 +228,33 @@
 
 
 
+(package private) static RegionLocations
+AsyncRegionLocatorHelper.createRegionLocations(HRegionLocationloc)
+
+
 private RegionLocations
 AsyncRequestFutureImpl.findAllLocationsOrFail(Actionaction,
   booleanuseCache)
 
-
+
 RegionLocations
 MetaCache.getCachedLocation(TableNametableName,
  byte[]row)
 Search the cache for a location that fits our table and row 
key.
 
 
-
+
 (package private) RegionLocations
 ConnectionImplementation.getCachedLocation(TableNametableName,
  byte[]row)
 Search the cache for a location that fits our table and row 
key.
 
 
+
+(package private) RegionLocations
+AsyncNonMetaRegionLocator.getRegionLocationInCache(TableNametableName,
+byte[]row)
+
 
 (package private) static RegionLocations
 RpcRetryingCallerWithReadReplicas.getRegionLocations(booleanuseCache,
@@ -248,25 +273,33 @@
 
 
 private RegionLocations
+AsyncNonMetaRegionLocator.locateInCache(AsyncNonMetaRegionLocator.TableCachetableCache,
+ TableNametableName,
+ byte[]row,
+ intreplicaId,
+ RegionLocateTypelocateType)
+
+
+private RegionLocations
 ConnectionImplementation.locateMeta(TableNametableName,
   booleanuseCache,
   intreplicaId)
 
-
+
 RegionLocations
 ClusterConnection.locateRegion(TableNametableName,
 byte[]row,
 booleanuseCache,
 booleanretry)
 
-
+
 RegionLocations
 ConnectionImplementation.locateRegion(TableNametableName,
 byte[]row,
 booleanuseCache,
 booleanretry)
 
-
+
 RegionLocations
 ClusterConnection.locateRegion(TableNametableName,
 byte[]row,
@@ -274,7 +307,7 @@
 booleanretry,
 intreplicaId)
 
-
+
 RegionLocations
 ConnectionImplementation.locateRegion(TableNametableName,
 byte[]row,
@@ -282,7 +315,7 @@
 booleanretry,
 intreplicaId)
 
-
+
 private RegionLocations
 ConnectionImplementation.locateRegionInMeta(TableNametableName,
   byte[]row,
@@ -293,6 +326,28 @@
  seeking.
 
 
+
+private RegionLocations
+AsyncNonMetaRegionLocator.locateRowBeforeInCache(AsyncNonMetaRegionLocator.TableCachetableCache,
+  TableNametableName,
+  byte[]row,
+  intreplicaId)
+
+
+private RegionLocations
+AsyncNonMetaRegionLocator.locateRowInCache(AsyncNonMetaRegionLocator.TableCachetableCache,
+TableNametableName,
+byte[]row,
+intreplicaId)
+
+
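The new cache fields and lookup methods above key RegionLocations by region start row and thread a replicaId through the lookup. A self-contained Java sketch of that floor-lookup idea follows; the Locations type and server names are stand-ins, not the real TableCache/RegionLocations, and the sketch ignores the end-key and staleness checks the real locator performs.

import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.ConcurrentSkipListMap;

// Stand-in sketch of a per-table cache keyed by region start row, in the spirit of the
// TableCache.cache field above (ConcurrentNavigableMap<byte[], RegionLocations>).
public class RowLocationCacheSketch {

  // Minimal stand-in for RegionLocations: a start key plus one server per replica.
  static final class Locations {
    final byte[] startKey;
    final String[] replicaServers;

    Locations(byte[] startKey, String[] replicaServers) {
      this.startKey = startKey;
      this.replicaServers = replicaServers;
    }
  }

  // Unsigned lexicographic comparison, the usual ordering for HBase row keys.
  static int compareRows(byte[] a, byte[] b) {
    int n = Math.min(a.length, b.length);
    for (int i = 0; i < n; i++) {
      int cmp = (a[i] & 0xff) - (b[i] & 0xff);
      if (cmp != 0) {
        return cmp;
      }
    }
    return a.length - b.length;
  }

  private final ConcurrentNavigableMap<byte[], Locations> cache =
      new ConcurrentSkipListMap<>(RowLocationCacheSketch::compareRows);

  void add(Locations locs) {
    cache.put(locs.startKey, locs);
  }

  // locateRowInCache-style lookup: the candidate region is the one whose start key is the
  // greatest key <= row; then pick the requested replica out of it.
  String locateRowInCache(byte[] row, int replicaId) {
    Map.Entry<byte[], Locations> entry = cache.floorEntry(row);
    if (entry == null) {
      return null; // nothing cached for this part of the key space
    }
    String[] replicas = entry.getValue().replicaServers;
    return replicaId < replicas.length ? replicas[replicaId] : null;
  }

  public static void main(String[] args) {
    RowLocationCacheSketch sketch = new RowLocationCacheSketch();
    sketch.add(new Locations(new byte[0], new String[] { "rs1", "rs2" }));
    sketch.add(new Locations("m".getBytes(StandardCharsets.UTF_8), new String[] { "rs3" }));
    System.out.println(sketch.locateRowInCache("q".getBytes(StandardCharsets.UTF_8), 0)); // rs3
    System.out.println(sketch.locateRowInCache("a".getBytes(StandardCharsets.UTF_8), 1)); // rs2
  }
}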

[26/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html
index 779b02a..e91f4fe 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html
@@ -47,149 +47,161 @@
 039import static 
org.apache.hadoop.hbase.HConstants.HBASE_RPC_WRITE_TIMEOUT_KEY;
 040import static 
org.apache.hadoop.hbase.client.AsyncProcess.DEFAULT_START_LOG_ERRORS_AFTER_COUNT;
 041import static 
org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER_COUNT_KEY;
-042import static 
org.apache.hadoop.hbase.client.ConnectionConfiguration.WRITE_BUFFER_PERIODIC_FLUSH_TIMEOUT_MS;
-043import static 
org.apache.hadoop.hbase.client.ConnectionConfiguration.WRITE_BUFFER_PERIODIC_FLUSH_TIMEOUT_MS_DEFAULT;
-044import static 
org.apache.hadoop.hbase.client.ConnectionConfiguration.WRITE_BUFFER_SIZE_DEFAULT;
-045import static 
org.apache.hadoop.hbase.client.ConnectionConfiguration.WRITE_BUFFER_SIZE_KEY;
-046
-047import java.util.concurrent.TimeUnit;
-048import 
org.apache.hadoop.conf.Configuration;
-049import 
org.apache.hadoop.hbase.HBaseConfiguration;
-050import 
org.apache.yetus.audience.InterfaceAudience;
-051
-052/**
-053 * Timeout configs.
-054 */
-055@InterfaceAudience.Private
-056class AsyncConnectionConfiguration {
-057
-058  private final long 
metaOperationTimeoutNs;
+042import static 
org.apache.hadoop.hbase.client.ConnectionConfiguration.PRIMARY_CALL_TIMEOUT_MICROSECOND;
+043import static 
org.apache.hadoop.hbase.client.ConnectionConfiguration.PRIMARY_CALL_TIMEOUT_MICROSECOND_DEFAULT;
+044import static 
org.apache.hadoop.hbase.client.ConnectionConfiguration.WRITE_BUFFER_PERIODIC_FLUSH_TIMEOUT_MS;
+045import static 
org.apache.hadoop.hbase.client.ConnectionConfiguration.WRITE_BUFFER_PERIODIC_FLUSH_TIMEOUT_MS_DEFAULT;
+046import static 
org.apache.hadoop.hbase.client.ConnectionConfiguration.WRITE_BUFFER_SIZE_DEFAULT;
+047import static 
org.apache.hadoop.hbase.client.ConnectionConfiguration.WRITE_BUFFER_SIZE_KEY;
+048
+049import java.util.concurrent.TimeUnit;
+050import 
org.apache.hadoop.conf.Configuration;
+051import 
org.apache.hadoop.hbase.HBaseConfiguration;
+052import 
org.apache.yetus.audience.InterfaceAudience;
+053
+054/**
+055 * Timeout configs.
+056 */
+057@InterfaceAudience.Private
+058class AsyncConnectionConfiguration {
 059
-060  // timeout for a whole operation such 
as get, put or delete. Notice that scan will not be effected
-061  // by this value, see scanTimeoutNs.
-062  private final long 
operationTimeoutNs;
-063
-064  // timeout for each rpc request. Can be 
overridden by a more specific config, such as
-065  // readRpcTimeout or writeRpcTimeout.
-066  private final long rpcTimeoutNs;
-067
-068  // timeout for each read rpc request
-069  private final long readRpcTimeoutNs;
-070
-071  // timeout for each write rpc request
-072  private final long writeRpcTimeoutNs;
-073
-074  private final long pauseNs;
+060  private final long 
metaOperationTimeoutNs;
+061
+062  // timeout for a whole operation such 
as get, put or delete. Notice that scan will not be effected
+063  // by this value, see scanTimeoutNs.
+064  private final long 
operationTimeoutNs;
+065
+066  // timeout for each rpc request. Can be 
overridden by a more specific config, such as
+067  // readRpcTimeout or writeRpcTimeout.
+068  private final long rpcTimeoutNs;
+069
+070  // timeout for each read rpc request
+071  private final long readRpcTimeoutNs;
+072
+073  // timeout for each write rpc request
+074  private final long writeRpcTimeoutNs;
 075
-076  private final int maxRetries;
+076  private final long pauseNs;
 077
-078  /** How many retries are allowed before 
we start to log */
-079  private final int startLogErrorsCnt;
-080
-081  // As now we have heartbeat support for 
scan, ideally a scan will never timeout unless the RS is
-082  // crash. The RS will always return 
something before the rpc timeout or scan timeout to tell the
-083  // client that it is still alive. The 
scan timeout is used as operation timeout for every
-084  // operations in a scan, such as 
openScanner or next.
-085  private final long scanTimeoutNs;
-086
-087  private final int scannerCaching;
+078  private final int maxRetries;
+079
+080  /** How many retries are allowed before 
we start to log */
+081  private final int startLogErrorsCnt;
+082
+083  // As now we have heartbeat support for 
scan, ideally a scan will never timeout unless the RS is
+084  // crash. The RS will always return 
something before the rpc timeout or scan timeout to tell the
+085  // client that it is still alive. The 

[39/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder.html
index 9d8ddf8..fe2eebc 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder<T>
+public class AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder<T>
 extends AsyncRpcRetryingCallerFactory.BuilderBase
 
 
@@ -260,7 +260,7 @@ extends 
 
 callable
-privateAsyncServerRequestRpcRetryingCaller.CallableT callable
-private AsyncServerRequestRpcRetryingCaller.Callable<T> callable
+private AsyncServerRequestRpcRetryingCaller.Callable<T> callable
 
 
@@ -269,7 +269,7 @@ extends 
 
 operationTimeoutNs
-privatelong operationTimeoutNs
+privatelong operationTimeoutNs
 
 
 
@@ -278,7 +278,7 @@ extends 
 
 rpcTimeoutNs
-privatelong rpcTimeoutNs
+privatelong rpcTimeoutNs
 
 
 
@@ -287,7 +287,7 @@ extends 
 
 serverName
-privateServerName serverName
+privateServerName serverName
 
 
 
@@ -304,7 +304,7 @@ extends 
 
 ServerRequestCallerBuilder
-publicServerRequestCallerBuilder()
+publicServerRequestCallerBuilder()
 
 
 
@@ -321,7 +321,7 @@ extends 
 
 action
-publicAsyncRpcRetryingCallerFactory.ServerRequestCallerBuilderTaction(AsyncServerRequestRpcRetryingCaller.CallableTcallable)
+publicAsyncRpcRetryingCallerFactory.ServerRequestCallerBuilderTaction(AsyncServerRequestRpcRetryingCaller.CallableTcallable)
 
 
 
@@ -330,7 +330,7 @@ extends 
 
 operationTimeout
-public AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder<T> operationTimeout(long operationTimeout,
+public AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder<T> operationTimeout(long operationTimeout,
                                                                                     TimeUnit unit)
 
 
@@ -340,7 +340,7 @@ extends 
 
 rpcTimeout
-public AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder<T> rpcTimeout(long rpcTimeout,
+public AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder<T> rpcTimeout(long rpcTimeout,
                                                                               TimeUnit unit)
 
 
@@ -350,7 +350,7 @@ extends 
 
 pause
-public AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder<T> pause(long pause,
+public AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder<T> pause(long pause,
                                                                          TimeUnit unit)
 
 
@@ -360,7 +360,7 @@ extends 
 
 maxAttempts
-publicAsyncRpcRetryingCallerFactory.ServerRequestCallerBuilderTmaxAttempts(intmaxAttempts)
+publicAsyncRpcRetryingCallerFactory.ServerRequestCallerBuilderTmaxAttempts(intmaxAttempts)
 
 
 
@@ -369,7 +369,7 @@ extends 
 
 startLogErrorsCnt
-publicAsyncRpcRetryingCallerFactory.ServerRequestCallerBuilderTstartLogErrorsCnt(intstartLogErrorsCnt)
+publicAsyncRpcRetryingCallerFactory.ServerRequestCallerBuilderTstartLogErrorsCnt(intstartLogErrorsCnt)
 
 
 
@@ -378,7 +378,7 @@ extends 
 
 serverName
-publicAsyncRpcRetryingCallerFactory.ServerRequestCallerBuilderTserverName(ServerNameserverName)
+publicAsyncRpcRetryingCallerFactory.ServerRequestCallerBuilderTserverName(ServerNameserverName)
 
 
 
@@ -387,7 +387,7 @@ extends 
 
 build
-publicAsyncServerRequestRpcRetryingCallerTbuild()
+publicAsyncServerRequestRpcRetryingCallerTbuild()
 
 
 
@@ -396,7 +396,7 @@ extends 
 
 call
-public CompletableFuture<T> call()
+public CompletableFuture<T> call()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html
--
diff --git 

[42/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.html
index c389981..22973b5 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-class AsyncNonMetaRegionLocator
+class AsyncNonMetaRegionLocator
 extends java.lang.Object
 The asynchronous locator for regions other than meta.
 
@@ -223,13 +223,13 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 Method and Description
 
 
-private boolean
-addToCache(AsyncNonMetaRegionLocator.TableCachetableCache,
-  HRegionLocationloc)
+private void
+addLocationToCache(HRegionLocationloc)
 
 
-private void
-addToCache(HRegionLocationloc)
+private boolean
+addToCache(AsyncNonMetaRegionLocator.TableCachetableCache,
+  RegionLocationslocs)
 
 
 (package private) void
@@ -237,72 +237,86 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 private void
-complete(TableNametableName,
+complete(TableNametableName,
 AsyncNonMetaRegionLocator.LocateRequestreq,
-HRegionLocationloc,
+RegionLocationslocs,
 Throwable error)
 
 
-(package private) CompletableFuture<HRegionLocation>
-getRegionLocation(TableNametableName,
- byte[]row,
- RegionLocateTypelocateType,
- booleanreload)
+private HRegionLocation
+getCachedLocation(HRegionLocationloc)
 
 
-(package private) HRegionLocation
+(package private) RegionLocations
 getRegionLocationInCache(TableNametableName,
 byte[]row)
 
 
-private CompletableFuture<HRegionLocation>
-getRegionLocationInternal(TableNametableName,
- byte[]row,
- RegionLocateTypelocateType,
- booleanreload)
+(package private) CompletableFuture<RegionLocations>
+getRegionLocations(TableNametableName,
+  byte[]row,
+  intreplicaId,
+  RegionLocateTypelocateType,
+  booleanreload)
 
 
+private CompletableFuture<RegionLocations>
+getRegionLocationsInternal(TableNametableName,
+  byte[]row,
+  intreplicaId,
+  RegionLocateTypelocateType,
+  booleanreload)
+
+
 private AsyncNonMetaRegionLocator.TableCache
 getTableCache(TableNametableName)
 
-
-private HRegionLocation
-locateInCache(AsyncNonMetaRegionLocator.TableCachetableCache,
+
+private boolean
+isEqual(RegionLocationslocs1,
+   RegionLocationslocs2)
+
+
+private RegionLocations
+locateInCache(AsyncNonMetaRegionLocator.TableCachetableCache,
  TableNametableName,
  byte[]row,
+ intreplicaId,
  RegionLocateTypelocateType)
 
-
+
 private void
 locateInMeta(TableNametableName,
 AsyncNonMetaRegionLocator.LocateRequestreq)
 
-
-private HRegionLocation
-locateRowBeforeInCache(AsyncNonMetaRegionLocator.TableCachetableCache,
+
+private RegionLocations
+locateRowBeforeInCache(AsyncNonMetaRegionLocator.TableCachetableCache,
   TableNametableName,
-  byte[]row)
+  
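The signatures above show the locator now carrying RegionLocations (one entry per replica) through its cache instead of a single HRegionLocation, together with helpers such as isEqual and createRegionLocations. One consequence is that a freshly fetched set of locations has to be merged into what is already cached, replica by replica. A stand-in Java sketch of that merge follows; the Loc type and the seqNum tie-break are assumptions for illustration, not the real RegionLocations or AsyncRegionLocatorHelper code.

// Stand-in sketch: merge newly fetched replica locations into cached ones, keeping
// whichever entry looks newer per replica slot.
public class ReplicaLocationsMergeSketch {

  static final class Loc {
    final int replicaId;
    final String server;
    final long seqNum; // higher is assumed to mean a more recent open of that replica

    Loc(int replicaId, String server, long seqNum) {
      this.replicaId = replicaId;
      this.server = server;
      this.seqNum = seqNum;
    }

    @Override
    public String toString() {
      return "replica" + replicaId + "@" + server + "(seq=" + seqNum + ")";
    }
  }

  // Merge by replica slot: prefer the location with the larger seqNum, otherwise keep
  // whichever side has the slot filled at all.
  static Loc[] merge(Loc[] cached, Loc[] fetched) {
    int size = Math.max(cached.length, fetched.length);
    Loc[] merged = new Loc[size];
    for (int i = 0; i < size; i++) {
      Loc oldLoc = i < cached.length ? cached[i] : null;
      Loc newLoc = i < fetched.length ? fetched[i] : null;
      if (oldLoc == null) {
        merged[i] = newLoc;
      } else if (newLoc == null) {
        merged[i] = oldLoc;
      } else {
        merged[i] = newLoc.seqNum >= oldLoc.seqNum ? newLoc : oldLoc;
      }
    }
    return merged;
  }

  public static void main(String[] args) {
    Loc[] cached = { new Loc(0, "rs1", 10), new Loc(1, "rs2", 7) };
    Loc[] fetched = { new Loc(0, "rs3", 12) }; // only the primary came back this time
    for (Loc loc : merge(cached, fetched)) {
      System.out.println(loc);
    }
  }
}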

[49/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 117e755..07451b4 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 2007 - 2019 The Apache Software Foundation
 
-  File: 3840,
- Errors: 14666,
+  File: 3844,
+ Errors: 14559,
  Warnings: 0,
  Infos: 0
   
@@ -1609,7 +1609,7 @@ under the License.
   0
 
 
-  4
+  0
 
   
   
@@ -5184,6 +5184,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper.java;>org/apache/hadoop/hbase/client/AsyncRegionLocatorHelper.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.TestObservedExceptionsInBatch.java;>org/apache/hadoop/hbase/regionserver/TestObservedExceptionsInBatch.java
 
 
@@ -6803,7 +6817,7 @@ under the License.
   0
 
 
-  17
+  0
 
   
   
@@ -12109,7 +12123,7 @@ under the License.
   0
 
 
-  4
+  0
 
   
   
@@ -14391,7 +14405,7 @@ under the License.
   0
 
 
-  6
+  3
 
   
   
@@ -16398,6 +16412,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.TestAsyncTableRegionReplicasGet.java;>org/apache/hadoop/hbase/client/TestAsyncTableRegionReplicasGet.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor.java;>org/apache/hadoop/hbase/coprocessor/RegionServerCoprocessor.java
 
 
@@ -16454,6 +16482,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.util.FutureUtils.java;>org/apache/hadoop/hbase/util/FutureUtils.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.mob.ExpiredMobFileCleaner.java;>org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java
 
 
@@ -18745,7 +18787,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
@@ -23916,6 +23958,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.RegionReplicaTestHelper.java;>org/apache/hadoop/hbase/client/RegionReplicaTestHelper.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.coprocessor.RegionObserver.java;>org/apache/hadoop/hbase/coprocessor/RegionObserver.java
 
 
@@ -33613,7 +33669,7 @@ under the License.
   0
 
 
-  87
+  15
 
   
   
@@ -47963,7 +48019,7 @@ under the License.
   0
 
 
-  4
+  0
 
   
   
@@ -49615,7 +49671,7 @@ under 

[12/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
index 736388b..197b99d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
@@ -26,3624 +26,3599 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-021
-022import com.google.protobuf.Message;
-023import com.google.protobuf.RpcChannel;
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.Collections;
-028import java.util.EnumSet;
-029import java.util.HashMap;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Optional;
-033import java.util.Set;
-034import 
java.util.concurrent.CompletableFuture;
-035import 
java.util.concurrent.ConcurrentHashMap;
-036import java.util.concurrent.TimeUnit;
-037import 
java.util.concurrent.atomic.AtomicReference;
-038import java.util.function.BiConsumer;
-039import java.util.function.Function;
-040import java.util.function.Supplier;
-041import java.util.regex.Pattern;
-042import java.util.stream.Collectors;
-043import java.util.stream.Stream;
-044import org.apache.commons.io.IOUtils;
-045import 
org.apache.hadoop.conf.Configuration;
-046import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-047import 
org.apache.hadoop.hbase.CacheEvictionStats;
-048import 
org.apache.hadoop.hbase.CacheEvictionStatsAggregator;
-049import 
org.apache.hadoop.hbase.ClusterMetrics;
-050import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-051import 
org.apache.hadoop.hbase.ClusterMetricsBuilder;
-052import 
org.apache.hadoop.hbase.HConstants;
-053import 
org.apache.hadoop.hbase.HRegionLocation;
-054import 
org.apache.hadoop.hbase.MetaTableAccessor;
-055import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-056import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.RegionMetrics;
-059import 
org.apache.hadoop.hbase.RegionMetricsBuilder;
-060import 
org.apache.hadoop.hbase.ServerName;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotDisabledException;
-064import 
org.apache.hadoop.hbase.TableNotEnabledException;
-065import 
org.apache.hadoop.hbase.TableNotFoundException;
-066import 
org.apache.hadoop.hbase.UnknownRegionException;
-067import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-068import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-071import 
org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-072import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-073import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.replication.SyncReplicationState;
-083import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-084import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-085import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-088import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-089import 
org.apache.yetus.audience.InterfaceAudience;
-090import org.slf4j.Logger;
-091import org.slf4j.LoggerFactory;
-092
-093import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-094import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-095import 
org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
-096import 

[19/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BuilderBase.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BuilderBase.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BuilderBase.html
index 2e150bc..0b315b8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BuilderBase.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BuilderBase.html
@@ -25,22 +25,22 @@
 017 */
 018package org.apache.hadoop.hbase.client;
 019
-020import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
-021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
-022import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+020import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
+022import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
 023
-024import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-025
-026import java.util.List;
-027import 
java.util.concurrent.CompletableFuture;
-028import java.util.concurrent.TimeUnit;
-029
-030import 
org.apache.hadoop.hbase.HRegionLocation;
-031import 
org.apache.hadoop.hbase.ServerName;
-032import 
org.apache.hadoop.hbase.TableName;
-033import 
org.apache.yetus.audience.InterfaceAudience;
-034import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
-035import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+024import java.util.List;
+025import 
java.util.concurrent.CompletableFuture;
+026import java.util.concurrent.TimeUnit;
+027import 
org.apache.hadoop.hbase.HRegionLocation;
+028import 
org.apache.hadoop.hbase.ServerName;
+029import 
org.apache.hadoop.hbase.TableName;
+030import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+031import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+032import 
org.apache.yetus.audience.InterfaceAudience;
+033
+034import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
+035
 036import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
 037import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse;
 038
@@ -83,432 +83,441 @@
 075
 076private RegionLocateType locateType = 
RegionLocateType.CURRENT;
 077
-078public 
SingleRequestCallerBuilderT table(TableName tableName) {
-079  this.tableName = tableName;
-080  return this;
-081}
-082
-083public 
SingleRequestCallerBuilderT row(byte[] row) {
-084  this.row = row;
-085  return this;
-086}
-087
-088public 
SingleRequestCallerBuilderT action(
-089
AsyncSingleRequestRpcRetryingCaller.CallableT callable) {
-090  this.callable = callable;
-091  return this;
-092}
-093
-094public 
SingleRequestCallerBuilderT operationTimeout(long operationTimeout, 
TimeUnit unit) {
-095  this.operationTimeoutNs = 
unit.toNanos(operationTimeout);
-096  return this;
-097}
-098
-099public 
SingleRequestCallerBuilderT rpcTimeout(long rpcTimeout, TimeUnit unit) 
{
-100  this.rpcTimeoutNs = 
unit.toNanos(rpcTimeout);
-101  return this;
-102}
-103
-104public 
SingleRequestCallerBuilderT locateType(RegionLocateType locateType) {
-105  this.locateType = locateType;
-106  return this;
-107}
-108
-109public 
SingleRequestCallerBuilderT pause(long pause, TimeUnit unit) {
-110  this.pauseNs = 
unit.toNanos(pause);
-111  return this;
-112}
-113
-114public 
SingleRequestCallerBuilderT maxAttempts(int maxAttempts) {
-115  this.maxAttempts = maxAttempts;
-116  return this;
-117}
-118
-119public 
SingleRequestCallerBuilderT startLogErrorsCnt(int startLogErrorsCnt) 
{
-120  this.startLogErrorsCnt = 
startLogErrorsCnt;
-121  return this;
-122}
-123
-124public 
AsyncSingleRequestRpcRetryingCallerT build() {
-125  return new 
AsyncSingleRequestRpcRetryingCaller(retryTimer, conn,
-126  checkNotNull(tableName, 
"tableName is null"), checkNotNull(row, "row is null"),
-127  checkNotNull(locateType, 
"locateType is null"), checkNotNull(callable, "action is null"),
-128  pauseNs, maxAttempts, 
operationTimeoutNs, rpcTimeoutNs, startLogErrorsCnt);
+078private int replicaId = 
RegionReplicaUtil.DEFAULT_REPLICA_ID;
+079
+080public 
SingleRequestCallerBuilderT table(TableName tableName) {
+081  this.tableName = tableName;
+082  return this;
+083}
+084
+085public 
SingleRequestCallerBuilderT row(byte[] row) {
+086  this.row = row;
+087  return this;
+088}
+089

[21/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.html
index 2e150bc..0b315b8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.html
@@ -25,22 +25,22 @@
 017 */
 018package org.apache.hadoop.hbase.client;
 019
-020import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
-021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
-022import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+020import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
+022import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
 023
-024import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-025
-026import java.util.List;
-027import 
java.util.concurrent.CompletableFuture;
-028import java.util.concurrent.TimeUnit;
-029
-030import 
org.apache.hadoop.hbase.HRegionLocation;
-031import 
org.apache.hadoop.hbase.ServerName;
-032import 
org.apache.hadoop.hbase.TableName;
-033import 
org.apache.yetus.audience.InterfaceAudience;
-034import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
-035import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+024import java.util.List;
+025import 
java.util.concurrent.CompletableFuture;
+026import java.util.concurrent.TimeUnit;
+027import 
org.apache.hadoop.hbase.HRegionLocation;
+028import 
org.apache.hadoop.hbase.ServerName;
+029import 
org.apache.hadoop.hbase.TableName;
+030import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+031import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+032import 
org.apache.yetus.audience.InterfaceAudience;
+033
+034import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
+035
 036import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
 037import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse;
 038
@@ -83,432 +83,441 @@
 075
 076private RegionLocateType locateType = 
RegionLocateType.CURRENT;
 077
-078public 
SingleRequestCallerBuilderT table(TableName tableName) {
-079  this.tableName = tableName;
-080  return this;
-081}
-082
-083public 
SingleRequestCallerBuilderT row(byte[] row) {
-084  this.row = row;
-085  return this;
-086}
-087
-088public 
SingleRequestCallerBuilderT action(
-089
AsyncSingleRequestRpcRetryingCaller.CallableT callable) {
-090  this.callable = callable;
-091  return this;
-092}
-093
-094public 
SingleRequestCallerBuilderT operationTimeout(long operationTimeout, 
TimeUnit unit) {
-095  this.operationTimeoutNs = 
unit.toNanos(operationTimeout);
-096  return this;
-097}
-098
-099public 
SingleRequestCallerBuilderT rpcTimeout(long rpcTimeout, TimeUnit unit) 
{
-100  this.rpcTimeoutNs = 
unit.toNanos(rpcTimeout);
-101  return this;
-102}
-103
-104public 
SingleRequestCallerBuilderT locateType(RegionLocateType locateType) {
-105  this.locateType = locateType;
-106  return this;
-107}
-108
-109public 
SingleRequestCallerBuilderT pause(long pause, TimeUnit unit) {
-110  this.pauseNs = 
unit.toNanos(pause);
-111  return this;
-112}
-113
-114public 
SingleRequestCallerBuilderT maxAttempts(int maxAttempts) {
-115  this.maxAttempts = maxAttempts;
-116  return this;
-117}
-118
-119public 
SingleRequestCallerBuilderT startLogErrorsCnt(int startLogErrorsCnt) 
{
-120  this.startLogErrorsCnt = 
startLogErrorsCnt;
-121  return this;
-122}
-123
-124public 
AsyncSingleRequestRpcRetryingCallerT build() {
-125  return new 
AsyncSingleRequestRpcRetryingCaller(retryTimer, conn,
-126  checkNotNull(tableName, 
"tableName is null"), checkNotNull(row, "row is null"),
-127  checkNotNull(locateType, 
"locateType is null"), checkNotNull(callable, "action is null"),
-128  pauseNs, maxAttempts, 
operationTimeoutNs, rpcTimeoutNs, startLogErrorsCnt);
+078private int replicaId = 
RegionReplicaUtil.DEFAULT_REPLICA_ID;
+079
+080public 
SingleRequestCallerBuilderT table(TableName tableName) {
+081  this.tableName = tableName;
+082  return this;
+083}
+084
+085public 
SingleRequestCallerBuilderT row(byte[] row) 

[40/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.html
index af1f280..f284d16 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder<T>
+public class AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder<T>
 extends AsyncRpcRetryingCallerFactory.BuilderBase
 
 
@@ -260,7 +260,7 @@ extends 
 
 callable
-private AsyncAdminRequestRetryingCaller.Callable<T> callable
+private AsyncAdminRequestRetryingCaller.Callable<T> callable
 
 
 
@@ -269,7 +269,7 @@ extends 
 
 operationTimeoutNs
-privatelong operationTimeoutNs
+privatelong operationTimeoutNs
 
 
 
@@ -278,7 +278,7 @@ extends 
 
 rpcTimeoutNs
-privatelong rpcTimeoutNs
+privatelong rpcTimeoutNs
 
 
 
@@ -287,7 +287,7 @@ extends 
 
 serverName
-privateServerName serverName
+privateServerName serverName
 
 
 
@@ -304,7 +304,7 @@ extends 
 
 AdminRequestCallerBuilder
-publicAdminRequestCallerBuilder()
+publicAdminRequestCallerBuilder()
 
 
 
@@ -321,7 +321,7 @@ extends 
 
 action
-publicAsyncRpcRetryingCallerFactory.AdminRequestCallerBuilderTaction(AsyncAdminRequestRetryingCaller.CallableTcallable)
+publicAsyncRpcRetryingCallerFactory.AdminRequestCallerBuilderTaction(AsyncAdminRequestRetryingCaller.CallableTcallable)
 
 
 
@@ -330,7 +330,7 @@ extends 
 
 operationTimeout
-public AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder<T> operationTimeout(long operationTimeout,
+public AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder<T> operationTimeout(long operationTimeout,
                                                                                    TimeUnit unit)
 
 
@@ -340,7 +340,7 @@ extends 
 
 rpcTimeout
-public AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder<T> rpcTimeout(long rpcTimeout, TimeUnit unit)
+public AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder<T> rpcTimeout(long rpcTimeout, TimeUnit unit)
 
 
@@ -350,7 +350,7 @@ extends 
 
 pause
-public AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder<T> pause(long pause, TimeUnit unit)
+public AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder<T> pause(long pause, TimeUnit unit)
 
 
@@ -360,7 +360,7 @@ extends 
 
 maxAttempts
-public AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder<T> maxAttempts(int maxAttempts)
+public AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder<T> maxAttempts(int maxAttempts)
 
 
 
@@ -369,7 +369,7 @@ extends 
 
 startLogErrorsCnt
-public AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder<T> startLogErrorsCnt(int startLogErrorsCnt)
+public AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder<T> startLogErrorsCnt(int startLogErrorsCnt)
 
 
 
@@ -378,7 +378,7 @@ extends 
 
 serverName
-public AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder<T> serverName(ServerName serverName)
+public AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder<T> serverName(ServerName serverName)
 
 
 
@@ -387,7 +387,7 @@ extends 
 
 build
-public AsyncAdminRequestRetryingCaller<T> build()
+public AsyncAdminRequestRetryingCaller<T> build()
 
 
 
@@ -396,7 +396,7 @@ extends 
 
 call
-public CompletableFuture<T> call()
+public CompletableFuture<T> call()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BatchCallerBuilder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BatchCallerBuilder.html
 

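The method listing above is the AdminRequestCallerBuilder counterpart: the same retry knobs, plus serverName(ServerName) to pin the call to one region server. A rough, hedged sketch of how such a builder is chained; callerFactory, the adminRequest() entry point, the timeouts, "sinkServer", "regionName" and the getRegionInfo helper are assumptions for illustration, not text from the commit.

  // Hedged sketch of an admin RPC retried against a fixed region server.
  // callerFactory.adminRequest(), sinkServer, regionName and getRegionInfo(...) are illustrative.
  CompletableFuture<GetRegionInfoResponse> future =
      callerFactory.<GetRegionInfoResponse> adminRequest()
          .serverName(sinkServer)                 // target region server for the admin RPC
          .action((controller, stub) -> getRegionInfo(controller, stub, regionName))
          .operationTimeout(60, TimeUnit.SECONDS)
          .rpcTimeout(60, TimeUnit.SECONDS)
          .maxAttempts(5)
          .call();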
[50/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 5203a73..7fee945 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -291,10 +291,10 @@
 Warnings
 Errors
 
-3840
+3844
 0
 0
-14666
+14559
 
 Files
 
@@ -1112,7 +1112,7 @@
 org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.java
 0
 0
-6
+3
 
 org/apache/hadoop/hbase/client/AsyncClientScanner.java
 0
@@ -1134,16 +1134,6 @@
 0
 2
 
-org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java
-0
-0
-1
-
-org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
-0
-0
-4
-
 org/apache/hadoop/hbase/client/AsyncProcess.java
 0
 0
@@ -1154,35 +1144,25 @@
 0
 2
 
-org/apache/hadoop/hbase/client/AsyncRegionLocator.java
-0
-0
-2
-
 org/apache/hadoop/hbase/client/AsyncRegistryFactory.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/AsyncRequestFuture.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
 0
 0
 21
-
+
 org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.java
 0
 0
 3
-
-org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.java
-0
-0
-4
 
 org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.java
 0
@@ -1194,520 +1174,510 @@
 0
 2
 
-org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.java
-0
-0
-4
-
 org/apache/hadoop/hbase/client/AsyncTableImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/AsyncTableResultScanner.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/BatchErrors.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/BatchScanResultCache.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/BufferingScanResultConsumer.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/CancellableRegionServerCallable.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/ClientIdGenerator.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ClientScanner.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/client/ClientServiceCallable.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/ClientSimpleScanner.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ClientUtil.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ClusterConnection.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ColumnCountOnRowFilter.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
 0
 0
 54
-
+
 org/apache/hadoop/hbase/client/CompactType.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/CompleteScanResultCache.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ConnectionFactory.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/ConnectionImplementation.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/client/ConnectionUtils.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/DelayingRunner.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/Delete.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/Get.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/client/HBaseAdmin.java
 0
 0
 96
-
+
 org/apache/hadoop/hbase/client/HBaseHbck.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/HConnectionTestingUtility.java
 0
 0
 16
-
+
 org/apache/hadoop/hbase/client/HRegionLocator.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/HTable.java
 0
 0
 50
-
+
 org/apache/hadoop/hbase/client/HTableMultiplexer.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ImmutableHRegionInfo.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/Increment.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/MasterCallable.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/MasterCoprocessorRpcChannelImpl.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/MetaCache.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/MetricsConnection.java
 0
 0
 41
-
+
 org/apache/hadoop/hbase/client/MultiAction.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/MultiResponse.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/MultiServerCallable.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/Mutation.java
 0
 0
 23
-
+
 org/apache/hadoop/hbase/client/NoOpRetryableCallerInterceptor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/NoncedRegionServerCallable.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/Operation.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/PackagePrivateFieldAccessor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase/client/Put.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/client/Query.java
 0
 0
 10
-
+
 

[37/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ReplicationProcedureBiConsumer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ReplicationProcedureBiConsumer.html
 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ReplicationProcedureBiConsumer.html
index e55a049..28d0066 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ReplicationProcedureBiConsumer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ReplicationProcedureBiConsumer.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private class RawAsyncHBaseAdmin.ReplicationProcedureBiConsumer
+private static class RawAsyncHBaseAdmin.ReplicationProcedureBiConsumer
 extends RawAsyncHBaseAdmin.ProcedureBiConsumer
 
 
@@ -237,7 +237,7 @@ extends 
 
 peerId
-private final String peerId
+private final String peerId
 
 
 
@@ -246,7 +246,7 @@ extends 
 
 getOperation
-private final Supplier<String> getOperation
+private final Supplier<String> getOperation
 
 
 
@@ -263,7 +263,7 @@ extends 
 
 ReplicationProcedureBiConsumer
-ReplicationProcedureBiConsumer(String peerId,
+ReplicationProcedureBiConsumer(String peerId,
                                Supplier<String> getOperation)
 
 
@@ -281,7 +281,7 @@ extends 
 
 getDescription
-String getDescription()
+String getDescription()
 
 
 
@@ -290,7 +290,7 @@ extends 
 
 onFinished
-void onFinished()
+void onFinished()
 
 Specified by:
 onFinished in class RawAsyncHBaseAdmin.ProcedureBiConsumer
@@ -303,7 +303,7 @@ extends 
 
 onError
-void onError(Throwable error)
+void onError(Throwable error)
 
 Specified by:
 onError in class RawAsyncHBaseAdmin.ProcedureBiConsumer

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.SplitTableRegionProcedureBiConsumer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.SplitTableRegionProcedureBiConsumer.html
 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.SplitTableRegionProcedureBiConsumer.html
index 83fa972..9929331 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.SplitTableRegionProcedureBiConsumer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.SplitTableRegionProcedureBiConsumer.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private class RawAsyncHBaseAdmin.SplitTableRegionProcedureBiConsumer
+private static class RawAsyncHBaseAdmin.SplitTableRegionProcedureBiConsumer
 extends RawAsyncHBaseAdmin.TableProcedureBiConsumer
 
 
@@ -232,7 +232,7 @@ extends 
 
 SplitTableRegionProcedureBiConsumer
-SplitTableRegionProcedureBiConsumer(TableName tableName)
+SplitTableRegionProcedureBiConsumer(TableName tableName)
 
 
 
@@ -249,7 +249,7 @@ extends 
 
 getOperationType
-String getOperationType()

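Both hunks above only change "private class" to "private static class" on the procedure BiConsumer helpers. A minimal, generic Java illustration (not HBase code) of what that buys: a non-static inner class always captures its enclosing instance, while a static nested class does not, so it cannot accidentally keep the outer object alive and can be created without one.

  public class Outer {
    private final String name = "outer";

    // Inner holds an implicit Outer.this reference.
    private class Inner {
      String describe() { return "inner of " + name; }
    }

    // Nested holds no implicit reference; any state it needs is passed in explicitly.
    private static class Nested {
      private final String name;
      Nested(String name) { this.name = name; }
      String describe() { return "nested with " + name; }
    }

    public static void main(String[] args) {
      Outer outer = new Outer();
      System.out.println(outer.new Inner().describe());
      System.out.println(new Nested("explicit state").describe());
    }
  }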
[33/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
index 1c79b4b..6ce642c 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
@@ -237,10 +237,11 @@
AsyncServerRequestRpcRetryingCaller.Callable<T> callable)
 
 
-AsyncSingleRequestRpcRetryingCaller(org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer retryTimer,
+AsyncSingleRequestRpcRetryingCaller(org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer retryTimer,
                                     AsyncConnectionImpl conn,
                                     TableName tableName,
                                     byte[] row,
+                                    int replicaId,
                                     RegionLocateType locateType,
                                     AsyncSingleRequestRpcRetryingCaller.Callable<T> callable,
                                     long pauseNs,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncNonMetaRegionLocator.LocateRequest.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncNonMetaRegionLocator.LocateRequest.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncNonMetaRegionLocator.LocateRequest.html
index 77d960b..0db4667 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncNonMetaRegionLocator.LocateRequest.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncNonMetaRegionLocator.LocateRequest.html
@@ -105,11 +105,11 @@
 
 
 
-Map<AsyncNonMetaRegionLocator.LocateRequest,CompletableFuture<HRegionLocation>>
+private Map<AsyncNonMetaRegionLocator.LocateRequest,CompletableFuture<RegionLocations>>
 AsyncNonMetaRegionLocator.TableCache.allRequests
 
 
-Set<AsyncNonMetaRegionLocator.LocateRequest>
+private Set<AsyncNonMetaRegionLocator.LocateRequest>
 AsyncNonMetaRegionLocator.TableCache.pendingRequests
 
 
@@ -136,9 +136,9 @@
 
 
 private void
-AsyncNonMetaRegionLocator.complete(TableName tableName,
+AsyncNonMetaRegionLocator.complete(TableName tableName,
                                    AsyncNonMetaRegionLocator.LocateRequest req,
-                                   HRegionLocation loc,
+                                   RegionLocations locs,
                                    Throwable error)
 
 
@@ -163,8 +163,8 @@
 
 private boolean
 AsyncNonMetaRegionLocator.TableCache.tryComplete(AsyncNonMetaRegionLocator.LocateRequest req,
-                                                 CompletableFuture<HRegionLocation> future,
-                                                 Optional<HRegionLocation> location)
+                                                 CompletableFuture<RegionLocations> future,
+                                                 Optional<RegionLocations> locations)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncNonMetaRegionLocator.TableCache.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncNonMetaRegionLocator.TableCache.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncNonMetaRegionLocator.TableCache.html
index c2b9481..3efcd98 100644
--- 

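The class-use hunks above show two related signature changes: the single-request caller constructor now takes an int replicaId, and the non-meta locator now hands back RegionLocations (the locations of all replicas of a region) instead of a single HRegionLocation. A hedged sketch of how a caller might pick its replica from that result; getRegionLocation(int) and getDefaultRegionLocation() are existing RegionLocations accessors, but the fallback policy shown is illustrative only.

  // Illustrative only, not code from this patch.
  static HRegionLocation pickReplica(RegionLocations locs, int replicaId) {
    HRegionLocation loc = locs.getRegionLocation(replicaId);
    if (loc == null) {
      // Fallback shown for illustration: use the primary when the replica is not cached yet.
      loc = locs.getDefaultRegionLocation();
    }
    return loc;
  }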
hbase-site git commit: INFRA-10751 Empty commit

2019-01-03 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 2bf59208a -> 5a2137b52


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/5a2137b5
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/5a2137b5
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/5a2137b5

Branch: refs/heads/asf-site
Commit: 5a2137b522850a86e9e3e9b1d8709344bd922dfd
Parents: 2bf5920
Author: jenkins 
Authored: Thu Jan 3 14:53:10 2019 +
Committer: jenkins 
Committed: Thu Jan 3 14:53:10 2019 +

--

--




[13/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.Callable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.Callable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.Callable.html
index 6dc4edd..56e66a7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.Callable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.Callable.html
@@ -25,107 +25,110 @@
 017 */
 018package org.apache.hadoop.hbase.client;
 019
-020import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
+020import static 
org.apache.hadoop.hbase.util.FutureUtils.addListener;
 021
 022import java.io.IOException;
 023import 
java.util.concurrent.CompletableFuture;
-024
-025import 
org.apache.hadoop.hbase.HRegionLocation;
-026import 
org.apache.hadoop.hbase.TableName;
-027import 
org.apache.yetus.audience.InterfaceAudience;
-028import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-029import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
-030import 
org.apache.hadoop.hbase.util.Bytes;
+024import 
org.apache.hadoop.hbase.HRegionLocation;
+025import 
org.apache.hadoop.hbase.TableName;
+026import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+027import 
org.apache.hadoop.hbase.util.Bytes;
+028import 
org.apache.yetus.audience.InterfaceAudience;
+029
+030import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
 031
-032/**
-033 * Retry caller for a single request, 
such as get, put, delete, etc.
-034 */
-035@InterfaceAudience.Private
-036class 
AsyncSingleRequestRpcRetryingCallerT extends 
AsyncRpcRetryingCallerT {
-037
-038  @FunctionalInterface
-039  public interface CallableT {
-040CompletableFutureT 
call(HBaseRpcController controller, HRegionLocation loc,
-041ClientService.Interface stub);
-042  }
-043
-044  private final TableName tableName;
+032import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
+033
+034/**
+035 * Retry caller for a single request, 
such as get, put, delete, etc.
+036 */
+037@InterfaceAudience.Private
+038class 
AsyncSingleRequestRpcRetryingCallerT extends 
AsyncRpcRetryingCallerT {
+039
+040  @FunctionalInterface
+041  public interface CallableT {
+042CompletableFutureT 
call(HBaseRpcController controller, HRegionLocation loc,
+043ClientService.Interface stub);
+044  }
 045
-046  private final byte[] row;
+046  private final TableName tableName;
 047
-048  private final RegionLocateType 
locateType;
+048  private final byte[] row;
 049
-050  private final CallableT 
callable;
+050  private final int replicaId;
 051
-052  public 
AsyncSingleRequestRpcRetryingCaller(HashedWheelTimer retryTimer, 
AsyncConnectionImpl conn,
-053  TableName tableName, byte[] row, 
RegionLocateType locateType, CallableT callable,
-054  long pauseNs, int maxAttempts, long 
operationTimeoutNs, long rpcTimeoutNs,
-055  int startLogErrorsCnt) {
-056super(retryTimer, conn, pauseNs, 
maxAttempts, operationTimeoutNs, rpcTimeoutNs,
-057startLogErrorsCnt);
-058this.tableName = tableName;
-059this.row = row;
-060this.locateType = locateType;
-061this.callable = callable;
-062  }
-063
-064  private void call(HRegionLocation loc) 
{
-065ClientService.Interface stub;
-066try {
-067  stub = 
conn.getRegionServerStub(loc.getServerName());
-068} catch (IOException e) {
-069  onError(e,
-070() - "Get async stub to " + 
loc.getServerName() + " for '" + Bytes.toStringBinary(row)
-071+ "' in " + 
loc.getRegion().getEncodedName() + " of " + tableName + " failed",
-072err - 
conn.getLocator().updateCachedLocation(loc, err));
-073  return;
-074}
-075resetCallTimeout();
-076callable.call(controller, loc, 
stub).whenComplete(
-077  (result, error) - {
-078if (error != null) {
-079  onError(error,
-080() - "Call to " + 
loc.getServerName() + " for '" + Bytes.toStringBinary(row) + "' in "
-081+ 
loc.getRegion().getEncodedName() + " of " + tableName + " failed",
-082err - 
conn.getLocator().updateCachedLocation(loc, err));
-083  return;
-084}
-085future.complete(result);
-086  });
-087  }
-088
-089  @Override
-090  protected void doCall() {
-091long locateTimeoutNs;
-092if (operationTimeoutNs  0) {
-093  locateTimeoutNs = 
remainingTimeNs();
-094  if (locateTimeoutNs = 0) {
-095completeExceptionally();
-096return;
-097  }
-098} else {
-099  locateTimeoutNs = -1L;
-100}
-101conn.getLocator()
-102.getRegionLocation(tableName, 
row, locateType, 

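The deleted call(HRegionLocation) body above shows the retry pattern used by the single-request caller: issue the async RPC, and on failure report the error, update the cached region location, and let the retry machinery try again. A simplified, self-contained sketch of that shape (illustrative, not the HBase implementation; pause/backoff and error bookkeeping are omitted on purpose):

  import java.util.concurrent.CompletableFuture;
  import java.util.function.Supplier;

  final class RetrySketch {
    // Run the async call; on failure invalidate the cached location and try again,
    // up to attemptsLeft attempts. No backoff in this sketch.
    static <T> void attempt(Supplier<CompletableFuture<T>> rpc, Runnable invalidateCachedLocation,
        int attemptsLeft, CompletableFuture<T> future) {
      rpc.get().whenComplete((result, error) -> {
        if (error == null) {
          future.complete(result);
          return;
        }
        invalidateCachedLocation.run();        // drop the possibly stale region location
        if (attemptsLeft <= 1) {
          future.completeExceptionally(error); // retries exhausted
        } else {
          attempt(rpc, invalidateCachedLocation, attemptsLeft - 1, future);
        }
      });
    }
  }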
[08/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
index 736388b..197b99d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
@@ -26,3624 +26,3599 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-021
-022import com.google.protobuf.Message;
-023import com.google.protobuf.RpcChannel;
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.Collections;
-028import java.util.EnumSet;
-029import java.util.HashMap;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Optional;
-033import java.util.Set;
-034import 
java.util.concurrent.CompletableFuture;
-035import 
java.util.concurrent.ConcurrentHashMap;
-036import java.util.concurrent.TimeUnit;
-037import 
java.util.concurrent.atomic.AtomicReference;
-038import java.util.function.BiConsumer;
-039import java.util.function.Function;
-040import java.util.function.Supplier;
-041import java.util.regex.Pattern;
-042import java.util.stream.Collectors;
-043import java.util.stream.Stream;
-044import org.apache.commons.io.IOUtils;
-045import 
org.apache.hadoop.conf.Configuration;
-046import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-047import 
org.apache.hadoop.hbase.CacheEvictionStats;
-048import 
org.apache.hadoop.hbase.CacheEvictionStatsAggregator;
-049import 
org.apache.hadoop.hbase.ClusterMetrics;
-050import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-051import 
org.apache.hadoop.hbase.ClusterMetricsBuilder;
-052import 
org.apache.hadoop.hbase.HConstants;
-053import 
org.apache.hadoop.hbase.HRegionLocation;
-054import 
org.apache.hadoop.hbase.MetaTableAccessor;
-055import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-056import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.RegionMetrics;
-059import 
org.apache.hadoop.hbase.RegionMetricsBuilder;
-060import 
org.apache.hadoop.hbase.ServerName;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotDisabledException;
-064import 
org.apache.hadoop.hbase.TableNotEnabledException;
-065import 
org.apache.hadoop.hbase.TableNotFoundException;
-066import 
org.apache.hadoop.hbase.UnknownRegionException;
-067import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-068import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-071import 
org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-072import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-073import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.replication.SyncReplicationState;
-083import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-084import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-085import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-088import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-089import 
org.apache.yetus.audience.InterfaceAudience;
-090import org.slf4j.Logger;
-091import org.slf4j.LoggerFactory;
-092
-093import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-094import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-095import 
org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
-096import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-097import 

[18/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder.html
index 2e150bc..0b315b8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder.html
@@ -25,22 +25,22 @@
 017 */
 018package org.apache.hadoop.hbase.client;
 019
-020import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
-021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
-022import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+020import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
+022import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
 023
-024import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-025
-026import java.util.List;
-027import 
java.util.concurrent.CompletableFuture;
-028import java.util.concurrent.TimeUnit;
-029
-030import 
org.apache.hadoop.hbase.HRegionLocation;
-031import 
org.apache.hadoop.hbase.ServerName;
-032import 
org.apache.hadoop.hbase.TableName;
-033import 
org.apache.yetus.audience.InterfaceAudience;
-034import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
-035import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+024import java.util.List;
+025import 
java.util.concurrent.CompletableFuture;
+026import java.util.concurrent.TimeUnit;
+027import 
org.apache.hadoop.hbase.HRegionLocation;
+028import 
org.apache.hadoop.hbase.ServerName;
+029import 
org.apache.hadoop.hbase.TableName;
+030import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+031import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+032import 
org.apache.yetus.audience.InterfaceAudience;
+033
+034import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
+035
 036import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
 037import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse;
 038
@@ -83,432 +83,441 @@
 075
 076    private RegionLocateType locateType = RegionLocateType.CURRENT;
 077
-078    public SingleRequestCallerBuilder<T> table(TableName tableName) {
-079      this.tableName = tableName;
-080      return this;
-081    }
-082
-083    public SingleRequestCallerBuilder<T> row(byte[] row) {
-084      this.row = row;
-085      return this;
-086    }
-087
-088    public SingleRequestCallerBuilder<T> action(
-089        AsyncSingleRequestRpcRetryingCaller.Callable<T> callable) {
-090      this.callable = callable;
-091      return this;
-092    }
-093
-094    public SingleRequestCallerBuilder<T> operationTimeout(long operationTimeout, TimeUnit unit) {
-095      this.operationTimeoutNs = unit.toNanos(operationTimeout);
-096      return this;
-097    }
-098
-099    public SingleRequestCallerBuilder<T> rpcTimeout(long rpcTimeout, TimeUnit unit) {
-100      this.rpcTimeoutNs = unit.toNanos(rpcTimeout);
-101      return this;
-102    }
-103
-104    public SingleRequestCallerBuilder<T> locateType(RegionLocateType locateType) {
-105      this.locateType = locateType;
-106      return this;
-107    }
-108
-109    public SingleRequestCallerBuilder<T> pause(long pause, TimeUnit unit) {
-110      this.pauseNs = unit.toNanos(pause);
-111      return this;
-112    }
-113
-114    public SingleRequestCallerBuilder<T> maxAttempts(int maxAttempts) {
-115      this.maxAttempts = maxAttempts;
-116      return this;
-117    }
-118
-119    public SingleRequestCallerBuilder<T> startLogErrorsCnt(int startLogErrorsCnt) {
-120      this.startLogErrorsCnt = startLogErrorsCnt;
-121      return this;
-122    }
-123
-124    public AsyncSingleRequestRpcRetryingCaller<T> build() {
-125      return new AsyncSingleRequestRpcRetryingCaller<>(retryTimer, conn,
-126          checkNotNull(tableName, "tableName is null"), checkNotNull(row, "row is null"),
-127          checkNotNull(locateType, "locateType is null"), checkNotNull(callable, "action is null"),
-128          pauseNs, maxAttempts, operationTimeoutNs, rpcTimeoutNs, startLogErrorsCnt);
+078    private int replicaId = RegionReplicaUtil.DEFAULT_REPLICA_ID;
+079
+080    public SingleRequestCallerBuilder<T> table(TableName tableName) {
+081      this.tableName = tableName;
+082      return this;
+083    }
+084
+085    public SingleRequestCallerBuilder<T> row(byte[]

[28/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.ServerRequest.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.ServerRequest.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.ServerRequest.html
index cd0ff28..4f9947f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.ServerRequest.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.ServerRequest.html
@@ -31,161 +31,161 @@
 023import static 
org.apache.hadoop.hbase.client.ConnectionUtils.resetController;
 024import static 
org.apache.hadoop.hbase.client.ConnectionUtils.translateException;
 025import static 
org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
-026
-027import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-028
-029import java.io.IOException;
-030import java.util.ArrayList;
-031import java.util.Collections;
-032import java.util.HashMap;
-033import java.util.IdentityHashMap;
-034import java.util.List;
-035import java.util.Map;
-036import java.util.Optional;
-037import 
java.util.concurrent.CompletableFuture;
-038import 
java.util.concurrent.ConcurrentHashMap;
-039import 
java.util.concurrent.ConcurrentLinkedQueue;
-040import 
java.util.concurrent.ConcurrentMap;
-041import 
java.util.concurrent.ConcurrentSkipListMap;
-042import java.util.concurrent.TimeUnit;
-043import java.util.function.Supplier;
-044import java.util.stream.Collectors;
-045import java.util.stream.Stream;
-046
-047import 
org.apache.hadoop.hbase.CellScannable;
-048import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-049import 
org.apache.hadoop.hbase.HRegionLocation;
-050import 
org.apache.hadoop.hbase.ServerName;
-051import 
org.apache.hadoop.hbase.TableName;
-052import 
org.apache.yetus.audience.InterfaceAudience;
-053import org.slf4j.Logger;
-054import org.slf4j.LoggerFactory;
-055import 
org.apache.hadoop.hbase.client.MultiResponse.RegionResult;
-056import 
org.apache.hadoop.hbase.client.RetriesExhaustedException.ThrowableWithExtraContext;
-057import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-058import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-059import 
org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
-060import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-061import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
-062import 
org.apache.hadoop.hbase.util.Bytes;
-063import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-064
-065/**
-066 * Retry caller for batch.
-067 * <p>
-068 * Notice that, the {@link #operationTimeoutNs} is the total time limit now which is the same with
-069 * other single operations
-070 * <p>
-071 * And the {@link #maxAttempts} is a limit for each single operation in the batch logically. In the
-072 * implementation, we will record a {@code tries} parameter for each operation group, and if it is
-073 * split to several groups when retrying, the sub groups will inherit the {@code tries}. You can
-074 * imagine that the whole retrying process is a tree, and the {@link #maxAttempts} is the limit of
-075 * the depth of the tree.
-076 */
-077@InterfaceAudience.Private
-078class 
AsyncBatchRpcRetryingCallerT {
-079
-080  private static final Logger LOG = 
LoggerFactory.getLogger(AsyncBatchRpcRetryingCaller.class);
-081
-082  private final HashedWheelTimer 
retryTimer;
-083
-084  private final AsyncConnectionImpl 
conn;
-085
-086  private final TableName tableName;
-087
-088  private final List<Action> actions;
-089
-090  private final List<CompletableFuture<T>> futures;
-091
-092  private final IdentityHashMap<Action, CompletableFuture<T>> action2Future;
-093
-094  private final IdentityHashMap<Action, List<ThrowableWithExtraContext>> action2Errors;
-095
-096  private final long pauseNs;
-097
-098  private final int maxAttempts;
-099
-100  private final long 
operationTimeoutNs;
-101
-102  private final long rpcTimeoutNs;
-103
-104  private final int startLogErrorsCnt;
-105
-106  private final long startNs;
-107
-108  // we can not use HRegionLocation as the map key because the hashCode and equals method of
-109  // HRegionLocation only consider serverName.
-110  private static final class RegionRequest {
-111
-112    public final HRegionLocation loc;
-113
-114    public final ConcurrentLinkedQueue<Action> actions = new ConcurrentLinkedQueue<>();
-115
-116    public RegionRequest(HRegionLocation loc) {
-117      this.loc = loc;
-118    }
-119  }
-120
-121  private static final class ServerRequest {
-122
-123    public final ConcurrentMap<byte[], RegionRequest> actionsByRegion =
-124        new ConcurrentSkipListMap<>(Bytes.BYTES_COMPARATOR);
-125
-126    public void addAction(HRegionLocation loc, Action action) {
-127

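The class comment in the hunk above describes the batch caller's retry model: operationTimeoutNs is a total budget, maxAttempts bounds each logical operation, and retried sub-groups inherit the tries counter, so the whole retry process forms a tree whose depth is capped by maxAttempts. A hedged sketch of that idea only; groupByServer and send are hypothetical helpers, and Action/ServerName stand in for the HBase types:

  // Illustrative retry-tree sketch, not the AsyncBatchRpcRetryingCaller implementation.
  static void groupAndSend(List<Action> actions, int tries, int maxAttempts) {
    if (tries > maxAttempts) {
      return; // a real caller would fail the remaining actions here
    }
    Map<ServerName, List<Action>> byServer = groupByServer(actions); // hypothetical helper
    byServer.forEach((server, group) ->
        send(server, group).whenComplete((failedActions, error) -> { // hypothetical async send
          List<Action> toRetry = error != null ? group : failedActions;
          if (toRetry != null && !toRetry.isEmpty()) {
            groupAndSend(toRetry, tries + 1, maxAttempts);           // sub-group inherits tries
          }
        }));
  }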
[06/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
index 736388b..197b99d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
@@ -26,3624 +26,3599 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-021
-022import com.google.protobuf.Message;
-023import com.google.protobuf.RpcChannel;
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.Collections;
-028import java.util.EnumSet;
-029import java.util.HashMap;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Optional;
-033import java.util.Set;
-034import 
java.util.concurrent.CompletableFuture;
-035import 
java.util.concurrent.ConcurrentHashMap;
-036import java.util.concurrent.TimeUnit;
-037import 
java.util.concurrent.atomic.AtomicReference;
-038import java.util.function.BiConsumer;
-039import java.util.function.Function;
-040import java.util.function.Supplier;
-041import java.util.regex.Pattern;
-042import java.util.stream.Collectors;
-043import java.util.stream.Stream;
-044import org.apache.commons.io.IOUtils;
-045import 
org.apache.hadoop.conf.Configuration;
-046import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-047import 
org.apache.hadoop.hbase.CacheEvictionStats;
-048import 
org.apache.hadoop.hbase.CacheEvictionStatsAggregator;
-049import 
org.apache.hadoop.hbase.ClusterMetrics;
-050import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-051import 
org.apache.hadoop.hbase.ClusterMetricsBuilder;
-052import 
org.apache.hadoop.hbase.HConstants;
-053import 
org.apache.hadoop.hbase.HRegionLocation;
-054import 
org.apache.hadoop.hbase.MetaTableAccessor;
-055import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-056import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.RegionMetrics;
-059import 
org.apache.hadoop.hbase.RegionMetricsBuilder;
-060import 
org.apache.hadoop.hbase.ServerName;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotDisabledException;
-064import 
org.apache.hadoop.hbase.TableNotEnabledException;
-065import 
org.apache.hadoop.hbase.TableNotFoundException;
-066import 
org.apache.hadoop.hbase.UnknownRegionException;
-067import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-068import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-071import 
org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-072import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-073import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.replication.SyncReplicationState;
-083import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-084import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-085import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-088import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-089import 
org.apache.yetus.audience.InterfaceAudience;
-090import org.slf4j.Logger;
-091import org.slf4j.LoggerFactory;
-092
-093import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-094import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-095import 
org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
-096import 

[04/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
index 736388b..197b99d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
@@ -26,3624 +26,3599 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-021
-022import com.google.protobuf.Message;
-023import com.google.protobuf.RpcChannel;
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.Collections;
-028import java.util.EnumSet;
-029import java.util.HashMap;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Optional;
-033import java.util.Set;
-034import 
java.util.concurrent.CompletableFuture;
-035import 
java.util.concurrent.ConcurrentHashMap;
-036import java.util.concurrent.TimeUnit;
-037import 
java.util.concurrent.atomic.AtomicReference;
-038import java.util.function.BiConsumer;
-039import java.util.function.Function;
-040import java.util.function.Supplier;
-041import java.util.regex.Pattern;
-042import java.util.stream.Collectors;
-043import java.util.stream.Stream;
-044import org.apache.commons.io.IOUtils;
-045import 
org.apache.hadoop.conf.Configuration;
-046import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-047import 
org.apache.hadoop.hbase.CacheEvictionStats;
-048import 
org.apache.hadoop.hbase.CacheEvictionStatsAggregator;
-049import 
org.apache.hadoop.hbase.ClusterMetrics;
-050import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-051import 
org.apache.hadoop.hbase.ClusterMetricsBuilder;
-052import 
org.apache.hadoop.hbase.HConstants;
-053import 
org.apache.hadoop.hbase.HRegionLocation;
-054import 
org.apache.hadoop.hbase.MetaTableAccessor;
-055import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-056import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.RegionMetrics;
-059import 
org.apache.hadoop.hbase.RegionMetricsBuilder;
-060import 
org.apache.hadoop.hbase.ServerName;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotDisabledException;
-064import 
org.apache.hadoop.hbase.TableNotEnabledException;
-065import 
org.apache.hadoop.hbase.TableNotFoundException;
-066import 
org.apache.hadoop.hbase.UnknownRegionException;
-067import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-068import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-071import 
org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-072import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-073import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.replication.SyncReplicationState;
-083import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-084import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-085import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-088import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-089import 
org.apache.yetus.audience.InterfaceAudience;
-090import org.slf4j.Logger;
-091import org.slf4j.LoggerFactory;
-092
-093import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-094import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-095import 
org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
-096import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-097import 

[03/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
index 736388b..197b99d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
@@ -26,3624 +26,3599 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-021
-022import com.google.protobuf.Message;
-023import com.google.protobuf.RpcChannel;
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.Collections;
-028import java.util.EnumSet;
-029import java.util.HashMap;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Optional;
-033import java.util.Set;
-034import 
java.util.concurrent.CompletableFuture;
-035import 
java.util.concurrent.ConcurrentHashMap;
-036import java.util.concurrent.TimeUnit;
-037import 
java.util.concurrent.atomic.AtomicReference;
-038import java.util.function.BiConsumer;
-039import java.util.function.Function;
-040import java.util.function.Supplier;
-041import java.util.regex.Pattern;
-042import java.util.stream.Collectors;
-043import java.util.stream.Stream;
-044import org.apache.commons.io.IOUtils;
-045import 
org.apache.hadoop.conf.Configuration;
-046import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-047import 
org.apache.hadoop.hbase.CacheEvictionStats;
-048import 
org.apache.hadoop.hbase.CacheEvictionStatsAggregator;
-049import 
org.apache.hadoop.hbase.ClusterMetrics;
-050import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-051import 
org.apache.hadoop.hbase.ClusterMetricsBuilder;
-052import 
org.apache.hadoop.hbase.HConstants;
-053import 
org.apache.hadoop.hbase.HRegionLocation;
-054import 
org.apache.hadoop.hbase.MetaTableAccessor;
-055import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-056import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.RegionMetrics;
-059import 
org.apache.hadoop.hbase.RegionMetricsBuilder;
-060import 
org.apache.hadoop.hbase.ServerName;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotDisabledException;
-064import 
org.apache.hadoop.hbase.TableNotEnabledException;
-065import 
org.apache.hadoop.hbase.TableNotFoundException;
-066import 
org.apache.hadoop.hbase.UnknownRegionException;
-067import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-068import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-071import 
org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-072import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-073import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.replication.SyncReplicationState;
-083import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-084import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-085import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-088import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-089import 
org.apache.yetus.audience.InterfaceAudience;
-090import org.slf4j.Logger;
-091import org.slf4j.LoggerFactory;
-092
-093import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-094import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-095import 
org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
-096import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-097import