hbase git commit: HBASE-20000 Remove the quantum logic in FairQueue, always put high priority queue in front

2018-02-14 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master d0f2d18ca -> c18e7a963


HBASE-20000 Remove the quantum logic in FairQueue, always put high priority 
queue in front


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c18e7a96
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c18e7a96
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c18e7a96

Branch: refs/heads/master
Commit: c18e7a963d9c4dc862c4706f128a4e436111669c
Parents: d0f2d18
Author: zhangduo 
Authored: Thu Feb 15 13:49:54 2018 +0800
Committer: zhangduo 
Committed: Thu Feb 15 13:49:54 2018 +0800

--
 .../org/apache/hadoop/hbase/util/AvlUtil.java   |  14 +
 .../hbase/master/procedure/FairQueue.java   |  80 +++
 .../master/procedure/MasterProcedureEnv.java|   2 +-
 .../procedure/MasterProcedureScheduler.java | 560 +--
 .../master/procedure/MasterProcedureUtil.java   |  38 +-
 .../hbase/master/procedure/PeerQueue.java   |  54 ++
 .../hadoop/hbase/master/procedure/Queue.java| 115 
 .../hbase/master/procedure/SchemaLocking.java   | 214 +++
 .../hbase/master/procedure/ServerQueue.java |  43 ++
 .../hbase/master/procedure/TableQueue.java  |  89 +++
 ...ProcedureSchedulerPerformanceEvaluation.java |   2 +-
 .../procedure/TestMasterProcedureScheduler.java |  35 +-
 ...TestMasterProcedureSchedulerConcurrency.java |   6 +-
 13 files changed, 676 insertions(+), 576 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c18e7a96/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AvlUtil.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AvlUtil.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AvlUtil.java
index 7823360..6b6eaef 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AvlUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AvlUtil.java
@@ -549,6 +549,20 @@ public final class AvlUtil {
 }
 
 /**
+ * @param head the head of the linked list
+ * @param base the node which we want to add the {@code node} before it
+ * @param node the node which we want to add it before the {@code base} 
node
+ */
+public static  TNode prepend(TNode head, 
TNode base, TNode node) {
+  assert !isLinked(node) : node + " is already linked";
+  node.iterNext = base;
+  node.iterPrev = base.iterPrev;
+  base.iterPrev.iterNext = node;
+  base.iterPrev = node;
+  return head == base ? node : head;
+}
+
+/**
  * @param node the node to check
  * @return true if the node is linked to a list, false otherwise
  */

http://git-wip-us.apache.org/repos/asf/hbase/blob/c18e7a96/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/FairQueue.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/FairQueue.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/FairQueue.java
new file mode 100644
index 0000000..ac8e577
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/FairQueue.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.master.procedure;
+
+import org.apache.hadoop.hbase.util.AvlUtil.AvlIterableList;
+import org.apache.yetus.audience.InterfaceAudience;
+
+@InterfaceAudience.Private
+public class FairQueue> {
+
+  private Queue queueHead = null;
+  private int size = 0;
+
+  public boolean hasRunnables() {
+return size > 0;
+  }
+
+  public void add(Queue queue) {
+// For normal priority queue, just append it to the tail
+if (queueHead == null || queue.getPriority() == 1) {
+  queueHead = AvlIterableList.append(queueHead, queue);
+  size++;
+  return;
+}
+// Find the one which priority is less than us
+// For now only 

[17/30] hbase git commit: HBASE-19116 Currently the tail of hfiles with CellComparator* classname makes it so hbase1 can't open hbase2 written hfiles; fix

2018-02-14 Thread zhangduo
HBASE-19116 Currently the tail of hfiles with CellComparator* classname makes 
it so hbase1 can't open hbase2 written hfiles; fix

Serializing, if appropriate, write the hbase-1.x version of the
Comparator to the hfile trailer so hbase-1.x files can read hbase-2.x
hfiles (they are the same format).


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8d26736b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8d26736b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8d26736b

Branch: refs/heads/HBASE-19064
Commit: 8d26736bc2b0c28efd5caa3be7d8c9037dba633a
Parents: 2f1b3ea
Author: Michael Stack 
Authored: Tue Feb 13 10:28:45 2018 -0800
Committer: Michael Stack 
Committed: Wed Feb 14 07:46:30 2018 -0800

--
 .../hadoop/hbase/io/hfile/FixedFileTrailer.java | 86 ++--
 .../hbase/io/hfile/TestFixedFileTrailer.java| 18 +++-
 2 files changed, 77 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8d26736b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
index a0d3df3..55b2ee0 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.io.hfile;
 
+
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInput;
@@ -185,34 +186,37 @@ public class FixedFileTrailer {
 baos.writeTo(outputStream);
   }
 
-  /**
-   * Write trailer data as protobuf
-   * @param outputStream
-   * @throws IOException
-   */
-  void serializeAsPB(DataOutputStream output) throws IOException {
-ByteArrayOutputStream baos = new ByteArrayOutputStream();
+  @org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting
+  HFileProtos.FileTrailerProto toProtobuf() {
 HFileProtos.FileTrailerProto.Builder builder = 
HFileProtos.FileTrailerProto.newBuilder()
-  .setFileInfoOffset(fileInfoOffset)
-  .setLoadOnOpenDataOffset(loadOnOpenDataOffset)
-  .setUncompressedDataIndexSize(uncompressedDataIndexSize)
-  .setTotalUncompressedBytes(totalUncompressedBytes)
-  .setDataIndexCount(dataIndexCount)
-  .setMetaIndexCount(metaIndexCount)
-  .setEntryCount(entryCount)
-  .setNumDataIndexLevels(numDataIndexLevels)
-  .setFirstDataBlockOffset(firstDataBlockOffset)
-  .setLastDataBlockOffset(lastDataBlockOffset)
-  // TODO this is a classname encoded into an  HFile's trailer. We are 
going to need to have
-  // some compat code here.
-  .setComparatorClassName(comparatorClassName)
-  .setCompressionCodec(compressionCodec.ordinal());
+.setFileInfoOffset(fileInfoOffset)
+.setLoadOnOpenDataOffset(loadOnOpenDataOffset)
+.setUncompressedDataIndexSize(uncompressedDataIndexSize)
+.setTotalUncompressedBytes(totalUncompressedBytes)
+.setDataIndexCount(dataIndexCount)
+.setMetaIndexCount(metaIndexCount)
+.setEntryCount(entryCount)
+.setNumDataIndexLevels(numDataIndexLevels)
+.setFirstDataBlockOffset(firstDataBlockOffset)
+.setLastDataBlockOffset(lastDataBlockOffset)
+.setComparatorClassName(getHBase1CompatibleName(comparatorClassName))
+.setCompressionCodec(compressionCodec.ordinal());
 if (encryptionKey != null) {
   builder.setEncryptionKey(UnsafeByteOperations.unsafeWrap(encryptionKey));
 }
+return builder.build();
+  }
+
+  /**
+   * Write trailer data as protobuf.
+   * NOTE: we run a translation on the comparator name and will serialize the 
old hbase-1.x where
+   * it makes sense. See {@link #getHBase1CompatibleName(String)}.
+   */
+  void serializeAsPB(DataOutputStream output) throws IOException {
+ByteArrayOutputStream baos = new ByteArrayOutputStream();
 // We need this extra copy unfortunately to determine the final size of the
 // delimited output, see use of baos.size() below.
-builder.build().writeDelimitedTo(baos);
+toProtobuf().writeDelimitedTo(baos);
 baos.writeTo(output);
 // Pad to make up the difference between variable PB encoding length and 
the
 // length when encoded as writable under earlier V2 formats. Failure to pad
@@ -298,8 +302,6 @@ public class FixedFileTrailer {
   lastDataBlockOffset = trailerProto.getLastDataBlockOffset();
 }
 if (trailerProto.hasComparatorClassName()) {
-  // TODO this is a 

[26/30] hbase git commit: HBASE-19864 Use protobuf instead of enum.ordinal to store SyncReplicationState

2018-02-14 Thread zhangduo
HBASE-19864 Use protobuf instead of enum.ordinal to store SyncReplicationState

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/83a91d6a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/83a91d6a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/83a91d6a

Branch: refs/heads/HBASE-19064
Commit: 83a91d6aa4af005b12eeeb8157acaaf51897b8ff
Parents: eb254a1
Author: Guanghao Zhang 
Authored: Fri Jan 26 16:50:48 2018 +0800
Committer: zhangduo 
Committed: Thu Feb 15 12:30:26 2018 +0800

--
 .../replication/ReplicationPeerConfigUtil.java  | 22 ++---
 .../hbase/replication/SyncReplicationState.java | 17 +
 .../hbase/shaded/protobuf/RequestConverter.java |  7 +++---
 .../src/main/protobuf/Replication.proto | 13 ++
 .../replication/ZKReplicationPeerStorage.java   | 25 +---
 .../hadoop/hbase/master/MasterRpcServices.java  |  9 ---
 ...ransitPeerSyncReplicationStateProcedure.java |  9 ---
 .../TestReplicationSourceManager.java   |  2 +-
 8 files changed, 67 insertions(+), 37 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/83a91d6a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
index 86b49ea..5096824 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
@@ -398,7 +398,7 @@ public final class ReplicationPeerConfigUtil {
 ReplicationProtos.ReplicationState.State.ENABLED == 
desc.getState().getState();
 ReplicationPeerConfig config = convert(desc.getConfig());
 return new ReplicationPeerDescription(desc.getId(), enabled, config,
-
SyncReplicationState.valueOf(desc.getSyncReplicationState().getNumber()));
+  toSyncReplicationState(desc.getSyncReplicationState()));
   }
 
   public static ReplicationProtos.ReplicationPeerDescription
@@ -406,17 +406,33 @@ public final class ReplicationPeerConfigUtil {
 ReplicationProtos.ReplicationPeerDescription.Builder builder =
 ReplicationProtos.ReplicationPeerDescription.newBuilder();
 builder.setId(desc.getPeerId());
+
 ReplicationProtos.ReplicationState.Builder stateBuilder =
 ReplicationProtos.ReplicationState.newBuilder();
 stateBuilder.setState(desc.isEnabled() ? 
ReplicationProtos.ReplicationState.State.ENABLED :
 ReplicationProtos.ReplicationState.State.DISABLED);
 builder.setState(stateBuilder.build());
+
 builder.setConfig(convert(desc.getPeerConfig()));
-builder.setSyncReplicationState(
-  
ReplicationProtos.SyncReplicationState.forNumber(desc.getSyncReplicationState().ordinal()));
+
builder.setSyncReplicationState(toSyncReplicationState(desc.getSyncReplicationState()));
+
 return builder.build();
   }
 
+  public static ReplicationProtos.SyncReplicationState
+  toSyncReplicationState(SyncReplicationState state) {
+ReplicationProtos.SyncReplicationState.Builder syncReplicationStateBuilder 
=
+ReplicationProtos.SyncReplicationState.newBuilder();
+syncReplicationStateBuilder
+
.setState(ReplicationProtos.SyncReplicationState.State.forNumber(state.ordinal()));
+return syncReplicationStateBuilder.build();
+  }
+
+  public static SyncReplicationState
+  toSyncReplicationState(ReplicationProtos.SyncReplicationState state) {
+return SyncReplicationState.valueOf(state.getState().getNumber());
+  }
+
   public static ReplicationPeerConfig appendTableCFsToReplicationPeerConfig(
   Map tableCfs, ReplicationPeerConfig peerConfig) 
{
 ReplicationPeerConfigBuilder builder = 
ReplicationPeerConfig.newBuilder(peerConfig);

http://git-wip-us.apache.org/repos/asf/hbase/blob/83a91d6a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/SyncReplicationState.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/SyncReplicationState.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/SyncReplicationState.java
index bd144e9..a65b144 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/SyncReplicationState.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/SyncReplicationState.java

[11/30] hbase git commit: HBASE-19876 The exception happening in converting pb mutation to hbase.mutation messes up the CellScanner

2018-02-14 Thread zhangduo
HBASE-19876 The exception happening in converting pb mutation to hbase.mutation 
messes up the CellScanner


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2f48fdbb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2f48fdbb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2f48fdbb

Branch: refs/heads/HBASE-19064
Commit: 2f48fdbb26ff555485b4aa3393d835b7dd8797a0
Parents: 16f1f5b
Author: Chia-Ping Tsai 
Authored: Sun Feb 11 03:49:53 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Feb 13 21:08:59 2018 +0800

--
 .../hbase/shaded/protobuf/RequestConverter.java |   4 +-
 .../hbase/regionserver/RSRpcServices.java   | 138 +++--
 .../client/TestMalformedCellFromClient.java | 203 +--
 3 files changed, 262 insertions(+), 83 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2f48fdbb/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
index 8ac7058..0afcfe1 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
@@ -473,7 +473,7 @@ public final class RequestConverter {
 return regionActionBuilder;
   }
 
-  private static RegionAction.Builder getRegionActionBuilderWithRegion(
+  public static RegionAction.Builder getRegionActionBuilderWithRegion(
   final RegionAction.Builder regionActionBuilder, final byte [] 
regionName) {
 RegionSpecifier region = 
buildRegionSpecifier(RegionSpecifierType.REGION_NAME, regionName);
 regionActionBuilder.setRegion(region);
@@ -1099,7 +1099,7 @@ public final class RequestConverter {
* @return a Condition
* @throws IOException
*/
-  private static Condition buildCondition(final byte[] row, final byte[] 
family,
+  public static Condition buildCondition(final byte[] row, final byte[] family,
   final byte[] qualifier, final ByteArrayComparable comparator, final 
CompareType compareType)
   throws IOException {
 Condition.Builder builder = Condition.newBuilder();

http://git-wip-us.apache.org/repos/asf/hbase/blob/2f48fdbb/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 44934a6..5b4e3b8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -560,67 +560,60 @@ public class RSRpcServices implements 
HBaseRPCErrorHandler,
* Mutate a list of rows atomically.
* @param cellScanner if non-null, the mutation data -- the Cell content.
*/
-  private void mutateRows(final HRegion region, final OperationQuota quota,
-  final List actions, final CellScanner cellScanner,
-  RegionActionResult.Builder builder, final ActivePolicyEnforcement 
spaceQuotaEnforcement)
-  throws IOException {
-for (ClientProtos.Action action: actions) {
-  if (action.hasGet()) {
-throw new DoNotRetryIOException("Atomic put and/or delete only, not a 
Get=" +
-  action.getGet());
-  }
-}
-doBatchOp(builder, region, quota, actions, cellScanner, 
spaceQuotaEnforcement, true);
-  }
-
-  /**
-   * Mutate a list of rows atomically.
-   * @param cellScanner if non-null, the mutation data -- the Cell content.
-   */
   private boolean checkAndRowMutate(final HRegion region, final 
List actions,
 final CellScanner cellScanner, byte[] row, 
byte[] family, byte[] qualifier,
 CompareOperator op, ByteArrayComparable 
comparator, RegionActionResult.Builder builder,
 ActivePolicyEnforcement 
spaceQuotaEnforcement) throws IOException {
-if (!region.getRegionInfo().isMetaRegion()) {
-  regionServer.cacheFlusher.reclaimMemStoreMemory();
-}
-RowMutations rm = null;
-int i = 0;
-ClientProtos.ResultOrException.Builder resultOrExceptionOrBuilder =
+int countOfCompleteMutation = 0;
+try {
+  if (!region.getRegionInfo().isMetaRegion()) {
+regionServer.cacheFlusher.reclaimMemStoreMemory();
+  }
+  RowMutations rm = null;
+ 

[20/30] hbase git commit: HBASE-19781 Add a new cluster state flag for synchronous replication

2018-02-14 Thread zhangduo
HBASE-19781 Add a new cluster state flag for synchronous replication


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/45e25dc1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/45e25dc1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/45e25dc1

Branch: refs/heads/HBASE-19064
Commit: 45e25dc1e82fa25370495a68c48c840ec46cd71d
Parents: e339f3b
Author: Guanghao Zhang 
Authored: Mon Jan 22 11:44:49 2018 +0800
Committer: zhangduo 
Committed: Thu Feb 15 12:30:26 2018 +0800

--
 .../org/apache/hadoop/hbase/client/Admin.java   |  39 +
 .../apache/hadoop/hbase/client/AsyncAdmin.java  |  31 
 .../hadoop/hbase/client/AsyncHBaseAdmin.java|   7 +
 .../hbase/client/ConnectionImplementation.java  |   9 ++
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |  26 +++
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java |  15 ++
 .../client/ShortCircuitMasterConnection.java|   9 ++
 .../replication/ReplicationPeerConfigUtil.java  |  26 +--
 .../replication/ReplicationPeerDescription.java |  10 +-
 .../hbase/replication/SyncReplicationState.java |  48 ++
 .../hbase/shaded/protobuf/RequestConverter.java |  10 ++
 .../src/main/protobuf/Master.proto  |   4 +
 .../src/main/protobuf/MasterProcedure.proto |   6 +-
 .../src/main/protobuf/Replication.proto |  20 +++
 .../replication/ReplicationPeerStorage.java |  18 ++-
 .../hbase/replication/ReplicationUtils.java |   1 +
 .../replication/ZKReplicationPeerStorage.java   |  60 +--
 .../replication/TestReplicationStateBasic.java  |  23 ++-
 .../TestZKReplicationPeerStorage.java   |  12 +-
 .../hbase/coprocessor/MasterObserver.java   |  23 +++
 .../org/apache/hadoop/hbase/master/HMaster.java |  12 ++
 .../hbase/master/MasterCoprocessorHost.java |  21 +++
 .../hadoop/hbase/master/MasterRpcServices.java  |  17 ++
 .../hadoop/hbase/master/MasterServices.java |   9 ++
 .../procedure/PeerProcedureInterface.java   |   2 +-
 .../replication/ReplicationPeerManager.java |  51 +-
 ...ransitPeerSyncReplicationStateProcedure.java | 159 +++
 .../hbase/security/access/AccessController.java |   8 +
 .../replication/TestReplicationAdmin.java   |  62 
 .../hbase/master/MockNoopMasterServices.java|  11 +-
 .../cleaner/TestReplicationHFileCleaner.java|   4 +-
 .../TestReplicationTrackerZKImpl.java   |   6 +-
 .../TestReplicationSourceManager.java   |   3 +-
 .../security/access/TestAccessController.java   |  16 ++
 .../hbase/util/TestHBaseFsckReplication.java|   5 +-
 .../src/main/ruby/hbase/replication_admin.rb|  15 ++
 hbase-shell/src/main/ruby/shell.rb  |   1 +
 .../src/main/ruby/shell/commands/list_peers.rb  |   6 +-
 .../transit_peer_sync_replication_state.rb  |  44 +
 .../test/ruby/hbase/replication_admin_test.rb   |  24 +++
 40 files changed, 818 insertions(+), 55 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/45e25dc1/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index b8546fa..167d6f3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -52,6 +52,7 @@ import 
org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
 import org.apache.hadoop.hbase.replication.ReplicationException;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
 import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
+import org.apache.hadoop.hbase.replication.SyncReplicationState;
 import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
 import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
 import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
@@ -2648,6 +2649,44 @@ public interface Admin extends Abortable, Closeable {
   List listReplicationPeers(Pattern pattern) 
throws IOException;
 
   /**
+   * Transit current cluster to a new state in a synchronous replication peer.
+   * @param peerId a short name that identifies the peer
+   * @param state a new state of current cluster
+   * @throws IOException if a remote or network exception occurs
+   */
+  void transitReplicationPeerSyncReplicationState(String peerId, 
SyncReplicationState state)
+  throws IOException;
+
+  /**
+   * Transit current cluster to a new state in a synchronous replication peer. 
But does not block
+   * and wait for it.
+   * 
+   * You can use Future.get(long, TimeUnit) to wait on the 

[03/30] hbase git commit: HBASE-19972 Should rethrow the RetriesExhaustedWithDetailsException when failed to apply the batch in ReplicationSink; ADDENDUM to fix TestReplicationSink

2018-02-14 Thread zhangduo
HBASE-19972 Should rethrow the RetriesExhaustedWithDetailsException when failed 
to apply the batch in ReplicationSink; ADDENDUM to fix TestReplicationSink


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/00f88773
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/00f88773
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/00f88773

Branch: refs/heads/HBASE-19064
Commit: 00f88773239b96e256c585fae98d846e2b65b4a4
Parents: 1c67d8a
Author: Michael Stack 
Authored: Mon Feb 12 10:47:06 2018 -0800
Committer: Michael Stack 
Committed: Mon Feb 12 10:47:25 2018 -0800

--
 .../hbase/master/assignment/AssignProcedure.java   |  4 ++--
 .../replication/regionserver/TestReplicationSink.java  | 13 +++--
 2 files changed, 9 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/00f88773/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
index 801caf5..65eafe7 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
@@ -158,7 +158,7 @@ public class AssignProcedure extends 
RegionTransitionProcedure {
   LOG.info("Assigned, not reassigning; " + this + "; " + 
regionNode.toShortString());
   return false;
 }
-// Don't assign if table is in disabling of disabled state.
+// Don't assign if table is in disabling or disabled state.
 TableStateManager tsm = env.getMasterServices().getTableStateManager();
 TableName tn = regionNode.getRegionInfo().getTable();
 if (tsm.isTableState(tn, TableState.State.DISABLING, 
TableState.State.DISABLED)) {
@@ -166,7 +166,7 @@ public class AssignProcedure extends 
RegionTransitionProcedure {
   return false;
 }
 // If the region is SPLIT, we can't assign it. But state might be CLOSED, 
rather than
-// SPLIT which is what a region gets set to when Unassigned as part of 
SPLIT. FIX.
+// SPLIT which is what a region gets set to when unassigned as part of 
SPLIT. FIX.
 if (regionNode.isInState(State.SPLIT) ||
 (regionNode.getRegionInfo().isOffline() && 
regionNode.getRegionInfo().isSplit())) {
   LOG.info("SPLIT, cannot be assigned; " + this + "; " + regionNode +

http://git-wip-us.apache.org/repos/asf/hbase/blob/00f88773/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
index fcce84f..aa6c39c 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -317,8 +317,8 @@ public class TestReplicationSink {
 Path dir = TEST_UTIL.getDataTestDirOnTestFS("testReplicateEntries");
 Path familyDir = new Path(dir, Bytes.toString(FAM_NAME1));
 int numRows = 10;
-
 List p = new ArrayList<>(1);
+final String hfilePrefix = "hfile-";
 
 // 1. Generate 25 hfile ranges
 Random rng = new SecureRandom();
@@ -335,7 +335,7 @@ public class TestReplicationSink {
 FileSystem fs = dir.getFileSystem(conf);
 Iterator numbersItr = numberList.iterator();
 for (int i = 0; i < 25; i++) {
-  Path hfilePath = new Path(familyDir, "hfile_" + i);
+  Path hfilePath = new Path(familyDir, hfilePrefix + i);
   HFileTestUtil.createHFile(conf, fs, hfilePath, FAM_NAME1, FAM_NAME1,
 Bytes.toBytes(numbersItr.next()), Bytes.toBytes(numbersItr.next()), 
numRows);
   p.add(hfilePath);
@@ -370,10 +370,10 @@ public class TestReplicationSink {
   
.append(Bytes.toString(TABLE_NAME1.getName())).append(Path.SEPARATOR)
   
.append(Bytes.toString(loadDescriptor.getEncodedRegionName().toByteArray()))
   
.append(Path.SEPARATOR).append(Bytes.toString(FAM_NAME1)).append(Path.SEPARATOR)
-  .append("hfile_" + i).toString();
+  

[19/30] hbase git commit: HBASE-19781 Add a new cluster state flag for synchronous replication

2018-02-14 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/45e25dc1/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplication.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplication.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplication.java
index 8911982..f5eca39 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplication.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplication.java
@@ -28,6 +28,7 @@ import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
 import org.apache.hadoop.hbase.replication.ReplicationPeerStorage;
 import org.apache.hadoop.hbase.replication.ReplicationQueueStorage;
 import org.apache.hadoop.hbase.replication.ReplicationStorageFactory;
+import org.apache.hadoop.hbase.replication.SyncReplicationState;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.ReplicationTests;
 import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
@@ -67,9 +68,9 @@ public class TestHBaseFsckReplication {
 String peerId1 = "1";
 String peerId2 = "2";
 peerStorage.addPeer(peerId1, 
ReplicationPeerConfig.newBuilder().setClusterKey("key").build(),
-  true);
+  true, SyncReplicationState.NONE);
 peerStorage.addPeer(peerId2, 
ReplicationPeerConfig.newBuilder().setClusterKey("key").build(),
-  true);
+  true, SyncReplicationState.NONE);
 for (int i = 0; i < 10; i++) {
   queueStorage.addWAL(ServerName.valueOf("localhost", 1 + i, 10 + 
i), peerId1,
 "file-" + i);

http://git-wip-us.apache.org/repos/asf/hbase/blob/45e25dc1/hbase-shell/src/main/ruby/hbase/replication_admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/replication_admin.rb 
b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
index ba7d191..d5d4844 100644
--- a/hbase-shell/src/main/ruby/hbase/replication_admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
@@ -20,6 +20,7 @@
 include Java
 
 java_import 
org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil
+java_import org.apache.hadoop.hbase.replication.SyncReplicationState
 java_import org.apache.hadoop.hbase.replication.ReplicationPeerConfig
 java_import org.apache.hadoop.hbase.util.Bytes
 java_import org.apache.hadoop.hbase.zookeeper.ZKConfig
@@ -329,6 +330,20 @@ module Hbase
   '!' + ReplicationPeerConfigUtil.convertToString(tableCFs)
 end
 
+# Transit current cluster to a new state in the specified synchronous
+# replication peer
+def transit_peer_sync_replication_state(id, state)
+  if 'ACTIVE'.eql?(state)
+@admin.transitReplicationPeerSyncReplicationState(id, 
SyncReplicationState::ACTIVE)
+  elsif 'DOWNGRADE_ACTIVE'.eql?(state)
+@admin.transitReplicationPeerSyncReplicationState(id, 
SyncReplicationState::DOWNGRADE_ACTIVE)
+  elsif 'STANDBY'.eql?(state)
+@admin.transitReplicationPeerSyncReplicationState(id, 
SyncReplicationState::STANDBY)
+  else
+raise(ArgumentError, 'synchronous replication state must be ACTIVE, 
DOWNGRADE_ACTIVE or STANDBY')
+  end
+end
+
 
#--
 # Enables a table's replication switch
 def enable_tablerep(table_name)

http://git-wip-us.apache.org/repos/asf/hbase/blob/45e25dc1/hbase-shell/src/main/ruby/shell.rb
--
diff --git a/hbase-shell/src/main/ruby/shell.rb 
b/hbase-shell/src/main/ruby/shell.rb
index 507c0a9..0ed71ae 100644
--- a/hbase-shell/src/main/ruby/shell.rb
+++ b/hbase-shell/src/main/ruby/shell.rb
@@ -395,6 +395,7 @@ Shell.load_command_group(
 get_peer_config
 list_peer_configs
 update_peer_config
+transit_peer_sync_replication_state
   ]
 )
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/45e25dc1/hbase-shell/src/main/ruby/shell/commands/list_peers.rb
--
diff --git a/hbase-shell/src/main/ruby/shell/commands/list_peers.rb 
b/hbase-shell/src/main/ruby/shell/commands/list_peers.rb
index caeab86..aa10fda 100644
--- a/hbase-shell/src/main/ruby/shell/commands/list_peers.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/list_peers.rb
@@ -39,8 +39,8 @@ EOF
 peers = replication_admin.list_peers
 
 formatter.header(%w[PEER_ID CLUSTER_KEY ENDPOINT_CLASSNAME
-REMOTE_ROOT_DIR STATE REPLICATE_ALL
-NAMESPACES TABLE_CFS BANDWIDTH])
+REMOTE_ROOT_DIR SYNC_REPLICATION_STATE STATE
+REPLICATE_ALL NAMESPACES TABLE_CFS BANDWIDTH])
 
 

[23/30] hbase git commit: HBASE-19747 Introduce a special WALProvider for synchronous replication

2018-02-14 Thread zhangduo
HBASE-19747 Introduce a special WALProvider for synchronous replication


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e339f3b4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e339f3b4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e339f3b4

Branch: refs/heads/HBASE-19064
Commit: e339f3b44146c0e01cc68c708d79dda1fb6bb971
Parents: 0d59832
Author: zhangduo 
Authored: Fri Jan 19 18:38:39 2018 +0800
Committer: zhangduo 
Committed: Thu Feb 15 12:30:26 2018 +0800

--
 .../hbase/regionserver/wal/AbstractFSWAL.java   |   7 +
 .../hbase/regionserver/wal/AsyncFSWAL.java  |   1 -
 .../hbase/regionserver/wal/DualAsyncFSWAL.java  |   4 +-
 .../hadoop/hbase/regionserver/wal/FSHLog.java   |   3 -
 .../regionserver/PeerActionListener.java|  33 +++
 .../SynchronousReplicationPeerProvider.java |  35 +++
 .../hadoop/hbase/wal/AbstractFSWALProvider.java |   1 +
 .../hadoop/hbase/wal/AsyncFSWALProvider.java|  18 +-
 .../hbase/wal/NettyAsyncFSWALConfigHelper.java  |   8 +-
 .../hbase/wal/RegionGroupingProvider.java   |  13 +-
 .../wal/SynchronousReplicationWALProvider.java  | 225 +++
 .../org/apache/hadoop/hbase/wal/WALFactory.java |  37 ++-
 .../org/apache/hadoop/hbase/wal/WALKeyImpl.java |  16 +-
 .../regionserver/TestCompactionPolicy.java  |   1 +
 .../regionserver/TestFailedAppendAndSync.java   | 122 +-
 .../hadoop/hbase/regionserver/TestHRegion.java  |  24 +-
 .../TestHRegionWithInMemoryFlush.java   |   7 -
 .../hbase/regionserver/TestRegionIncrement.java |  20 +-
 .../hbase/regionserver/TestWALLockup.java   |   1 +
 .../regionserver/wal/AbstractTestWALReplay.java |   1 +
 .../regionserver/wal/ProtobufLogTestHelper.java |  44 +++-
 .../hbase/regionserver/wal/TestAsyncFSWAL.java  |  13 +-
 .../regionserver/wal/TestAsyncWALReplay.java|   4 +-
 .../wal/TestCombinedAsyncWriter.java|   3 +-
 .../hbase/regionserver/wal/TestFSHLog.java  |  15 +-
 .../hbase/regionserver/wal/TestWALReplay.java   |   1 +
 .../apache/hadoop/hbase/wal/IOTestProvider.java |   2 -
 .../TestSynchronousReplicationWALProvider.java  | 153 +
 28 files changed, 659 insertions(+), 153 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e339f3b4/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
index 14fbe10..31b1c54 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
@@ -430,6 +430,13 @@ public abstract class AbstractFSWAL 
implements WAL {
 this.implClassName = getClass().getSimpleName();
   }
 
+  /**
+   * Used to initialize the WAL. Usually just call rollWriter to create the 
first log writer.
+   */
+  public void init() throws IOException {
+rollWriter();
+  }
+
   @Override
   public void registerWALActionsListener(WALActionsListener listener) {
 this.listeners.add(listener);

http://git-wip-us.apache.org/repos/asf/hbase/blob/e339f3b4/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
index 8e57441..ac72dc7 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
@@ -247,7 +247,6 @@ public class AsyncFSWAL extends AbstractFSWAL {
 batchSize = conf.getLong(WAL_BATCH_SIZE, DEFAULT_WAL_BATCH_SIZE);
 waitOnShutdownInSeconds = 
conf.getInt(ASYNC_WAL_WAIT_ON_SHUTDOWN_IN_SECONDS,
   DEFAULT_ASYNC_WAL_WAIT_ON_SHUTDOWN_IN_SECONDS);
-rollWriter();
   }
 
   private static boolean waitingRoll(int epochAndState) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/e339f3b4/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/DualAsyncFSWAL.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/DualAsyncFSWAL.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/DualAsyncFSWAL.java
index 6bf9e02..f92ce93 100644
--- 

[28/30] hbase git commit: HBASE-19078 Add a remote peer cluster wal directory config for synchronous replication

2018-02-14 Thread zhangduo
HBASE-19078 Add a remote peer cluster wal directory config for synchronous 
replication

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0d598326
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0d598326
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0d598326

Branch: refs/heads/HBASE-19064
Commit: 0d59832653187a12c69625f862a8e1c130a0aa94
Parents: d93877b
Author: Guanghao Zhang 
Authored: Sat Jan 13 18:55:28 2018 +0800
Committer: zhangduo 
Committed: Thu Feb 15 12:30:26 2018 +0800

--
 .../replication/ReplicationPeerConfigUtil.java  |  6 ++
 .../replication/ReplicationPeerConfig.java  | 21 +-
 .../ReplicationPeerConfigBuilder.java   |  7 ++
 .../src/main/protobuf/Replication.proto |  1 +
 .../replication/ReplicationPeerManager.java | 15 
 .../replication/TestReplicationAdmin.java   | 77 
 .../src/main/ruby/hbase/replication_admin.rb| 17 +++--
 hbase-shell/src/main/ruby/hbase_constants.rb|  1 +
 .../src/main/ruby/shell/commands/add_peer.rb| 21 +-
 .../src/main/ruby/shell/commands/list_peers.rb  | 19 -
 .../test/ruby/hbase/replication_admin_test.rb   | 16 
 11 files changed, 188 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0d598326/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
index a234a9b..642149b 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
@@ -315,6 +315,9 @@ public final class ReplicationPeerConfigUtil {
 
excludeNamespacesList.stream().map(ByteString::toStringUtf8).collect(Collectors.toSet()));
 }
 
+if (peer.hasRemoteWALDir()) {
+  builder.setRemoteWALDir(peer.getRemoteWALDir());
+}
 return builder.build();
   }
 
@@ -371,6 +374,9 @@ public final class ReplicationPeerConfigUtil {
   }
 }
 
+if (peerConfig.getRemoteWALDir() != null) {
+  builder.setRemoteWALDir(peerConfig.getRemoteWALDir());
+}
 return builder.build();
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/0d598326/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
index bf8d030..4c10c46 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
@@ -46,6 +46,8 @@ public class ReplicationPeerConfig {
   private Map excludeTableCFsMap = 
null;
   private Set excludeNamespaces = null;
   private long bandwidth = 0;
+  // Used by synchronous replication
+  private String remoteWALDir;
 
   private ReplicationPeerConfig(ReplicationPeerConfigBuilderImpl builder) {
 this.clusterKey = builder.clusterKey;
@@ -64,6 +66,7 @@ public class ReplicationPeerConfig {
 builder.excludeNamespaces != null ? 
Collections.unmodifiableSet(builder.excludeNamespaces)
 : null;
 this.bandwidth = builder.bandwidth;
+this.remoteWALDir = builder.remoteWALDir;
   }
 
   private Map
@@ -210,6 +213,10 @@ public class ReplicationPeerConfig {
 return this;
   }
 
+  public String getRemoteWALDir() {
+return this.remoteWALDir;
+  }
+
   public static ReplicationPeerConfigBuilder newBuilder() {
 return new ReplicationPeerConfigBuilderImpl();
   }
@@ -223,7 +230,8 @@ public class ReplicationPeerConfig {
 .setReplicateAllUserTables(peerConfig.replicateAllUserTables())
 .setExcludeTableCFsMap(peerConfig.getExcludeTableCFsMap())
 .setExcludeNamespaces(peerConfig.getExcludeNamespaces())
-.setBandwidth(peerConfig.getBandwidth());
+.setBandwidth(peerConfig.getBandwidth())
+.setRemoteWALDir(peerConfig.getRemoteWALDir());
 return builder;
   }
 
@@ -250,6 +258,8 @@ public class ReplicationPeerConfig {
 
 private long bandwidth = 0;
 
+private String remoteWALDir = null;
+
 @Override
 public 

[21/30] hbase git commit: HBASE-19857 Complete the procedure for adding a sync replication peer

2018-02-14 Thread zhangduo
HBASE-19857 Complete the procedure for adding a sync replication peer


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/eb254a10
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/eb254a10
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/eb254a10

Branch: refs/heads/HBASE-19064
Commit: eb254a10c5c8b135aba9d87534c1ac1de12adb2d
Parents: 45e25dc
Author: zhangduo 
Authored: Thu Jan 25 20:09:00 2018 +0800
Committer: zhangduo 
Committed: Thu Feb 15 12:30:26 2018 +0800

--
 .../hbase/replication/ReplicationPeer.java  |   9 +
 .../hbase/replication/ReplicationPeerImpl.java  |  28 +--
 .../hbase/replication/ReplicationPeers.java |   3 +-
 .../regionserver/PeerActionListener.java|  10 +-
 .../SyncReplicationPeerProvider.java|  35 +++
 .../SynchronousReplicationPeerProvider.java |  35 ---
 .../hbase/wal/SyncReplicationWALProvider.java   | 234 +++
 .../wal/SynchronousReplicationWALProvider.java  | 225 --
 .../org/apache/hadoop/hbase/wal/WALFactory.java |   8 +-
 .../TestReplicationSourceManager.java   |   3 +
 .../wal/TestSyncReplicationWALProvider.java | 153 
 .../TestSynchronousReplicationWALProvider.java  | 153 
 12 files changed, 456 insertions(+), 440 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/eb254a10/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
index 2da3cce..0196a9a 100644
--- 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
@@ -54,6 +54,15 @@ public interface ReplicationPeer {
   PeerState getPeerState();
 
   /**
+   * Returns the sync replication state of the peer by reading local cache.
+   * 
+   * If the peer is not a synchronous replication peer, a {@link 
SyncReplicationState#NONE} will be
+   * returned.
+   * @return the sync replication state
+   */
+  SyncReplicationState getSyncReplicationState();
+
+  /**
* Test whether the peer is enabled.
* @return {@code true} if enabled, otherwise {@code false}.
*/

http://git-wip-us.apache.org/repos/asf/hbase/blob/eb254a10/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerImpl.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerImpl.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerImpl.java
index d656466..ff3f662 100644
--- 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerImpl.java
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerImpl.java
@@ -36,6 +36,8 @@ public class ReplicationPeerImpl implements ReplicationPeer {
 
   private volatile PeerState peerState;
 
+  private volatile SyncReplicationState syncReplicationState;
+
   private final List peerConfigListeners;
 
   /**
@@ -45,12 +47,13 @@ public class ReplicationPeerImpl implements ReplicationPeer 
{
* @param id string representation of this peer's identifier
* @param peerConfig configuration for the replication peer
*/
-  public ReplicationPeerImpl(Configuration conf, String id, boolean peerState,
-  ReplicationPeerConfig peerConfig) {
+  public ReplicationPeerImpl(Configuration conf, String id, 
ReplicationPeerConfig peerConfig,
+  boolean peerState, SyncReplicationState syncReplicationState) {
 this.conf = conf;
 this.id = id;
 this.peerState = peerState ? PeerState.ENABLED : PeerState.DISABLED;
 this.peerConfig = peerConfig;
+this.syncReplicationState = syncReplicationState;
 this.peerConfigListeners = new ArrayList<>();
   }
 
@@ -77,37 +80,26 @@ public class ReplicationPeerImpl implements ReplicationPeer 
{
 return peerState;
   }
 
-  /**
-   * Get the peer config object
-   * @return the ReplicationPeerConfig for this peer
-   */
+  @Override
+  public SyncReplicationState getSyncReplicationState() {
+return syncReplicationState;
+  }
+
   @Override
   public ReplicationPeerConfig getPeerConfig() {
 return peerConfig;
   }
 
-  /**
-   * Get the configuration object required to communicate with this peer
-   * @return configuration object
-   */
   @Override
   public Configuration getConfiguration() {
 return conf;
   }
 
-  /**
- 

[06/30] hbase git commit: HBASE-19986 If HBaseTestClassRule timesout a test, thread dump; ADDENDUM; white-space, checkstyle, and rb feedback by Duo

2018-02-14 Thread zhangduo
 HBASE-19986 If HBaseTestClassRule timesout a test, thread dump; ADDENDUM; 
white-space, checkstyle, and rb feedback by Duo


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/24bed6b3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/24bed6b3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/24bed6b3

Branch: refs/heads/HBASE-19064
Commit: 24bed6b3fbb54a20ef67201c160de649421cdd51
Parents: 7cc239f
Author: Michael Stack 
Authored: Mon Feb 12 18:12:08 2018 -0800
Committer: Michael Stack 
Committed: Mon Feb 12 18:12:24 2018 -0800

--
 .../apache/hadoop/hbase/HBaseClassTestRule.java |  2 +-
 .../org/apache/hadoop/hbase/TestTimeout.java|  9 +++-
 .../hadoop/hbase/TimedOutTestsListener.java | 22 ++--
 3 files changed, 15 insertions(+), 18 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/24bed6b3/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRule.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRule.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRule.java
index b964872..d47b213 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRule.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRule.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

http://git-wip-us.apache.org/repos/asf/hbase/blob/24bed6b3/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java
index 495667c..d547ddb 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java
@@ -33,7 +33,7 @@ public class TestTimeout {
 
 @Test
 public void run1() throws InterruptedException {
-Thread.sleep(100);
+  Thread.sleep(100);
 }
 
 /**
@@ -46,14 +46,11 @@ public class TestTimeout {
   Thread t = new Thread("HangingThread") {
 public void run() {
   synchronized(this) {
-while(true) {
-}
+while(true) {}
   }
 }
   };
   t.start();
-  while (true) {
-// Just hang out too.
-  }
+  while (true) {}
}
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/24bed6b3/hbase-common/src/test/java/org/apache/hadoop/hbase/TimedOutTestsListener.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/TimedOutTestsListener.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TimedOutTestsListener.java
index d5c87f3..643058c 100644
--- 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/TimedOutTestsListener.java
+++ 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TimedOutTestsListener.java
@@ -40,15 +40,15 @@ import org.junit.runner.notification.RunListener;
 public class TimedOutTestsListener extends RunListener {
 
   static final String TEST_TIMED_OUT_PREFIX = "test timed out after";
-  
+
   private static String INDENT = "";
 
   private final PrintWriter output;
-  
+
   public TimedOutTestsListener() {
 this.output = new PrintWriter(System.err);
   }
-  
+
   public TimedOutTestsListener(PrintWriter output) {
 this.output = output;
   }
@@ -63,16 +63,16 @@ public class TimedOutTestsListener extends RunListener {
 }
 output.flush();
   }
-  
+
   public static String buildThreadDiagnosticString() {
 StringWriter sw = new StringWriter();
 PrintWriter output = new PrintWriter(sw);
-
+
 DateFormat dateFormat = new SimpleDateFormat("-MM-dd hh:mm:ss,SSS");
 output.println(String.format("Timestamp: %s", dateFormat.format(new 
Date(;
 output.println();
 output.println(buildThreadDump());
-
+
 String deadlocksInfo = buildDeadlockInfo();
 if (deadlocksInfo != null) {
   output.println("> DEADLOCKS DETECTED <");
@@ -106,28 +106,28 @@ public class TimedOutTestsListener extends RunListener {
 }
 return dump.toString();
   }
-  
+
   static String buildDeadlockInfo() {
 ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
 long[] threadIds = threadBean.findMonitorDeadlockedThreads();
 if (threadIds != null && threadIds.length > 0) {
   StringWriter stringWriter = new 

[10/30] hbase git commit: HBASE-19977 FileMmapEngine allocation of byte buffers should be synchronized (Ram)

2018-02-14 Thread zhangduo
HBASE-19977 FileMmapEngine allocation of byte buffers should be
synchronized (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/16f1f5b4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/16f1f5b4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/16f1f5b4

Branch: refs/heads/HBASE-19064
Commit: 16f1f5b49424fcabc9b5c10882dab4f5bf7fa84b
Parents: b4622ff
Author: Vasudevan 
Authored: Tue Feb 13 15:49:37 2018 +0530
Committer: Vasudevan 
Committed: Tue Feb 13 15:49:37 2018 +0530

--
 .../apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java  | 8 
 1 file changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/16f1f5b4/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java
index e2f0191..82f42cd 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.io.RandomAccessFile;
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
@@ -69,12 +70,11 @@ public class FileMmapEngine implements IOEngine {
   throw ioex;
 }
 ByteBufferAllocator allocator = new ByteBufferAllocator() {
-  int pos = 0;
+  AtomicInteger pos = new AtomicInteger(0);
   @Override
   public ByteBuffer allocate(long size) throws IOException {
 ByteBuffer buffer = 
fileChannel.map(java.nio.channels.FileChannel.MapMode.READ_WRITE,
-pos * size, size);
-pos++;
+  pos.getAndIncrement() * size, size);
 return buffer;
   }
 };
@@ -106,7 +106,7 @@ public class FileMmapEngine implements IOEngine {
 byte[] dst = new byte[length];
 bufferArray.getMultiple(offset, length, dst);
 return deserializer.deserialize(new SingleByteBuff(ByteBuffer.wrap(dst)), 
true,
-MemoryType.EXCLUSIVE);
+  MemoryType.EXCLUSIVE);
   }
 
   /**



[01/30] hbase git commit: HBASE-16060 1.x clients cannot access table state talking to 2.0 cluster [Forced Update!]

2018-02-14 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/HBASE-19064 e7d56a3ef -> 33d0606a2 (forced update)


HBASE-16060 1.x clients cannot access table state talking to 2.0 cluster

This patch adds mirroring of table state out to zookeeper. HBase-1.x
clients look for table state in zookeeper, not in hbase:meta where
hbase-2.x maintains table state.

The patch also moves and refactors the 'migration' code that was put in
place by HBASE-13032.

D 
hbase-client/src/main/java/org/apache/hadoop/hbase/CoordinatedStateException.java
 Unused.

M hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
 Move table state migration code from Master startup out to
TableStateManager where it belongs. Also start
MirroringTableStateManager dependent on config.

A 
hbase-server/src/main/java/org/apache/hadoop/hbase/master/MirroringTableStateManager.java

M 
hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
 Move migration from zookeeper of table state in here. Also plumb in
mechanism so subclass can get a chance to look at table state as we do
the startup fixup full-table scan of meta.

M 
hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
 Bug-fix. Now we create regions in CLOSED state but we fail to check
table state; were presuming table always enabled. Meant on startup
there'd be an unassigned region that never got assigned.

A 
hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMirroringTableStateManager.java
 Test migration and mirroring.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/67b69fb2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/67b69fb2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/67b69fb2

Branch: refs/heads/HBASE-19064
Commit: 67b69fb2c70d3a56ac45f59d57b7f2778094a566
Parents: 8ff783f
Author: Michael Stack 
Authored: Thu Feb 8 00:19:06 2018 -0800
Committer: Michael Stack 
Committed: Mon Feb 12 08:47:02 2018 -0800

--
 .../hadoop/hbase/CoordinatedStateException.java |  46 ---
 .../hadoop/hbase/zookeeper/ZNodePaths.java  |   3 +
 .../src/main/protobuf/ZooKeeper.proto   |   2 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |  27 ++--
 .../hbase/master/MasterMetaBootstrap.java   |   3 +
 .../master/MirroringTableStateManager.java  | 109 +++
 .../hadoop/hbase/master/TableStateManager.java  | 132 ---
 .../master/assignment/AssignmentManager.java|  11 +-
 .../master/procedure/DeleteTableProcedure.java  |   5 +-
 .../master/procedure/ProcedureSyncWait.java |   3 +-
 .../procedure/TruncateTableProcedure.java   |   5 +-
 .../hadoop/hbase/util/ZKDataMigrator.java   |  11 +-
 .../hbase/master/TestMasterNoCluster.java   |   3 +-
 .../master/TestMirroringTableStateManager.java  | 105 +++
 .../hbase/master/TestTableStateManager.java |  70 +-
 15 files changed, 405 insertions(+), 130 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/67b69fb2/hbase-client/src/main/java/org/apache/hadoop/hbase/CoordinatedStateException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/CoordinatedStateException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/CoordinatedStateException.java
deleted file mode 100644
index fc0c4bc..000
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/CoordinatedStateException.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.exceptions.HBaseException;
-
-/**
- * Thrown by operations requiring coordination state access or manipulation
- * when internal error within coordination engine (or other internal 
implementation) occurs.
- */
-@InterfaceAudience.Private
-@SuppressWarnings("serial")
-public class 

[18/30] hbase git commit: HBASE-19980 NullPointerException when restoring a snapshot after splitting a region

2018-02-14 Thread zhangduo
HBASE-19980 NullPointerException when restoring a snapshot after splitting a 
region

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d0f2d18c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d0f2d18c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d0f2d18c

Branch: refs/heads/HBASE-19064
Commit: d0f2d18ca73737764550b319f749a51c876cca39
Parents: 8d26736
Author: Toshihiro Suzuki 
Authored: Wed Feb 14 19:55:59 2018 +0900
Committer: tedyu 
Committed: Wed Feb 14 09:37:16 2018 -0800

--
 .../hbase/snapshot/RestoreSnapshotHelper.java   | 89 
 .../client/TestRestoreSnapshotFromClient.java   | 20 +
 2 files changed, 73 insertions(+), 36 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d0f2d18c/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
index 404f8ff..c4f0e25 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
@@ -195,11 +195,33 @@ public class RestoreSnapshotHelper {
 // this instance, by removing the regions already present in the restore 
dir.
 Set regionNames = new HashSet<>(regionManifests.keySet());
 
+List tableRegions = getTableRegions();
+
 RegionInfo mobRegion = 
MobUtils.getMobRegionInfo(snapshotManifest.getTableDescriptor()
 .getTableName());
+if (tableRegions != null) {
+  // restore the mob region in case
+  if (regionNames.contains(mobRegion.getEncodedName())) {
+monitor.rethrowException();
+status.setStatus("Restoring mob region...");
+List mobRegions = new ArrayList<>(1);
+mobRegions.add(mobRegion);
+restoreHdfsMobRegions(exec, regionManifests, mobRegions);
+regionNames.remove(mobRegion.getEncodedName());
+status.setStatus("Finished restoring mob region.");
+  }
+}
+if (regionNames.contains(mobRegion.getEncodedName())) {
+  // add the mob region
+  monitor.rethrowException();
+  status.setStatus("Cloning mob region...");
+  cloneHdfsMobRegion(regionManifests, mobRegion);
+  regionNames.remove(mobRegion.getEncodedName());
+  status.setStatus("Finished cloning mob region.");
+}
+
 // Identify which region are still available and which not.
 // NOTE: we rely upon the region name as: "table name, start key, end key"
-List tableRegions = getTableRegions();
 if (tableRegions != null) {
   monitor.rethrowException();
   for (RegionInfo regionInfo: tableRegions) {
@@ -213,50 +235,40 @@ public class RestoreSnapshotHelper {
   metaChanges.addRegionToRemove(regionInfo);
 }
   }
-
-  // Restore regions using the snapshot data
-  monitor.rethrowException();
-  status.setStatus("Restoring table regions...");
-  if (regionNames.contains(mobRegion.getEncodedName())) {
-// restore the mob region in case
-List mobRegions = new ArrayList<>(1);
-mobRegions.add(mobRegion);
-restoreHdfsMobRegions(exec, regionManifests, mobRegions);
-regionNames.remove(mobRegion.getEncodedName());
-  }
-  restoreHdfsRegions(exec, regionManifests, 
metaChanges.getRegionsToRestore());
-  status.setStatus("Finished restoring all table regions.");
-
-  // Remove regions from the current table
-  monitor.rethrowException();
-  status.setStatus("Starting to delete excess regions from table");
-  removeHdfsRegions(exec, metaChanges.getRegionsToRemove());
-  status.setStatus("Finished deleting excess regions from table.");
 }
 
 // Regions to Add: present in the snapshot but not in the current table
+List regionsToAdd = new ArrayList<>(regionNames.size());
 if (regionNames.size() > 0) {
-  List regionsToAdd = new ArrayList<>(regionNames.size());
-
   monitor.rethrowException();
-  // add the mob region
-  if (regionNames.contains(mobRegion.getEncodedName())) {
-cloneHdfsMobRegion(regionManifests, mobRegion);
-regionNames.remove(mobRegion.getEncodedName());
-  }
   for (String regionName: regionNames) {
 LOG.info("region to add: " + regionName);
-
regionsToAdd.add(ProtobufUtil.toRegionInfo(regionManifests.get(regionName).getRegionInfo()));
+
regionsToAdd.add(ProtobufUtil.toRegionInfo(regionManifests.get(regionName)
+   

[02/30] hbase git commit: HBASE-19968 MapReduce test fails with NoClassDefFoundError against hadoop3

2018-02-14 Thread zhangduo
HBASE-19968 MapReduce test fails with NoClassDefFoundError against hadoop3


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1c67d8a4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1c67d8a4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1c67d8a4

Branch: refs/heads/HBASE-19064
Commit: 1c67d8a46f644275484d0ae3554cb892e81882ba
Parents: 67b69fb
Author: tedyu 
Authored: Mon Feb 12 09:54:00 2018 -0800
Committer: tedyu 
Committed: Mon Feb 12 09:54:00 2018 -0800

--
 hbase-mapreduce/pom.xml | 4 
 1 file changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1c67d8a4/hbase-mapreduce/pom.xml
--
diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml
index 4a416dc..76dd015 100644
--- a/hbase-mapreduce/pom.xml
+++ b/hbase-mapreduce/pom.xml
@@ -426,6 +426,10 @@
   hadoop-common
 
 
+  org.apache.hadoop
+  hadoop-hdfs
+
+
   
   org.apache.hadoop
   hadoop-minicluster



[12/30] hbase git commit: Revert "HBASE-19970 Remove unused functions from TableAuthManager."

2018-02-14 Thread zhangduo
Revert "HBASE-19970 Remove unused functions from TableAuthManager."

This reverts commit 7cc239fb5ac0ce3f22d93d1dbf7e80609427710a.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ba402b1e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ba402b1e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ba402b1e

Branch: refs/heads/HBASE-19064
Commit: ba402b1e7b446144d4d20f90cb71e6aa19ecce3c
Parents: 2f48fdb
Author: Michael Stack 
Authored: Tue Feb 13 06:19:08 2018 -0800
Committer: Michael Stack 
Committed: Tue Feb 13 06:19:08 2018 -0800

--
 .../security/access/AccessControlLists.java |  3 +-
 .../hbase/security/access/AccessController.java |  6 +-
 .../hbase/security/access/TableAuthManager.java | 75 
 .../security/access/TestTablePermissions.java   |  2 +-
 .../access/TestZKPermissionWatcher.java | 55 +++---
 5 files changed, 108 insertions(+), 33 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ba402b1e/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
index 663d0c5..b0f33bd 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
@@ -644,7 +644,8 @@ public class AccessControlLists {
*
* Writes a set of permission [user: table permission]
*/
-  public static byte[] writePermissionsAsBytes(ListMultimap perms) {
+  public static byte[] writePermissionsAsBytes(ListMultimap perms,
+  Configuration conf) {
 return 
ProtobufUtil.prependPBMagic(AccessControlUtil.toUserTablePermissions(perms).toByteArray());
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/ba402b1e/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 3ac92b8..1fbf01d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -247,7 +247,7 @@ public class AccessController implements MasterCoprocessor, 
RegionCoprocessor,
   tables.entrySet()) {
   byte[] entry = t.getKey();
   ListMultimap perms = t.getValue();
-  byte[] serialized = AccessControlLists.writePermissionsAsBytes(perms);
+  byte[] serialized = AccessControlLists.writePermissionsAsBytes(perms, 
conf);
   getAuthManager().getZKPermissionWatcher().writeToZookeeper(entry, 
serialized);
 }
 initialized = true;
@@ -284,7 +284,7 @@ public class AccessController implements MasterCoprocessor, 
RegionCoprocessor,
 currentEntry = entry;
 ListMultimap perms =
 AccessControlLists.getPermissions(conf, entry, t);
-byte[] serialized = AccessControlLists.writePermissionsAsBytes(perms);
+byte[] serialized = AccessControlLists.writePermissionsAsBytes(perms, 
conf);
 zkw.writeToZookeeper(entry, serialized);
   }
 } catch(IOException ex) {
@@ -2456,7 +2456,7 @@ public class AccessController implements 
MasterCoprocessor, RegionCoprocessor,
   throws IOException {
 requirePermission(ctx, "replicateLogEntries", Action.WRITE);
   }
-
+  
   @Override
   public void  
preClearCompactionQueues(ObserverContext 
ctx)
   throws IOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/ba402b1e/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
index fdfd5c8..76feff4 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
@@ -656,6 +656,81 @@ public class TableAuthManager implements Closeable {
 tableCache.remove(table);
   }
 
+  

[05/30] hbase git commit: HBASE-19970 Remove unused functions from TableAuthManager.

2018-02-14 Thread zhangduo
HBASE-19970 Remove unused functions from TableAuthManager.

Functions deleted: setTableUserPermissions, setTableGroupPermissions, 
setNamespaceUserPermissions,
setNamespaceGroupPermissions, writeTableToZooKeeper, writeNamespaceToZooKeeper


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7cc239fb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7cc239fb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7cc239fb

Branch: refs/heads/HBASE-19064
Commit: 7cc239fb5ac0ce3f22d93d1dbf7e80609427710a
Parents: c2ee82c
Author: Apekshit Sharma 
Authored: Fri Feb 9 18:32:20 2018 -0800
Committer: Apekshit Sharma 
Committed: Mon Feb 12 16:01:29 2018 -0800

--
 .../security/access/AccessControlLists.java |  3 +-
 .../hbase/security/access/AccessController.java |  6 +-
 .../hbase/security/access/TableAuthManager.java | 75 
 .../security/access/TestTablePermissions.java   |  2 +-
 .../access/TestZKPermissionWatcher.java | 55 +++---
 5 files changed, 33 insertions(+), 108 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7cc239fb/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
index b0f33bd..663d0c5 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
@@ -644,8 +644,7 @@ public class AccessControlLists {
*
* Writes a set of permission [user: table permission]
*/
-  public static byte[] writePermissionsAsBytes(ListMultimap perms,
-  Configuration conf) {
+  public static byte[] writePermissionsAsBytes(ListMultimap perms) {
 return 
ProtobufUtil.prependPBMagic(AccessControlUtil.toUserTablePermissions(perms).toByteArray());
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/7cc239fb/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 1fbf01d..3ac92b8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -247,7 +247,7 @@ public class AccessController implements MasterCoprocessor, 
RegionCoprocessor,
   tables.entrySet()) {
   byte[] entry = t.getKey();
   ListMultimap perms = t.getValue();
-  byte[] serialized = AccessControlLists.writePermissionsAsBytes(perms, 
conf);
+  byte[] serialized = AccessControlLists.writePermissionsAsBytes(perms);
   getAuthManager().getZKPermissionWatcher().writeToZookeeper(entry, 
serialized);
 }
 initialized = true;
@@ -284,7 +284,7 @@ public class AccessController implements MasterCoprocessor, 
RegionCoprocessor,
 currentEntry = entry;
 ListMultimap perms =
 AccessControlLists.getPermissions(conf, entry, t);
-byte[] serialized = AccessControlLists.writePermissionsAsBytes(perms, 
conf);
+byte[] serialized = AccessControlLists.writePermissionsAsBytes(perms);
 zkw.writeToZookeeper(entry, serialized);
   }
 } catch(IOException ex) {
@@ -2456,7 +2456,7 @@ public class AccessController implements 
MasterCoprocessor, RegionCoprocessor,
   throws IOException {
 requirePermission(ctx, "replicateLogEntries", Action.WRITE);
   }
-  
+
   @Override
   public void  
preClearCompactionQueues(ObserverContext 
ctx)
   throws IOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/7cc239fb/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
index 76feff4..fdfd5c8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
@@ 

[30/30] hbase git commit: HBASE-19990 Create remote wal directory when transiting to state S

2018-02-14 Thread zhangduo
HBASE-19990 Create remote wal directory when transiting to state S


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/33d0606a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/33d0606a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/33d0606a

Branch: refs/heads/HBASE-19064
Commit: 33d0606a24feb6aa9103ddc1f0dd2b483cb8d931
Parents: 19e90b1
Author: zhangduo 
Authored: Wed Feb 14 16:01:16 2018 +0800
Committer: zhangduo 
Committed: Thu Feb 15 12:32:47 2018 +0800

--
 .../procedure2/ProcedureYieldException.java |  9 --
 .../hbase/replication/ReplicationUtils.java |  2 ++
 .../hadoop/hbase/master/MasterFileSystem.java   | 23 
 .../master/procedure/MasterProcedureEnv.java|  5 
 ...ransitPeerSyncReplicationStateProcedure.java | 29 
 .../hbase/replication/TestSyncReplication.java  |  8 ++
 6 files changed, 58 insertions(+), 18 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/33d0606a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureYieldException.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureYieldException.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureYieldException.java
index 0487ac5b..dbb9981 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureYieldException.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureYieldException.java
@@ -15,16 +15,21 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.procedure2;
 
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
 
-// TODO: Not used yet
+/**
+ * Indicate that a procedure wants to be rescheduled. Usually because there 
is something wrong but
+ * we do not want to fail the procedure.
+ * 
+ * TODO: need to support scheduling after a delay.
+ */
 @InterfaceAudience.Private
 @InterfaceStability.Stable
 public class ProcedureYieldException extends ProcedureException {
+
   /** default constructor */
   public ProcedureYieldException() {
 super();

http://git-wip-us.apache.org/repos/asf/hbase/blob/33d0606a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
index 9d0c05d..9699e7b 100644
--- 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
@@ -41,6 +41,8 @@ public final class ReplicationUtils {
 
   public static final String REPLICATION_ATTR_NAME = "__rep__";
 
+  public static final String REMOTE_WAL_DIR_NAME = "remoteWALs";
+
   private ReplicationUtils() {
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/33d0606a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
index a37fd4e..9bb8858 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.log.HBaseMarkers;
 import org.apache.hadoop.hbase.mob.MobConstants;
 import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
 import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.replication.ReplicationUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSTableDescriptors;
 import org.apache.hadoop.hbase.util.FSUtils;
@@ -133,7 +134,6 @@ public class MasterFileSystem {
* Idempotent.
*/
   private void createInitialFileSystemLayout() throws IOException {
-
 final String[] protectedSubDirs = new String[] {
 HConstants.BASE_NAMESPACE_DIR,
 HConstants.HFILE_ARCHIVE_DIRECTORY,
@@ -145,7 +145,8 @@ public class MasterFileSystem {
   HConstants.HREGION_LOGDIR_NAME,
   HConstants.HREGION_OLDLOGDIR_NAME,
   HConstants.CORRUPT_DIR_NAME,
-  WALProcedureStore.MASTER_PROCEDURE_LOGDIR
+  

[14/30] hbase git commit: HBASE-19979 ReplicationSyncUp tool may leak Zookeeper connection

2018-02-14 Thread zhangduo
HBASE-19979 ReplicationSyncUp tool may leak Zookeeper connection


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/39e191e5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/39e191e5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/39e191e5

Branch: refs/heads/HBASE-19064
Commit: 39e191e5598529c68007c96e69acdd923a294d33
Parents: 8e8e1e5
Author: Pankaj Kumar 
Authored: Tue Feb 13 08:59:02 2018 -0800
Committer: Michael Stack 
Committed: Tue Feb 13 08:59:02 2018 -0800

--
 .../hbase/replication/regionserver/ReplicationSyncUp.java  | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/39e191e5/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
index 01a230d..283eb96 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
@@ -111,14 +111,14 @@ public class ReplicationSyncUp extends Configured 
implements Tool {
   while (manager.getOldSources().size() > 0) {
 Thread.sleep(SLEEP_TIME);
   }
+  manager.join();
 } catch (InterruptedException e) {
   System.err.println("didn't wait long enough:" + e);
   return (-1);
+} finally {
+  zkw.close();
 }
 
-manager.join();
-zkw.close();
-
 return 0;
   }
 



[13/30] hbase git commit: HBASE-19844 Shell should support to flush by regionserver

2018-02-14 Thread zhangduo
HBASE-19844 Shell should support to flush by regionserver

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8e8e1e5a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8e8e1e5a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8e8e1e5a

Branch: refs/heads/HBASE-19064
Commit: 8e8e1e5a1bbb240a6f4a71bc8b0271d31da633b3
Parents: ba402b1
Author: Reid Chan 
Authored: Tue Feb 13 14:32:16 2018 +0800
Committer: tedyu 
Committed: Tue Feb 13 08:20:54 2018 -0800

--
 hbase-shell/src/main/ruby/hbase/admin.rb| 21 +++-
 .../src/main/ruby/shell/commands/flush.rb   |  5 -
 hbase-shell/src/test/ruby/hbase/admin_test.rb   |  4 
 3 files changed, 24 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8e8e1e5a/hbase-shell/src/main/ruby/hbase/admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb 
b/hbase-shell/src/main/ruby/hbase/admin.rb
index 0102118..f524380 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -50,12 +50,17 @@ module Hbase
 end
 
 
#--
-# Requests a table or region flush
-def flush(table_or_region_name)
-  @admin.flushRegion(table_or_region_name.to_java_bytes)
-rescue java.lang.IllegalArgumentException => e
+# Requests a table or region or region server flush
+def flush(name)
+  @admin.flushRegion(name.to_java_bytes)
+rescue java.lang.IllegalArgumentException
   # Unknown region. Try table.
-  @admin.flush(TableName.valueOf(table_or_region_name))
+  begin
+@admin.flush(TableName.valueOf(name))
+  rescue java.lang.IllegalArgumentException
+# Unknown table. Try region server.
+@admin.flushRegionServer(ServerName.valueOf(name))
+  end
 end
 
 
#--
@@ -1286,5 +1291,11 @@ module Hbase
   end
   @admin.clearDeadServers(servers).to_a
 end
+
+
#--
+# List live region servers
+def list_liveservers
+  @admin.getClusterStatus.getServers.to_a
+end
   end
 end

http://git-wip-us.apache.org/repos/asf/hbase/blob/8e8e1e5a/hbase-shell/src/main/ruby/shell/commands/flush.rb
--
diff --git a/hbase-shell/src/main/ruby/shell/commands/flush.rb 
b/hbase-shell/src/main/ruby/shell/commands/flush.rb
index 4165b84..1f6b310 100644
--- a/hbase-shell/src/main/ruby/shell/commands/flush.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/flush.rb
@@ -23,11 +23,14 @@ module Shell
   def help
 <<-EOF
 Flush all regions in passed table or pass a region row to
-flush an individual region.  For example:
+flush an individual region, or pass a region server name (in the
+format 'host,port,startcode') to flush all of its regions.
+For example:
 
   hbase> flush 'TABLENAME'
   hbase> flush 'REGIONNAME'
   hbase> flush 'ENCODED_REGIONNAME'
+  hbase> flush 'REGION_SERVER_NAME'
 EOF
   end
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/8e8e1e5a/hbase-shell/src/test/ruby/hbase/admin_test.rb
--
diff --git a/hbase-shell/src/test/ruby/hbase/admin_test.rb 
b/hbase-shell/src/test/ruby/hbase/admin_test.rb
index cbeb8b6..929484c 100644
--- a/hbase-shell/src/test/ruby/hbase/admin_test.rb
+++ b/hbase-shell/src/test/ruby/hbase/admin_test.rb
@@ -101,6 +101,10 @@ module Hbase
 
 define_test "flush should work" do
   command(:flush, 'hbase:meta')
+  servers = admin.list_liveservers
+  servers.each do |s|
+command(:flush, s.toString)
+  end
 end
 
 
#---



[29/30] hbase git commit: HBASE-19082 Reject read/write from client but accept write from replication in state S

2018-02-14 Thread zhangduo
HBASE-19082 Reject read/write from client but accept write from replication in 
state S


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/19e90b1b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/19e90b1b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/19e90b1b

Branch: refs/heads/HBASE-19064
Commit: 19e90b1b23e16047df78aff2510682071f1e5ba2
Parents: 57b1a8b
Author: zhangduo 
Authored: Mon Feb 12 18:20:18 2018 +0800
Committer: zhangduo 
Committed: Thu Feb 15 12:32:08 2018 +0800

--
 .../org/apache/hadoop/hbase/HConstants.java |   3 -
 .../src/main/protobuf/MasterProcedure.proto |   3 +-
 .../hbase/replication/ReplicationUtils.java |   4 +
 ...ransitPeerSyncReplicationStateProcedure.java |  10 +
 .../hadoop/hbase/regionserver/HRegion.java  |   5 +-
 .../hbase/regionserver/HRegionServer.java   |   2 +-
 .../hbase/regionserver/RSRpcServices.java   |  89 +++--
 .../RejectRequestsFromClientStateChecker.java   |  44 
 .../regionserver/ReplicationSink.java   |  72 ---
 .../SyncReplicationPeerInfoProvider.java|  10 +-
 .../SyncReplicationPeerInfoProviderImpl.java|  19 +-
 .../hbase/wal/SyncReplicationWALProvider.java   |   3 +
 .../org/apache/hadoop/hbase/wal/WALFactory.java |   4 +-
 .../hbase/replication/TestSyncReplication.java  | 200 +++
 .../wal/TestSyncReplicationWALProvider.java |   8 +-
 15 files changed, 401 insertions(+), 75 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/19e90b1b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index 4664778..1cd6f89 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -1351,9 +1351,6 @@ public final class HConstants {
 
   public static final String NOT_IMPLEMENTED = "Not implemented";
 
-  // TODO: need to find a better place to hold it.
-  public static final String SYNC_REPLICATION_ENABLED = 
"hbase.replication.sync.enabled";
-
   private HConstants() {
 // Can't be instantiated with this ctor.
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/19e90b1b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
--
diff --git a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto 
b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
index 8cc5c81..2bf634b 100644
--- a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
@@ -382,7 +382,8 @@ enum PeerSyncReplicationStateTransitionState {
   REOPEN_ALL_REGIONS_IN_PEER = 5;
   TRANSIT_PEER_NEW_SYNC_REPLICATION_STATE = 6;
   REFRESH_PEER_SYNC_REPLICATION_STATE_ON_RS_END = 7;
-  POST_PEER_SYNC_REPLICATION_STATE_TRANSITION = 8;
+  CREATE_DIR_FOR_REMOTE_WAL = 8;
+  POST_PEER_SYNC_REPLICATION_STATE_TRANSITION = 9;
 }
 
 message PeerModificationStateData {

http://git-wip-us.apache.org/repos/asf/hbase/blob/19e90b1b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
index d633be9..9d0c05d 100644
--- 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
@@ -37,6 +37,10 @@ import org.apache.yetus.audience.InterfaceAudience;
 @InterfaceAudience.Private
 public final class ReplicationUtils {
 
+  public static final String SYNC_REPLICATION_ENABLED = 
"hbase.replication.sync.enabled";
+
+  public static final String REPLICATION_ATTR_NAME = "__rep__";
+
   private ReplicationUtils() {
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/19e90b1b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/TransitPeerSyncReplicationStateProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/TransitPeerSyncReplicationStateProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/TransitPeerSyncReplicationStateProcedure.java
index c253bff..e53abc0 100644
--- 

[08/30] hbase git commit: HBASE-19965 Fix flaky TestAsyncRegionAdminApi

2018-02-14 Thread zhangduo
HBASE-19965 Fix flaky TestAsyncRegionAdminApi


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cf57ea15
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cf57ea15
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cf57ea15

Branch: refs/heads/HBASE-19064
Commit: cf57ea15f1044d69fcfc8edee3c6f710ed78a7e0
Parents: f8c3d45
Author: Michael Stack 
Authored: Mon Feb 12 22:24:50 2018 -0800
Committer: Michael Stack 
Committed: Mon Feb 12 22:27:12 2018 -0800

--
 .../hbase/client/TestAsyncRegionAdminApi.java   | 195 +--
 .../hbase/client/TestAsyncRegionAdminApi2.java  | 241 +++
 2 files changed, 245 insertions(+), 191 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/cf57ea15/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
index b9afb68..fbb87bb 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import static org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotEquals;
@@ -27,14 +26,10 @@ import static org.junit.Assert.fail;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Optional;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.stream.Collectors;
-import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.master.HMaster;
@@ -60,30 +55,17 @@ import org.junit.runners.Parameterized;
 
 /**
  * Class to test asynchronous region admin operations.
+ * @see TestAsyncRegionAdminApi2 This test and that one used to be a single 
test; combined they were
+ * taking longer than our ten minute timeout, so they were split.
  */
 @RunWith(Parameterized.class)
 @Category({ LargeTests.class, ClientTests.class })
 public class TestAsyncRegionAdminApi extends TestAsyncAdminBase {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
   HBaseClassTestRule.forClass(TestAsyncRegionAdminApi.class);
 
   @Test
-  public void testGetRegionLocation() throws Exception {
-RawAsyncHBaseAdmin rawAdmin = (RawAsyncHBaseAdmin) ASYNC_CONN.getAdmin();
-TEST_UTIL.createMultiRegionTable(tableName, HConstants.CATALOG_FAMILY);
-AsyncTableRegionLocator locator = ASYNC_CONN.getRegionLocator(tableName);
-HRegionLocation regionLocation = 
locator.getRegionLocation(Bytes.toBytes("mmm")).get();
-RegionInfo region = regionLocation.getRegion();
-byte[] regionName = regionLocation.getRegion().getRegionName();
-HRegionLocation location = rawAdmin.getRegionLocation(regionName).get();
-assertTrue(Bytes.equals(regionName, location.getRegion().getRegionName()));
-location = 
rawAdmin.getRegionLocation(region.getEncodedNameAsBytes()).get();
-assertTrue(Bytes.equals(regionName, location.getRegion().getRegionName()));
-  }
-
-  @Test
   public void testAssignRegionAndUnassignRegion() throws Exception {
 createTableWithDefaultConf(tableName);
 
@@ -259,175 +241,6 @@ public class TestAsyncRegionAdminApi extends 
TestAsyncAdminBase {
 assertEquals(0, 
regionServer.getOnlineRegion(hri.getRegionName()).getMemStoreSize());
   }
 
-  @Test
-  public void testSplitSwitch() throws Exception {
-createTableWithDefaultConf(tableName);
-byte[][] families = { FAMILY };
-final int rows = 1;
-loadData(tableName, families, rows);
-
-AsyncTable metaTable = 
ASYNC_CONN.getTable(META_TABLE_NAME);
-List regionLocations =
-AsyncMetaTableAccessor.getTableHRegionLocations(metaTable, 
Optional.of(tableName)).get();
-int originalCount = regionLocations.size();
-
-initSplitMergeSwitch();
-assertTrue(admin.splitSwitch(false).get());
-try {
-  admin.split(tableName, Bytes.toBytes(rows / 2)).join();
-} catch (Exception e){
-  //Expected
-}
-int count = admin.getRegions(tableName).get().size();
-assertTrue(originalCount == count);
-
-assertFalse(admin.splitSwitch(true).get());
-   

[25/30] hbase git commit: HBASE-19957 General framework to transit sync replication state

2018-02-14 Thread zhangduo
HBASE-19957 General framework to transit sync replication state


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/57b1a8bb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/57b1a8bb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/57b1a8bb

Branch: refs/heads/HBASE-19064
Commit: 57b1a8bbd02ff591a7bac10f8c6eb36c5377e660
Parents: 1f31984
Author: zhangduo 
Authored: Fri Feb 9 18:33:28 2018 +0800
Committer: zhangduo 
Committed: Thu Feb 15 12:30:26 2018 +0800

--
 .../replication/ReplicationPeerConfig.java  |   2 -
 .../replication/ReplicationPeerDescription.java |   5 +-
 .../hbase/replication/SyncReplicationState.java |  19 +-
 .../org/apache/hadoop/hbase/HConstants.java |   3 +
 .../src/main/protobuf/MasterProcedure.proto |  20 ++-
 .../hbase/replication/ReplicationPeerImpl.java  |  45 -
 .../replication/ReplicationPeerStorage.java |  25 ++-
 .../hbase/replication/ReplicationPeers.java |  27 ++-
 .../replication/ZKReplicationPeerStorage.java   |  65 +--
 .../hbase/coprocessor/MasterObserver.java   |   7 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |   4 +-
 .../hbase/master/MasterCoprocessorHost.java |  12 +-
 .../replication/AbstractPeerProcedure.java  |  14 +-
 .../master/replication/ModifyPeerProcedure.java |  15 +-
 .../replication/RefreshPeerProcedure.java   |  18 +-
 .../replication/ReplicationPeerManager.java | 107 +++-
 ...ransitPeerSyncReplicationStateProcedure.java | 175 ---
 .../hbase/regionserver/HRegionServer.java   |  35 ++--
 .../regionserver/ReplicationSourceService.java  |  11 +-
 .../regionserver/PeerActionListener.java|   4 +-
 .../regionserver/PeerProcedureHandler.java  |  16 +-
 .../regionserver/PeerProcedureHandlerImpl.java  |  55 +-
 .../regionserver/RefreshPeerCallable.java   |   7 +
 .../replication/regionserver/Replication.java   |  22 ++-
 .../regionserver/ReplicationSourceManager.java  |  41 +++--
 .../SyncReplicationPeerInfoProvider.java|  43 +
 .../SyncReplicationPeerInfoProviderImpl.java|  71 
 .../SyncReplicationPeerMappingManager.java  |  48 +
 .../SyncReplicationPeerProvider.java|  35 
 .../hbase/wal/SyncReplicationWALProvider.java   |  35 ++--
 .../org/apache/hadoop/hbase/wal/WALFactory.java |  47 ++---
 .../replication/TestReplicationAdmin.java   |   3 +-
 .../TestReplicationSourceManager.java   |   5 +-
 .../wal/TestSyncReplicationWALProvider.java |  36 ++--
 34 files changed, 752 insertions(+), 325 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/57b1a8bb/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
index 69565a7..79b3a1d 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.replication;
 
 import java.util.Collection;
@@ -25,7 +24,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
-
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.util.Bytes;

http://git-wip-us.apache.org/repos/asf/hbase/blob/57b1a8bb/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java
index 2d077c5..b0c27bb 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java
@@ -20,7 +20,10 @@ package org.apache.hadoop.hbase.replication;
 import org.apache.yetus.audience.InterfaceAudience;
 
 /**
- * The POJO equivalent of ReplicationProtos.ReplicationPeerDescription
+ * The POJO equivalent of ReplicationProtos.ReplicationPeerDescription.
+ * 
+ * To developer, here we do not store the new sync replication state since it 
is just an
+ * intermediate state and this class is 

[24/30] hbase git commit: HBASE-19957 General framework to transit sync replication state

2018-02-14 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/57b1a8bb/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
index 85b2e85..7b8c43b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
@@ -54,6 +54,7 @@ import 
org.apache.hadoop.hbase.replication.ReplicationException;
 import org.apache.hadoop.hbase.replication.ReplicationListener;
 import org.apache.hadoop.hbase.replication.ReplicationPeer;
 import org.apache.hadoop.hbase.replication.ReplicationPeer.PeerState;
+import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
 import org.apache.hadoop.hbase.replication.ReplicationPeers;
 import org.apache.hadoop.hbase.replication.ReplicationQueueInfo;
 import org.apache.hadoop.hbase.replication.ReplicationQueueStorage;
@@ -136,6 +137,8 @@ public class ReplicationSourceManager implements 
ReplicationListener {
   // For recovered source, the queue id's format is peer_id-servername-*
   private final ConcurrentMap> 
walsByIdRecoveredQueues;
 
+  private final SyncReplicationPeerMappingManager 
syncReplicationPeerMappingManager;
+
   private final Configuration conf;
   private final FileSystem fs;
   // The paths to the latest log of each wal group, for new coming peers
@@ -172,9 +175,8 @@ public class ReplicationSourceManager implements 
ReplicationListener {
   public ReplicationSourceManager(ReplicationQueueStorage queueStorage,
   ReplicationPeers replicationPeers, ReplicationTracker 
replicationTracker, Configuration conf,
   Server server, FileSystem fs, Path logDir, Path oldLogDir, UUID 
clusterId,
-  WALFileLengthProvider walFileLengthProvider) throws IOException {
-// CopyOnWriteArrayList is thread-safe.
-// Generally, reading is more than modifying.
+  WALFileLengthProvider walFileLengthProvider,
+  SyncReplicationPeerMappingManager syncReplicationPeerMappingManager) 
throws IOException {
 this.sources = new ConcurrentHashMap<>();
 this.queueStorage = queueStorage;
 this.replicationPeers = replicationPeers;
@@ -187,10 +189,11 @@ public class ReplicationSourceManager implements 
ReplicationListener {
 this.fs = fs;
 this.logDir = logDir;
 this.oldLogDir = oldLogDir;
-this.sleepBeforeFailover = 
conf.getLong("replication.sleep.before.failover", 3); // 30
-   
  // seconds
+// 30 seconds
+this.sleepBeforeFailover = 
conf.getLong("replication.sleep.before.failover", 3);
 this.clusterId = clusterId;
 this.walFileLengthProvider = walFileLengthProvider;
+this.syncReplicationPeerMappingManager = syncReplicationPeerMappingManager;
 this.replicationTracker.registerListener(this);
 // It's preferable to failover 1 RS at a time, but with good zk servers
 // more could be processed at the same time.
@@ -254,8 +257,11 @@ public class ReplicationSourceManager implements 
ReplicationListener {
   }
 
   /**
-   * 1. Add peer to replicationPeers 2. Add the normal source and related 
replication queue 3. Add
-   * HFile Refs
+   * 
+   * Add peer to replicationPeers
+   * Add the normal source and related replication queue
+   * Add HFile Refs
+   * 
* @param peerId the id of replication peer
*/
   public void addPeer(String peerId) throws IOException {
@@ -274,13 +280,16 @@ public class ReplicationSourceManager implements 
ReplicationListener {
   }
 
   /**
-   * 1. Remove peer for replicationPeers 2. Remove all the recovered sources 
for the specified id
-   * and related replication queues 3. Remove the normal source and related 
replication queue 4.
-   * Remove HFile Refs
+   * 
+   * Remove peer for replicationPeers
+   * Remove all the recovered sources for the specified id and related 
replication queues
+   * Remove the normal source and related replication queue
+   * Remove HFile Refs
+   * 
* @param peerId the id of the replication peer
*/
   public void removePeer(String peerId) {
-replicationPeers.removePeer(peerId);
+ReplicationPeer peer = replicationPeers.removePeer(peerId);
 String terminateMessage = "Replication stream was removed by a user";
 List oldSourcesToDelete = new ArrayList<>();
 // synchronized on oldsources to avoid adding recovered source for the 
to-be-removed peer
@@ -311,7 +320,10 @@ public class ReplicationSourceManager implements 
ReplicationListener {
   deleteQueue(peerId);
   

[04/30] hbase git commit: HBASE-19986 If HBaseTestClassRule timesout a test, thread dump

2018-02-14 Thread zhangduo
HBASE-19986 If HBaseTestClassRule timesout a test, thread dump


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c2ee82c9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c2ee82c9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c2ee82c9

Branch: refs/heads/HBASE-19064
Commit: c2ee82c9091a721e22a0eb69be17cd0217739099
Parents: 00f8877
Author: Michael Stack 
Authored: Mon Feb 12 14:00:35 2018 -0800
Committer: Michael Stack 
Committed: Mon Feb 12 15:28:40 2018 -0800

--
 .../apache/hadoop/hbase/HBaseClassTestRule.java |   3 +-
 .../org/apache/hadoop/hbase/TestTimeout.java|  15 +-
 .../hadoop/hbase/TimedOutTestsListener.java | 177 +++
 .../hadoop/hbase/TimedOutTestsListener.java | 176 --
 pom.xml |   2 +-
 5 files changed, 194 insertions(+), 179 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c2ee82c9/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRule.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRule.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRule.java
index bcde826..b964872 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRule.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRule.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase;
 
 import java.util.concurrent.TimeUnit;
+
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;

http://git-wip-us.apache.org/repos/asf/hbase/blob/c2ee82c9/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java
index 343108e..495667c 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase;
 
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Threads;
 import org.junit.ClassRule;
 import org.junit.Ignore;
 import org.junit.Test;
@@ -41,6 +42,18 @@ public class TestTimeout {
  */
 @Ignore @Test
 public void infiniteLoop() {
-while (true) {}
+  // Launch a background non-daemon thread.
+  Thread t = new Thread("HangingThread") {
+public void run() {
+  synchronized(this) {
+while(true) {
+}
+  }
+}
+  };
+  t.start();
+  while (true) {
+// Just hang out too.
+  }
}
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/c2ee82c9/hbase-common/src/test/java/org/apache/hadoop/hbase/TimedOutTestsListener.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/TimedOutTestsListener.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TimedOutTestsListener.java
new file mode 100644
index 000..d5c87f3
--- /dev/null
+++ 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TimedOutTestsListener.java
@@ -0,0 +1,177 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.lang.management.LockInfo;
+import java.lang.management.ManagementFactory;
+import java.lang.management.MonitorInfo;
+import 

[22/30] hbase git commit: HBASE-19083 Introduce a new log writer which can write to two HDFSes

2018-02-14 Thread zhangduo
HBASE-19083 Introduce a new log writer which can write to two HDFSes


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d93877bf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d93877bf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d93877bf

Branch: refs/heads/HBASE-19064
Commit: d93877bf3b27abf0f96a37ccd08e7f356785ee79
Parents: d0f2d18
Author: zhangduo 
Authored: Thu Jan 11 21:08:02 2018 +0800
Committer: zhangduo 
Committed: Thu Feb 15 12:30:26 2018 +0800

--
 .../hbase/regionserver/wal/AsyncFSWAL.java  |  16 ++-
 .../regionserver/wal/CombinedAsyncWriter.java   | 134 ++
 .../hbase/regionserver/wal/DualAsyncFSWAL.java  |  67 +
 .../wal/AbstractTestProtobufLog.java| 110 +++
 .../regionserver/wal/ProtobufLogTestHelper.java |  99 ++
 .../regionserver/wal/TestAsyncProtobufLog.java  |  32 +
 .../wal/TestCombinedAsyncWriter.java| 136 +++
 .../hbase/regionserver/wal/TestProtobufLog.java |  14 +-
 .../regionserver/wal/WriterOverAsyncWriter.java |  63 +
 9 files changed, 531 insertions(+), 140 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d93877bf/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
index 19d89df..8e57441 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
@@ -606,12 +606,16 @@ public class AsyncFSWAL extends 
AbstractFSWAL {
 }
   }
 
-  @Override
-  protected AsyncWriter createWriterInstance(Path path) throws IOException {
+  protected final AsyncWriter createAsyncWriter(FileSystem fs, Path path) 
throws IOException {
 return AsyncFSWALProvider.createAsyncWriter(conf, fs, path, false, 
eventLoopGroup,
   channelClass);
   }
 
+  @Override
+  protected AsyncWriter createWriterInstance(Path path) throws IOException {
+return createAsyncWriter(fs, path);
+  }
+
   private void waitForSafePoint() {
 consumeLock.lock();
 try {
@@ -654,13 +658,13 @@ public class AsyncFSWAL extends 
AbstractFSWAL {
 } finally {
   consumeLock.unlock();
 }
-return executeClose(closeExecutor, oldWriter);
+return executeClose(oldWriter);
   }
 
   @Override
   protected void doShutdown() throws IOException {
 waitForSafePoint();
-executeClose(closeExecutor, writer);
+executeClose(writer);
 closeExecutor.shutdown();
 try {
   if (!closeExecutor.awaitTermination(waitOnShutdownInSeconds, 
TimeUnit.SECONDS)) {
@@ -698,7 +702,7 @@ public class AsyncFSWAL extends AbstractFSWAL {
 }
   }
 
-  private static long executeClose(ExecutorService closeExecutor, AsyncWriter 
writer) {
+  protected final long executeClose(AsyncWriter writer) {
 long fileLength;
 if (writer != null) {
   fileLength = writer.getLength();
@@ -706,7 +710,7 @@ public class AsyncFSWAL extends AbstractFSWAL {
 try {
   writer.close();
 } catch (IOException e) {
-  LOG.warn("close old writer failed", e);
+  LOG.warn("close writer failed", e);
 }
   });
 } else {

http://git-wip-us.apache.org/repos/asf/hbase/blob/d93877bf/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/CombinedAsyncWriter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/CombinedAsyncWriter.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/CombinedAsyncWriter.java
new file mode 100644
index 000..8ecfede
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/CombinedAsyncWriter.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY 

[09/30] hbase git commit: HBASE-19965 Fix flaky TestAsyncRegionAdminApi; ADDENDUM do same to TestAsyncTableAdminAPI

2018-02-14 Thread zhangduo
HBASE-19965 Fix flaky TestAsyncRegionAdminApi; ADDENDUM do same to 
TestAsyncTableAdminAPI


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b4622ffa
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b4622ffa
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b4622ffa

Branch: refs/heads/HBASE-19064
Commit: b4622ffad7011b3d29bce6376edb896de6df9f6e
Parents: cf57ea1
Author: Michael Stack 
Authored: Mon Feb 12 22:52:56 2018 -0800
Committer: Michael Stack 
Committed: Mon Feb 12 22:52:56 2018 -0800

--
 .../hbase/client/TestAsyncTableAdminApi.java| 249 +--
 .../hbase/client/TestAsyncTableAdminApi2.java   | 299 +++
 2 files changed, 301 insertions(+), 247 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b4622ffa/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
index 597a3d7..a3129f8 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
@@ -30,11 +30,9 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
-import java.util.Set;
 import java.util.concurrent.CompletionException;
 import java.util.concurrent.ExecutionException;
 import java.util.regex.Pattern;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HConstants;
@@ -43,12 +41,9 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder.ModifyableTableDescriptor;
 import org.apache.hadoop.hbase.master.LoadBalancer;
-import org.apache.hadoop.hbase.master.MasterFileSystem;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.FSTableDescriptors;
-import org.apache.hadoop.hbase.util.FSUtils;
 import org.junit.Assert;
 import org.junit.ClassRule;
 import org.junit.Test;
@@ -58,6 +53,8 @@ import org.junit.runners.Parameterized;
 
 /**
  * Class to test asynchronous table admin operations.
+ * @see TestAsyncTableAdminApi2 This test and it used to be joined it was 
taking longer than our
+ * ten minute timeout so they were split.
  */
 @RunWith(Parameterized.class)
 @Category({ LargeTests.class, ClientTests.class })
@@ -591,161 +588,6 @@ public class TestAsyncTableAdminApi extends 
TestAsyncAdminBase {
   }
 
   @Test
-  public void testDisableCatalogTable() throws Exception {
-try {
-  this.admin.disableTable(TableName.META_TABLE_NAME).join();
-  fail("Expected to throw ConstraintException");
-} catch (Exception e) {
-}
-// Before the fix for HBASE-6146, the below table creation was failing as 
the hbase:meta table
-// actually getting disabled by the disableTable() call.
-createTableWithDefaultConf(tableName);
-  }
-
-  @Test
-  public void testAddColumnFamily() throws Exception {
-// Create a table with two families
-TableDescriptorBuilder builder = 
TableDescriptorBuilder.newBuilder(tableName);
-builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0));
-admin.createTable(builder.build()).join();
-admin.disableTable(tableName).join();
-// Verify the table descriptor
-verifyTableDescriptor(tableName, FAMILY_0);
-
-// Modify the table removing one family and verify the descriptor
-admin.addColumnFamily(tableName, 
ColumnFamilyDescriptorBuilder.of(FAMILY_1)).join();
-verifyTableDescriptor(tableName, FAMILY_0, FAMILY_1);
-  }
-
-  @Test
-  public void testAddSameColumnFamilyTwice() throws Exception {
-// Create a table with one families
-TableDescriptorBuilder builder = 
TableDescriptorBuilder.newBuilder(tableName);
-builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0));
-admin.createTable(builder.build()).join();
-admin.disableTable(tableName).join();
-// Verify the table descriptor
-verifyTableDescriptor(tableName, FAMILY_0);
-
-// Modify the table removing one family and verify the descriptor
-admin.addColumnFamily(tableName, 
ColumnFamilyDescriptorBuilder.of(FAMILY_1)).join();
-verifyTableDescriptor(tableName, FAMILY_0, FAMILY_1);
-
-try {
-  // Add same column family again - 

[16/30] hbase git commit: HBASE-19998 Flakey TestVisibilityLabelsWithDefaultVisLabelService

2018-02-14 Thread zhangduo
HBASE-19998 Flakey TestVisibilityLabelsWithDefaultVisLabelService


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2f1b3eab
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2f1b3eab
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2f1b3eab

Branch: refs/heads/HBASE-19064
Commit: 2f1b3eab675ac327a6f61b724d5f0bce01ec6e68
Parents: 6e35f5e
Author: Michael Stack 
Authored: Tue Feb 13 22:30:34 2018 -0800
Committer: Michael Stack 
Committed: Tue Feb 13 22:32:20 2018 -0800

--
 .../hadoop/hbase/master/assignment/MoveRegionProcedure.java   | 1 +
 .../hadoop/hbase/master/procedure/MasterProcedureScheduler.java   | 2 +-
 .../hadoop/hbase/master/procedure/ServerCrashProcedure.java   | 3 +--
 3 files changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2f1b3eab/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
index a29bfee..4e7cde6 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
@@ -54,6 +54,7 @@ public class MoveRegionProcedure extends 
AbstractStateMachineRegionProcedurehttp://git-wip-us.apache.org/repos/asf/hbase/blob/2f1b3eab/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
index 0400de4..936540d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
@@ -880,7 +880,7 @@ public class MasterProcedureScheduler extends 
AbstractProcedureScheduler {
   boolean hasLock = true;
   final LockAndQueue[] regionLocks = new LockAndQueue[regionInfo.length];
   for (int i = 0; i < regionInfo.length; ++i) {
-LOG.info(procedure + " " + table + " " + 
regionInfo[i].getRegionNameAsString());
+LOG.info(procedure + ", table=" + table + ", " + 
regionInfo[i].getRegionNameAsString());
 assert table != null;
 assert regionInfo[i] != null;
 assert regionInfo[i].getTable() != null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/2f1b3eab/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
index c18d725..7352826 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
@@ -189,8 +189,7 @@ implements ServerProcedureInterface {
* @throws IOException
*/
   private void processMeta(final MasterProcedureEnv env) throws IOException {
-if (LOG.isDebugEnabled()) LOG.debug(this + "; Processing hbase:meta that 
was on " +
-this.serverName);
+LOG.debug("{}; processing hbase:meta", this);
 
 // Assign meta if still carrying it. Check again: region may be assigned 
because of RIT timeout
 final AssignmentManager am = 
env.getMasterServices().getAssignmentManager();



[27/30] hbase git commit: HBASE-19935 Only allow table replication for sync replication for now

2018-02-14 Thread zhangduo
HBASE-19935 Only allow table replication for sync replication for now


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1f319847
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1f319847
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1f319847

Branch: refs/heads/HBASE-19064
Commit: 1f31984722b5f98d7bc177cf46dc79a94f4d4c52
Parents: 83a91d6
Author: Guanghao Zhang 
Authored: Tue Feb 6 16:00:59 2018 +0800
Committer: zhangduo 
Committed: Thu Feb 15 12:30:26 2018 +0800

--
 .../replication/ReplicationPeerConfig.java  |  9 +++
 .../replication/ReplicationPeerManager.java | 34 -
 .../replication/TestReplicationAdmin.java   | 73 ++--
 .../wal/TestCombinedAsyncWriter.java|  6 ++
 .../wal/TestSyncReplicationWALProvider.java |  6 ++
 5 files changed, 102 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1f319847/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
index 4c10c46..69565a7 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
@@ -25,6 +25,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
+
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
@@ -217,6 +219,13 @@ public class ReplicationPeerConfig {
 return this.remoteWALDir;
   }
 
+  /**
+   * Use remote wal dir to decide whether a peer is sync replication peer
+   */
+  public boolean isSyncReplication() {
+return !StringUtils.isBlank(this.remoteWALDir);
+  }
+
   public static ReplicationPeerConfigBuilder newBuilder() {
 return new ReplicationPeerConfigBuilderImpl();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/1f319847/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
index 9336fbd..6bfd9c9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
@@ -167,7 +167,7 @@ public class ReplicationPeerManager {
   " does not match new remote wal dir '" + 
peerConfig.getRemoteWALDir() + "'");
 }
 
-if (oldPeerConfig.getRemoteWALDir() != null) {
+if (oldPeerConfig.isSyncReplication()) {
   if (!ReplicationUtils.isKeyConfigEqual(oldPeerConfig, peerConfig)) {
 throw new DoNotRetryIOException(
 "Changing the replicated namespace/table config on a synchronous 
replication "
@@ -195,8 +195,8 @@ public class ReplicationPeerManager {
 }
 ReplicationPeerConfig copiedPeerConfig = 
ReplicationPeerConfig.newBuilder(peerConfig).build();
 SyncReplicationState syncReplicationState =
-StringUtils.isBlank(peerConfig.getRemoteWALDir()) ? 
SyncReplicationState.NONE
-: SyncReplicationState.DOWNGRADE_ACTIVE;
+copiedPeerConfig.isSyncReplication() ? 
SyncReplicationState.DOWNGRADE_ACTIVE
+: SyncReplicationState.NONE;
 peerStorage.addPeer(peerId, copiedPeerConfig, enabled, 
syncReplicationState);
 peers.put(peerId,
   new ReplicationPeerDescription(peerId, enabled, copiedPeerConfig, 
syncReplicationState));
@@ -316,9 +316,37 @@ public class ReplicationPeerManager {
 peerConfig.getTableCFsMap());
 }
 
+if (peerConfig.isSyncReplication()) {
+  checkPeerConfigForSyncReplication(peerConfig);
+}
+
 checkConfiguredWALEntryFilters(peerConfig);
   }
 
+  private void checkPeerConfigForSyncReplication(ReplicationPeerConfig 
peerConfig)
+  throws DoNotRetryIOException {
+// This is used to reduce the difficulty for implementing the sync 
replication state transition
+// as we need to reopen all the related regions.
+// TODO: Add namespace, replicat_all flag back
+if (peerConfig.replicateAllUserTables()) {
+  throw new DoNotRetryIOException(
+  "Only 

[07/30] hbase git commit: HBASE-19960 Doc test timeouts and test categories in hbase2

2018-02-14 Thread zhangduo
HBASE-19960 Doc test timeouts and test categories in hbase2


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f8c3d456
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f8c3d456
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f8c3d456

Branch: refs/heads/HBASE-19064
Commit: f8c3d4568e02ba7c65d0ff82a644e8757c55ff7b
Parents: 24bed6b
Author: Michael Stack 
Authored: Mon Feb 12 20:52:01 2018 -0800
Committer: Michael Stack 
Committed: Mon Feb 12 20:53:42 2018 -0800

--
 .../hbase/testclassification/LargeTests.java| 19 ++---
 .../hbase/testclassification/MediumTests.java   | 16 ++--
 .../hbase/testclassification/SmallTests.java| 16 ++--
 src/main/asciidoc/_chapters/developer.adoc  | 89 ++--
 4 files changed, 86 insertions(+), 54 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c3d456/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/LargeTests.java
--
diff --git 
a/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/LargeTests.java
 
b/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/LargeTests.java
index eb4a66a..aa183d5 100644
--- 
a/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/LargeTests.java
+++ 
b/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/LargeTests.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -19,18 +19,17 @@
 package org.apache.hadoop.hbase.testclassification;
 
 /**
- * Tag a test as 'large', meaning that the test class has the following
- * characteristics:
+ * Tagging a test as 'large', means that the test class has the following 
characteristics:
  * 
- *  executed in an isolated JVM. Tests can however be executed in 
different JVM on the same
- *  machine simultaneously.
- *  will not have to be executed by the developer before submitting a 
bug
- *  ideally, last less than 2 minutes to help parallelization
+ *  it can executed in an isolated JVM (Tests can however be executed in 
different JVM on the
+ *  same  machine simultaneously so be careful two concurrent tests end up 
fighting over ports
+ *  or other singular resources).
+ *  ideally, the whole large test-suite/class, no matter how many or how 
few test methods it
+ *  has, will run in last less than three minutes
+ *  No large test can take longer than ten minutes; it will be killed. See 
'Integeration Tests'
+ *  if you need to run tests longer than this.
  * 
  *
- *  It the worst case compared to small or medium, use it only for tests that
- *you cannot put in the other categories
- *
  * @see SmallTests
  * @see MediumTests
  * @see IntegrationTests

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c3d456/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/MediumTests.java
--
diff --git 
a/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/MediumTests.java
 
b/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/MediumTests.java
index 55f81cb..0f8055b 100644
--- 
a/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/MediumTests.java
+++ 
b/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/MediumTests.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -19,16 +19,16 @@
 package org.apache.hadoop.hbase.testclassification;
 
 /**
- * Tag a test as 'Medium', meaning that the test class has the following
- * characteristics:
+ * Tagging a test as 'medium' means that the test class has the following 
characteristics:
  * 
- *  executed in an isolated JVM. Tests can however be executed in 
different JVM on the same
- *  machine simultaneously.
- *  will have to be executed by the developer before submitting a bug
- *  ideally, last less than 1 minutes to help parallelization
+ *  it can be executed in an isolated JVM (Tests can however be executed 
in different JVMs on
+ *  the  same  machine simultaneously so be careful two concurrent tests end 
up fighting over ports
+ *  or other singular resources).
+ *  ideally, the whole medium test-suite/class, no matter how many or how 
few test methods it
+ *  has, will complete in 50 seconds; otherwise make it a 'large' test.
  * 
  *
- *  Use it for tests that cannot be 

[15/30] hbase git commit: HBASE-19965 Fix flaky TestAsyncRegionAdminApi; ADDENDUM do same to TestAsyncTableAdminAPI AGAIN (make TestAsyncTableAdminAPI3... already an TestAsyncTableAdminAPI2)

2018-02-14 Thread zhangduo
HBASE-19965 Fix flaky TestAsyncRegionAdminApi; ADDENDUM do same to 
TestAsyncTableAdminAPI AGAIN (make TestAsyncTableAdminAPI3... already an 
TestAsyncTableAdminAPI2)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6e35f5ea
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6e35f5ea
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6e35f5ea

Branch: refs/heads/HBASE-19064
Commit: 6e35f5eab9799e5d1450f97dec9335800dd39c03
Parents: 39e191e
Author: Michael Stack 
Authored: Tue Feb 13 21:04:50 2018 -0800
Committer: Michael Stack 
Committed: Tue Feb 13 21:06:00 2018 -0800

--
 .../hbase/client/TestAsyncTableAdminApi.java| 234 +--
 .../hbase/client/TestAsyncTableAdminApi3.java   | 282 +++
 2 files changed, 284 insertions(+), 232 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6e35f5ea/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
index a3129f8..bc3a651 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
@@ -31,20 +31,16 @@ import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.concurrent.CompletionException;
-import java.util.concurrent.ExecutionException;
-import java.util.regex.Pattern;
 import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
-import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder.ModifyableTableDescriptor;
 import org.apache.hadoop.hbase.master.LoadBalancer;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.Assert;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -55,6 +51,7 @@ import org.junit.runners.Parameterized;
  * Class to test asynchronous table admin operations.
  * @see TestAsyncTableAdminApi2 This test and it used to be joined it was 
taking longer than our
  * ten minute timeout so they were split.
+ * @see TestAsyncTableAdminApi3 Another split out from this class so each runs 
under ten minutes.
  */
 @RunWith(Parameterized.class)
 @Category({ LargeTests.class, ClientTests.class })
@@ -65,82 +62,6 @@ public class TestAsyncTableAdminApi extends 
TestAsyncAdminBase {
   HBaseClassTestRule.forClass(TestAsyncTableAdminApi.class);
 
   @Test
-  public void testTableExist() throws Exception {
-boolean exist;
-exist = admin.tableExists(tableName).get();
-assertEquals(false, exist);
-TEST_UTIL.createTable(tableName, FAMILY);
-exist = admin.tableExists(tableName).get();
-assertEquals(true, exist);
-exist = admin.tableExists(TableName.META_TABLE_NAME).get();
-assertEquals(true, exist);
-  }
-
-  @Test
-  public void testListTables() throws Exception {
-int numTables = admin.listTableDescriptors().get().size();
-final TableName tableName1 = TableName.valueOf(tableName.getNameAsString() 
+ "1");
-final TableName tableName2 = TableName.valueOf(tableName.getNameAsString() 
+ "2");
-final TableName tableName3 = TableName.valueOf(tableName.getNameAsString() 
+ "3");
-TableName[] tables = new TableName[] { tableName1, tableName2, tableName3 
};
-for (int i = 0; i < tables.length; i++) {
-  createTableWithDefaultConf(tables[i]);
-}
-
-List tableDescs = admin.listTableDescriptors().get();
-int size = tableDescs.size();
-assertTrue(size >= tables.length);
-for (int i = 0; i < tables.length && i < size; i++) {
-  boolean found = false;
-  for (int j = 0; j < size; j++) {
-if (tableDescs.get(j).getTableName().equals(tables[i])) {
-  found = true;
-  break;
-}
-  }
-  assertTrue("Not found: " + tables[i], found);
-}
-
-List tableNames = admin.listTableNames().get();
-size = tableNames.size();
-assertTrue(size == (numTables + tables.length));
-for (int i = 0; i < tables.length && i < size; i++) {
-  boolean found = false;
-  for (int j = 0; j < size; j++) {
-if (tableNames.get(j).equals(tables[i])) {
-  found = true;
-  

hbase git commit: HBASE-19996 Some nonce procs might not be cleaned up (follow up HBASE-19756)

2018-02-14 Thread toffer
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 a4116b243 -> 1bd38fde4


HBASE-19996 Some nonce procs might not be cleaned up (follow up HBASE-19756)

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1bd38fde
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1bd38fde
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1bd38fde

Branch: refs/heads/branch-1.3
Commit: 1bd38fde498c3f704e576a4e87ea2697804467e2
Parents: a4116b2
Author: Thiruvel Thirumoolan 
Authored: Tue Feb 13 17:38:16 2018 -0800
Committer: Francis Liu 
Committed: Wed Feb 14 19:46:54 2018 -0800

--
 .../hbase/procedure2/ProcedureExecutor.java |  30 +++--
 .../client/TestNonceProcCleanerOnFailure.java   | 101 --
 .../hbase/procedure/TestFailedProcCleanup.java  | 135 +++
 3 files changed, 157 insertions(+), 109 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1bd38fde/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index 51fd62f..420c499 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -52,6 +52,7 @@ import 
org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator
 import org.apache.hadoop.hbase.procedure2.util.StringUtils;
 import org.apache.hadoop.hbase.procedure2.util.TimeoutBlockingQueue;
 import 
org.apache.hadoop.hbase.procedure2.util.TimeoutBlockingQueue.TimeoutRetriever;
+import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos;
 import 
org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -178,17 +179,19 @@ public class ProcedureExecutor {
 // TODO: Select TTL based on Procedure type
 if ((procInfo.hasClientAckTime() && (now - 
procInfo.getClientAckTime()) >= evictAckTtl) ||
 (now - procInfo.getLastUpdate()) >= evictTtl) {
-  if (isDebugEnabled) {
-LOG.debug("Evict completed procedure: " + procInfo);
+  // Failed Procedures aren't persisted in WAL.
+  if (!(procInfo instanceof FailedProcedureInfo)) {
+store.delete(entry.getKey());
   }
+  it.remove();
+
   NonceKey nonceKey = procInfo.getNonceKey();
-  // Nonce procedures aren't persisted in WAL.
-  if (nonceKey == null) {
-store.delete(entry.getKey());
-  } else {
+  if (nonceKey != null) {
 nonceKeysToProcIdsMap.remove(nonceKey);
   }
-  it.remove();
+  if (isDebugEnabled) {
+LOG.debug("Evict completed procedure: " + procInfo);
+  }
 }
   }
 }
@@ -698,7 +701,7 @@ public class ProcedureExecutor {
 if (procId == null || completed.containsKey(procId)) return;
 
 final long currentTime = EnvironmentEdgeManager.currentTime();
-final ProcedureInfo result = new ProcedureInfo(
+final ProcedureInfo result = new FailedProcedureInfo(
   procId.longValue(),
   procName,
   procOwner != null ? procOwner.getShortName() : null,
@@ -712,6 +715,17 @@ public class ProcedureExecutor {
 completed.putIfAbsent(procId, result);
   }
 
+  public static class FailedProcedureInfo extends ProcedureInfo {
+
+public FailedProcedureInfo(long procId, String procName, String procOwner,
+ProcedureState procState, long parentId, NonceKey nonceKey,
+ErrorHandlingProtos.ForeignExceptionMessage exception, long 
lastUpdate, long startTime,
+byte[] result) {
+  super(procId, procName, procOwner, procState, parentId, nonceKey, 
exception, lastUpdate,
+  startTime, result);
+}
+  }
+
   // ==
   //  Submit/Abort Procedure
   // ==

http://git-wip-us.apache.org/repos/asf/hbase/blob/1bd38fde/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestNonceProcCleanerOnFailure.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestNonceProcCleanerOnFailure.java
 

hbase git commit: HBASE-19468 FNFE during scans and flushes (Ram)

2018-02-14 Thread toffer
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 9b1f379f2 -> a4116b243


HBASE-19468 FNFE during scans and flushes (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a4116b24
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a4116b24
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a4116b24

Branch: refs/heads/branch-1.3
Commit: a4116b24347387909a0c717829d5cb616e6e1aef
Parents: 9b1f379
Author: ramkrish86 
Authored: Wed Dec 20 17:02:56 2017 +0530
Committer: Francis Liu 
Committed: Wed Feb 14 19:01:43 2018 -0800

--
 .../hadoop/hbase/regionserver/StoreScanner.java | 29 +---
 .../TestCompactedHFilesDischarger.java  | 46 +++-
 2 files changed, 68 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a4116b24/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index d42852a..c95151b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.regionserver;
 import java.io.IOException;
 import java.io.InterruptedIOException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;
 import java.util.NavigableSet;
 import java.util.concurrent.CountDownLatch;
@@ -129,8 +130,10 @@ public class StoreScanner extends 
NonReversedNonLazyKeyValueScanner
   private boolean scanUsePread = false;
   // Indicates whether there was flush during the course of the scan
   private volatile boolean flushed = false;
+
   // generally we get one file from a flush
-  private final List flushedStoreFiles = new 
ArrayList(1);
+  private final List flushedstoreFileScanners =
+  new ArrayList(1);
   // generally we get one memstroe scanner from a flush
   private final List memStoreScannersAfterFlush = new 
ArrayList<>(1);
   // The current list of scanners
@@ -444,6 +447,10 @@ public class StoreScanner extends 
NonReversedNonLazyKeyValueScanner
 this.closing = true;
 clearAndClose(scannersForDelayedClose);
 clearAndClose(memStoreScannersAfterFlush);
+// clear them at any case. In case scanner.next() was never called
+// and there were some lease expiry we need to close all the scanners
+// on the flushed files which are open
+clearAndClose(flushedstoreFileScanners);
 // Under test, we dont have a this.store
 if (this.store != null)
   this.store.deleteChangedReaderObserver(this);
@@ -803,7 +810,17 @@ public class StoreScanner extends 
NonReversedNonLazyKeyValueScanner
 flushLock.lock();
 try {
   flushed = true;
-  flushedStoreFiles.addAll(sfs);
+  final boolean isCompaction = false;
+  boolean usePread = get || scanUsePread;
+  // SEE HBASE-19468 where the flushed files are getting compacted even 
before a scanner
+  // calls next(). So its better we create scanners here rather than 
next() call. Ensure
+  // these scanners are properly closed() whether or not the scan is 
completed successfully
+  // Eagerly creating scanners so that we have the ref counting ticking on 
the newly created
+  // store files. In case of stream scanners this eager creation does not 
induce performance
+  // penalty because in scans (that uses stream scanners) the next() call 
is bound to happen.   
+  List scanners = store.getScanners(sfs, cacheBlocks, 
get, usePread,
+isCompaction, matcher, scan.getStartRow(), scan.getStopRow(), 
this.readPt, false);
+  flushedstoreFileScanners.addAll(scanners);
   if (!CollectionUtils.isEmpty(memStoreScanners)) {
 clearAndClose(memStoreScannersAfterFlush);
 memStoreScannersAfterFlush.addAll(memStoreScanners);
@@ -871,13 +888,13 @@ public class StoreScanner extends 
NonReversedNonLazyKeyValueScanner
 List scanners = null;
 flushLock.lock();
 try {
-  List allScanners = new 
ArrayList<>(flushedStoreFiles.size() + memStoreScannersAfterFlush.size());
-  allScanners.addAll(store.getScanners(flushedStoreFiles, cacheBlocks, 
get, usePread,
-isCompaction, matcher, scan.getStartRow(), scan.getStopRow(), 
this.readPt, false));
+  List allScanners =
+  new ArrayList<>(flushedstoreFileScanners.size() + 
memStoreScannersAfterFlush.size());
+  allScanners.addAll(flushedstoreFileScanners);
   allScanners.addAll(memStoreScannersAfterFlush);
 

[hbase] Git Push Summary

2018-02-14 Thread apurtell
Repository: hbase
Updated Tags:  refs/tags/1.4.2RC0 [created] ec3276f53


[8/8] hbase git commit: HBASE-18282 ReplicationLogCleaner can delete WALs not yet replicated in case of a KeeperException

2018-02-14 Thread apurtell
HBASE-18282 ReplicationLogCleaner can delete WALs not yet replicated in case of 
a KeeperException

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2d83c763
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2d83c763
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2d83c763

Branch: refs/heads/branch-1.4
Commit: 2d83c7637a27603bc6b47795e5c7bfd7688e220e
Parents: 3c5871b
Author: Ben Lau 
Authored: Wed Feb 14 11:36:04 2018 -0800
Committer: Andrew Purtell 
Committed: Wed Feb 14 17:26:31 2018 -0800

--
 .../hbase/replication/ReplicationQueues.java|  3 +-
 .../ReplicationQueuesClientZKImpl.java  |  5 ++
 .../replication/ReplicationQueuesZKImpl.java| 10 
 .../replication/ReplicationStateZKBase.java |  8 ++-
 .../cleaner/ReplicationZKLockCleanerChore.java  |  4 +-
 .../master/ReplicationLogCleaner.java   | 10 +++-
 .../hbase/master/cleaner/TestLogsCleaner.java   | 54 
 7 files changed, 79 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2d83c763/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
index 2409111..ccc7172 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
@@ -126,8 +126,9 @@ public interface ReplicationQueues {
* Get a list of all region servers that have outstanding replication 
queues. These servers could
* be alive, dead or from a previous run of the cluster.
* @return a list of server names
+   * @throws ReplicationException
*/
-  List getListOfReplicators();
+  List getListOfReplicators() throws ReplicationException;
 
   /**
* Checks if the provided znode is the same as this region server's

http://git-wip-us.apache.org/repos/asf/hbase/blob/2d83c763/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
index 9078e40..14b4334 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
@@ -49,6 +49,11 @@ public class ReplicationQueuesClientZKImpl extends 
ReplicationStateZKBase implem
   }
 
   @Override
+  public List getListOfReplicators() throws KeeperException {
+return super.getListOfReplicatorsZK();
+  }
+
+  @Override
   public List getLogsInQueue(String serverName, String queueId) throws 
KeeperException {
 String znode = ZKUtil.joinZNode(this.queuesZNode, serverName);
 znode = ZKUtil.joinZNode(znode, queueId);

http://git-wip-us.apache.org/repos/asf/hbase/blob/2d83c763/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
index a1bd829..dda9adf 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
@@ -103,6 +103,16 @@ public class ReplicationQueuesZKImpl extends 
ReplicationStateZKBase implements R
   }
 
   @Override
+  public List getListOfReplicators() throws ReplicationException {
+try {
+  return super.getListOfReplicatorsZK();
+} catch (KeeperException e) {
+  LOG.warn("getListOfReplicators() from ZK failed", e);
+  throw new ReplicationException("getListOfReplicators() from ZK failed", 
e);
+}
+  }
+
+  @Override
   public void removeQueue(String queueId) {
 try {
   ZKUtil.deleteNodeRecursively(this.zookeeper, 
ZKUtil.joinZNode(this.myQueuesZnode, queueId));

http://git-wip-us.apache.org/repos/asf/hbase/blob/2d83c763/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java

[1/8] hbase git commit: HBASE-19970 Remove unused functions from TableAuthManager.

2018-02-14 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 e65004aee -> 28ebd29f0
  refs/heads/branch-1.2 0f3bf5489 -> ef847f841
  refs/heads/branch-1.3 0507413fe -> 9b1f379f2
  refs/heads/branch-1.4 7446b8eaf -> 9519ec2ea
  refs/heads/branch-2 9cbf936f9 -> 0743bda05


HBASE-19970 Remove unused functions from TableAuthManager.

Functions deleted: setTableUserPermissions, setTableGroupPermissions, 
setNamespaceUserPermissions,
setNamespaceGroupPermissions, writeTableToZooKeeper, writeNamespaceToZooKeeper


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f563b7cf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f563b7cf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f563b7cf

Branch: refs/heads/branch-1.4
Commit: f563b7cf0d3cc15f361f3bb7581db9faf26235a9
Parents: 7446b8e
Author: Apekshit Sharma 
Authored: Mon Feb 12 16:16:38 2018 -0800
Committer: Andrew Purtell 
Committed: Wed Feb 14 14:56:24 2018 -0800

--
 .../security/access/AccessControlLists.java |   9 +-
 .../hbase/security/access/AccessController.java |   4 +-
 .../hbase/security/access/TableAuthManager.java |  75 
 .../security/access/TestTablePermissions.java   |   2 +-
 .../access/TestZKPermissionWatcher.java | 179 +++
 .../access/TestZKPermissionsWatcher.java| 178 --
 6 files changed, 188 insertions(+), 259 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f563b7cf/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
index 57c0f7b..f508110 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
@@ -73,6 +73,10 @@ import org.apache.hadoop.io.Text;
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ListMultimap;
 import com.google.common.collect.Lists;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableFactories;
+import org.apache.hadoop.io.WritableUtils;
+import org.apache.jasper.tagplugins.jstl.core.Remove;
 
 /**
  * Maintains lists of permission grants to users and groups to allow for
@@ -667,8 +671,7 @@ public class AccessControlLists {
*
* Writes a set of permission [user: table permission]
*/
-  public static byte[] writePermissionsAsBytes(ListMultimap perms,
-  Configuration conf) {
+  public static byte[] writePermissionsAsBytes(ListMultimap perms) {
 return 
ProtobufUtil.prependPBMagic(ProtobufUtil.toUserTablePermissions(perms).toByteArray());
   }
 
@@ -755,7 +758,7 @@ public class AccessControlLists {
  // Deserialize the table permissions from the KV
  // TODO: This can be improved. Don't build UsersAndPermissions just 
to unpack it again,
  // use the builder
- AccessControlProtos.UsersAndPermissions.Builder builder = 
+ AccessControlProtos.UsersAndPermissions.Builder builder =
AccessControlProtos.UsersAndPermissions.newBuilder();
  ProtobufUtil.mergeFrom(builder, tag.getBuffer(), tag.getTagOffset(), 
tag.getTagLength());
  ListMultimap kvPerms =

http://git-wip-us.apache.org/repos/asf/hbase/blob/f563b7cf/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index fd0a704..c889a3e 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -243,7 +243,7 @@ public class AccessController extends 
BaseMasterAndRegionObserver
   tables.entrySet()) {
   byte[] entry = t.getKey();
   ListMultimap perms = t.getValue();
-  byte[] serialized = AccessControlLists.writePermissionsAsBytes(perms, 
conf);
+  byte[] serialized = AccessControlLists.writePermissionsAsBytes(perms);
   getAuthManager().getZKPermissionWatcher().writeToZookeeper(entry, 
serialized);
 }
 initialized = true;
@@ -275,7 +275,7 @@ public class AccessController extends 

[5/8] hbase git commit: HBASE-18282 ReplicationLogCleaner can delete WALs not yet replicated in case of a KeeperException

2018-02-14 Thread apurtell
HBASE-18282 ReplicationLogCleaner can delete WALs not yet replicated in case of 
a KeeperException

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9b1f379f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9b1f379f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9b1f379f

Branch: refs/heads/branch-1.3
Commit: 9b1f379f2e0f9ccb3bdaacb0aba7973ab05e35a0
Parents: 0507413
Author: Ben Lau 
Authored: Wed Feb 14 11:36:04 2018 -0800
Committer: Andrew Purtell 
Committed: Wed Feb 14 17:23:33 2018 -0800

--
 .../hbase/replication/ReplicationQueues.java|  3 +-
 .../ReplicationQueuesClientZKImpl.java  |  5 ++
 .../replication/ReplicationQueuesZKImpl.java| 10 
 .../replication/ReplicationStateZKBase.java |  8 ++-
 .../cleaner/ReplicationZKLockCleanerChore.java  |  4 +-
 .../master/ReplicationLogCleaner.java   | 10 +++-
 .../hbase/master/cleaner/TestLogsCleaner.java   | 54 
 7 files changed, 79 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9b1f379f/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
index 1b1c770..f86d78b 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
@@ -125,8 +125,9 @@ public interface ReplicationQueues {
* Get a list of all region servers that have outstanding replication 
queues. These servers could
* be alive, dead or from a previous run of the cluster.
* @return a list of server names
+   * @throws ReplicationException
*/
-  List getListOfReplicators();
+  List getListOfReplicators() throws ReplicationException;
 
   /**
* Checks if the provided znode is the same as this region server's

http://git-wip-us.apache.org/repos/asf/hbase/blob/9b1f379f/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
index cc407e3..413d613 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
@@ -47,6 +47,11 @@ public class ReplicationQueuesClientZKImpl extends 
ReplicationStateZKBase implem
   }
 
   @Override
+  public List getListOfReplicators() throws KeeperException {
+return super.getListOfReplicatorsZK();
+  }
+
+  @Override
   public List getLogsInQueue(String serverName, String queueId) throws 
KeeperException {
 String znode = ZKUtil.joinZNode(this.queuesZNode, serverName);
 znode = ZKUtil.joinZNode(znode, queueId);

http://git-wip-us.apache.org/repos/asf/hbase/blob/9b1f379f/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
index 559ab41..97a1762 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
@@ -98,6 +98,16 @@ public class ReplicationQueuesZKImpl extends 
ReplicationStateZKBase implements R
   }
 
   @Override
+  public List getListOfReplicators() throws ReplicationException {
+try {
+  return super.getListOfReplicatorsZK();
+} catch (KeeperException e) {
+  LOG.warn("getListOfReplicators() from ZK failed", e);
+  throw new ReplicationException("getListOfReplicators() from ZK failed", 
e);
+}
+  }
+
+  @Override
   public void removeQueue(String queueId) {
 try {
   ZKUtil.deleteNodeRecursively(this.zookeeper, 
ZKUtil.joinZNode(this.myQueuesZnode, queueId));

http://git-wip-us.apache.org/repos/asf/hbase/blob/9b1f379f/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java

[3/8] hbase git commit: HBASE-18282 ReplicationLogCleaner can delete WALs not yet replicated in case of a KeeperException

2018-02-14 Thread apurtell
HBASE-18282 ReplicationLogCleaner can delete WALs not yet replicated in case of 
a KeeperException

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0743bda0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0743bda0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0743bda0

Branch: refs/heads/branch-2
Commit: 0743bda059d5edd6ca7bbb788c54970dc1212055
Parents: 9cbf936
Author: Ben Lau 
Authored: Tue Feb 13 17:13:50 2018 -0800
Committer: Andrew Purtell 
Committed: Wed Feb 14 17:23:09 2018 -0800

--
 .../hbase/replication/ReplicationQueues.java|  3 +-
 .../ReplicationQueuesClientZKImpl.java  |  5 ++
 .../replication/ReplicationQueuesZKImpl.java| 12 +++-
 .../replication/ReplicationStateZKBase.java |  8 ++-
 .../master/ReplicationLogCleaner.java   | 12 +++-
 .../regionserver/ReplicationSourceManager.java  | 17 -
 .../hbase/master/cleaner/TestLogsCleaner.java   | 70 
 7 files changed, 107 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0743bda0/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
index 7f440b1..a2d21f7 100644
--- 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
@@ -119,8 +119,9 @@ public interface ReplicationQueues {
* Get a list of all region servers that have outstanding replication 
queues. These servers could
* be alive, dead or from a previous run of the cluster.
* @return a list of server names
+   * @throws ReplicationException
*/
-  List getListOfReplicators();
+  List getListOfReplicators() throws ReplicationException;
 
   /**
* Checks if the provided znode is the same as this region server's

http://git-wip-us.apache.org/repos/asf/hbase/blob/0743bda0/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
index e00a7a2..0eeba19 100644
--- 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
@@ -59,6 +59,11 @@ public class ReplicationQueuesClientZKImpl extends 
ReplicationStateZKBase implem
   throw new ReplicationException("Internal error while initializing a 
queues client", e);
 }
   }
+  
+  @Override
+  public List getListOfReplicators() throws KeeperException {
+return super.getListOfReplicatorsZK();
+  }
 
   @Override
   public List getLogsInQueue(String serverName, String queueId) throws 
KeeperException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/0743bda0/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
index 7551cb7..40bdeb8 100644
--- 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
@@ -100,7 +100,17 @@ public class ReplicationQueuesZKImpl extends 
ReplicationStateZKBase implements R
   }
 }
   }
-
+  
+  @Override
+  public List getListOfReplicators() throws ReplicationException {
+try {
+  return super.getListOfReplicatorsZK();
+} catch (KeeperException e) {
+  LOG.warn("getListOfReplicators() from ZK failed", e);
+  throw new ReplicationException("getListOfReplicators() from ZK failed", 
e);
+}
+  }
+  
   @Override
   public void removeQueue(String queueId) {
 try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/0743bda0/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java

[7/8] hbase git commit: Update POMs and CHANGES.txt for 1.4.2 RC0

2018-02-14 Thread apurtell
Update POMs and CHANGES.txt for 1.4.2 RC0


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9519ec2e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9519ec2e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9519ec2e

Branch: refs/heads/branch-1.4
Commit: 9519ec2ead17ba3cf81c00618952a9be612574c1
Parents: 2d83c76
Author: Andrew Purtell 
Authored: Wed Feb 14 15:28:49 2018 -0800
Committer: Andrew Purtell 
Committed: Wed Feb 14 17:26:31 2018 -0800

--
 CHANGES.txt | 31 
 hbase-annotations/pom.xml   |  2 +-
 .../hbase-archetype-builder/pom.xml |  2 +-
 hbase-archetypes/hbase-client-project/pom.xml   |  2 +-
 .../hbase-shaded-client-project/pom.xml |  2 +-
 hbase-archetypes/pom.xml|  2 +-
 hbase-assembly/pom.xml  |  2 +-
 hbase-checkstyle/pom.xml|  4 +--
 hbase-client/pom.xml|  2 +-
 hbase-common/pom.xml|  2 +-
 hbase-error-prone/pom.xml   |  4 +--
 hbase-examples/pom.xml  |  2 +-
 hbase-external-blockcache/pom.xml   |  2 +-
 hbase-hadoop-compat/pom.xml |  2 +-
 hbase-hadoop2-compat/pom.xml|  2 +-
 hbase-it/pom.xml|  2 +-
 hbase-metrics-api/pom.xml   |  2 +-
 hbase-metrics/pom.xml   |  2 +-
 hbase-prefix-tree/pom.xml   |  2 +-
 hbase-procedure/pom.xml |  2 +-
 hbase-protocol/pom.xml  |  2 +-
 hbase-resource-bundle/pom.xml   |  2 +-
 hbase-rest/pom.xml  |  2 +-
 hbase-rsgroup/pom.xml   |  2 +-
 hbase-server/pom.xml|  2 +-
 .../hbase-shaded-check-invariants/pom.xml   |  2 +-
 hbase-shaded/hbase-shaded-client/pom.xml|  2 +-
 hbase-shaded/hbase-shaded-server/pom.xml|  2 +-
 hbase-shaded/pom.xml|  2 +-
 hbase-shell/pom.xml |  2 +-
 hbase-testing-util/pom.xml  |  2 +-
 hbase-thrift/pom.xml|  2 +-
 pom.xml |  2 +-
 33 files changed, 65 insertions(+), 34 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9519ec2e/CHANGES.txt
--
diff --git a/CHANGES.txt b/CHANGES.txt
index a64043f..b50c118 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,5 +1,36 @@
 HBase Change Log
 
+Release Notes - HBase - Version 1.4.2 2/23/2018
+
+** Sub-task
+* [HBASE-19877] - hbase-common and hbase-zookeeper don't add the 
log4j.properties to the resource path for testing
+
+** Bug
+* [HBASE-18282] - ReplicationLogCleaner can delete WALs not yet replicated 
in case of a KeeperException
+* [HBASE-19728] - Add lock to filesCompacting in all place.
+* [HBASE-19871] - delete.rb should require user to provide the column
+* [HBASE-19876] - The exception happening in converting pb mutation to 
hbase.mutation messes up the CellScanner
+* [HBASE-19892] - Checking 'patch attach' and yetus 0.7.0 and move to 
Yetus 0.7.0
+* [HBASE-19900] - Region-level exception destroy the result of batch
+* [HBASE-19901] - Up yetus proclimit on nightlies
+* [HBASE-19905] - ReplicationSyncUp tool will not exit if a peer 
replication is disabled
+* [HBASE-19934] - HBaseSnapshotException when read replicas is enabled and 
online snapshot is taken after region splitting
+* [HBASE-19937] - Ensure createRSGroupTable be called after 
ProcedureExecutor and LoadBalancer are initialized 
+* [HBASE-19972] - Should rethrow the RetriesExhaustedWithDetailsException when failed to apply the batch in ReplicationSink
+* [HBASE-19979] - ReplicationSyncUp tool may leak Zookeeper connection
+* [HBASE-19981] - Boolean#getBoolean is used to parse value
+* [HBASE-19996] - Some nonce procs might not be cleaned up (follow up 
HBASE-19756)
+
+** New Feature
+* [HBASE-19886] - Display maintenance mode in shell, web UI
+
+** Improvement
+* [HBASE-19917] - Improve RSGroupBasedLoadBalancer#filterServers() to be 
more efficient
+
+** Test
+* [HBASE-19949] - TestRSGroupsWithACL fails with 
ExceptionInInitializerError
+
+
 Release Notes - HBase - Version 1.4.1 2/2/2018
 
 ** Sub-task

http://git-wip-us.apache.org/repos/asf/hbase/blob/9519ec2e/hbase-annotations/pom.xml
--
diff --git a/hbase-annotations/pom.xml 

[2/8] hbase git commit: HBASE-19970 (addendum for 1.x only) Remove unused functions from TableAuthManager.

2018-02-14 Thread apurtell
HBASE-19970 (addendum for 1.x only) Remove unused functions from 
TableAuthManager.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3c5871b4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3c5871b4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3c5871b4

Branch: refs/heads/branch-1.4
Commit: 3c5871b4ee8d6c72a6229e88057ac54e4a352d93
Parents: f563b7c
Author: Apekshit Sharma 
Authored: Mon Feb 12 16:30:48 2018 -0800
Committer: Andrew Purtell 
Committed: Wed Feb 14 14:56:36 2018 -0800

--
 .../hadoop/hbase/security/access/TestZKPermissionWatcher.java  | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3c5871b4/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java
index a80f184..0961cab 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java
@@ -47,8 +47,8 @@ import org.junit.experimental.categories.Category;
  * Test the reading and writing of access permissions to and from zookeeper.
  */
 @Category(LargeTests.class)
-public class TestZKPermissionsWatcher {
-  private static final Log LOG = 
LogFactory.getLog(TestZKPermissionsWatcher.class);
+public class TestZKPermissionWatcher {
+  private static final Log LOG = 
LogFactory.getLog(TestZKPermissionWatcher.class);
   private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
   private static TableAuthManager AUTH_A;
   private static TableAuthManager AUTH_B;
@@ -90,7 +90,7 @@ public class TestZKPermissionsWatcher {
   }
 
   private void setTableACL(
-  User user, TableAuthManager srcAuthManager, TableAuthManager 
destAuthManager,
+  User user, TableAuthManager srcAuthManager, final TableAuthManager 
destAuthManager,
   TablePermission.Action... actions) throws Exception{
 // update ACL: george RW
 ListMultimap<String, TablePermission> perms = ArrayListMultimap.create();



[6/8] hbase git commit: HBASE-18282 ReplicationLogCleaner can delete WALs not yet replicated in case of a KeeperException

2018-02-14 Thread apurtell
HBASE-18282 ReplicationLogCleaner can delete WALs not yet replicated in case of 
a KeeperException

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ef847f84
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ef847f84
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ef847f84

Branch: refs/heads/branch-1.2
Commit: ef847f8417b0a300f242fe76769d46d7efb86570
Parents: 0f3bf54
Author: Ben Lau 
Authored: Wed Feb 14 11:36:04 2018 -0800
Committer: Andrew Purtell 
Committed: Wed Feb 14 17:23:38 2018 -0800

--
 .../hbase/replication/ReplicationQueues.java|  3 +-
 .../ReplicationQueuesClientZKImpl.java  |  5 ++
 .../replication/ReplicationQueuesZKImpl.java| 10 
 .../replication/ReplicationStateZKBase.java |  8 ++-
 .../cleaner/ReplicationZKLockCleanerChore.java  |  4 +-
 .../master/ReplicationLogCleaner.java   | 10 +++-
 .../hbase/master/cleaner/TestLogsCleaner.java   | 54 
 7 files changed, 79 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ef847f84/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
index 3dbbc33..f1457e0 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
@@ -104,8 +104,9 @@ public interface ReplicationQueues {
* Get a list of all region servers that have outstanding replication 
queues. These servers could
* be alive, dead or from a previous run of the cluster.
* @return a list of server names
+   * @throws ReplicationException
*/
-  List<String> getListOfReplicators();
+  List<String> getListOfReplicators() throws ReplicationException;
 
   /**
* Checks if the provided znode is the same as this region server's

http://git-wip-us.apache.org/repos/asf/hbase/blob/ef847f84/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
index e1a6a49..93a932f 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
@@ -47,6 +47,11 @@ public class ReplicationQueuesClientZKImpl extends 
ReplicationStateZKBase implem
   }
 
   @Override
+  public List<String> getListOfReplicators() throws KeeperException {
+    return super.getListOfReplicatorsZK();
+  }
+
+  @Override
   public List<String> getLogsInQueue(String serverName, String queueId) throws KeeperException {
 String znode = ZKUtil.joinZNode(this.queuesZNode, serverName);
 znode = ZKUtil.joinZNode(znode, queueId);

http://git-wip-us.apache.org/repos/asf/hbase/blob/ef847f84/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
index 35e5087..3085394 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
@@ -88,6 +88,16 @@ public class ReplicationQueuesZKImpl extends 
ReplicationStateZKBase implements R
   }
 
   @Override
+  public List<String> getListOfReplicators() throws ReplicationException {
+    try {
+      return super.getListOfReplicatorsZK();
+    } catch (KeeperException e) {
+      LOG.warn("getListOfReplicators() from ZK failed", e);
+      throw new ReplicationException("getListOfReplicators() from ZK failed", e);
+    }
+  }
+
+  @Override
   public void removeQueue(String queueId) {
 try {
   ZKUtil.deleteNodeRecursively(this.zookeeper, 
ZKUtil.joinZNode(this.myQueuesZnode, queueId));

http://git-wip-us.apache.org/repos/asf/hbase/blob/ef847f84/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java

[4/8] hbase git commit: HBASE-18282 ReplicationLogCleaner can delete WALs not yet replicated in case of a KeeperException

2018-02-14 Thread apurtell
HBASE-18282 ReplicationLogCleaner can delete WALs not yet replicated in case of 
a KeeperException

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/28ebd29f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/28ebd29f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/28ebd29f

Branch: refs/heads/branch-1
Commit: 28ebd29f0fa2c581d740db5738dd6f69aad3e3ef
Parents: e65004a
Author: Ben Lau 
Authored: Wed Feb 14 11:36:04 2018 -0800
Committer: Andrew Purtell 
Committed: Wed Feb 14 17:23:23 2018 -0800

--
 .../hbase/replication/ReplicationQueues.java|  3 +-
 .../ReplicationQueuesClientZKImpl.java  |  5 ++
 .../replication/ReplicationQueuesZKImpl.java| 10 
 .../replication/ReplicationStateZKBase.java |  8 ++-
 .../cleaner/ReplicationZKLockCleanerChore.java  |  4 +-
 .../master/ReplicationLogCleaner.java   | 10 +++-
 .../hbase/master/cleaner/TestLogsCleaner.java   | 54 
 7 files changed, 79 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/28ebd29f/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
index 2409111..ccc7172 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
@@ -126,8 +126,9 @@ public interface ReplicationQueues {
* Get a list of all region servers that have outstanding replication 
queues. These servers could
* be alive, dead or from a previous run of the cluster.
* @return a list of server names
+   * @throws ReplicationException
*/
-  List<String> getListOfReplicators();
+  List<String> getListOfReplicators() throws ReplicationException;
 
   /**
* Checks if the provided znode is the same as this region server's

http://git-wip-us.apache.org/repos/asf/hbase/blob/28ebd29f/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
index 9078e40..14b4334 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
@@ -49,6 +49,11 @@ public class ReplicationQueuesClientZKImpl extends 
ReplicationStateZKBase implem
   }
 
   @Override
+  public List<String> getListOfReplicators() throws KeeperException {
+    return super.getListOfReplicatorsZK();
+  }
+
+  @Override
   public List<String> getLogsInQueue(String serverName, String queueId) throws KeeperException {
 String znode = ZKUtil.joinZNode(this.queuesZNode, serverName);
 znode = ZKUtil.joinZNode(znode, queueId);

http://git-wip-us.apache.org/repos/asf/hbase/blob/28ebd29f/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
index a1bd829..dda9adf 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
@@ -103,6 +103,16 @@ public class ReplicationQueuesZKImpl extends 
ReplicationStateZKBase implements R
   }
 
   @Override
+  public List<String> getListOfReplicators() throws ReplicationException {
+    try {
+      return super.getListOfReplicatorsZK();
+    } catch (KeeperException e) {
+      LOG.warn("getListOfReplicators() from ZK failed", e);
+      throw new ReplicationException("getListOfReplicators() from ZK failed", e);
+    }
+  }
+
+  @Override
   public void removeQueue(String queueId) {
 try {
   ZKUtil.deleteNodeRecursively(this.zookeeper, 
ZKUtil.joinZNode(this.myQueuesZnode, queueId));

http://git-wip-us.apache.org/repos/asf/hbase/blob/28ebd29f/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java

hbase git commit: HBASE-19980 NullPointerException when restoring a snapshot after splitting a region

2018-02-14 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-2 8a3b4cdc6 -> 9cbf936f9


HBASE-19980 NullPointerException when restoring a snapshot after splitting a 
region

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9cbf936f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9cbf936f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9cbf936f

Branch: refs/heads/branch-2
Commit: 9cbf936f909cee33d628dd8a2bdb5bef3bc54391
Parents: 8a3b4cd
Author: Toshihiro Suzuki 
Authored: Wed Feb 14 19:55:59 2018 +0900
Committer: tedyu 
Committed: Wed Feb 14 09:38:16 2018 -0800

--
 .../hbase/snapshot/RestoreSnapshotHelper.java   | 89 
 .../client/TestRestoreSnapshotFromClient.java   | 20 +
 2 files changed, 73 insertions(+), 36 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9cbf936f/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
index 404f8ff..c4f0e25 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
@@ -195,11 +195,33 @@ public class RestoreSnapshotHelper {
 // this instance, by removing the regions already present in the restore 
dir.
     Set<String> regionNames = new HashSet<>(regionManifests.keySet());
 
+    List<RegionInfo> tableRegions = getTableRegions();
+
 RegionInfo mobRegion = 
MobUtils.getMobRegionInfo(snapshotManifest.getTableDescriptor()
 .getTableName());
+if (tableRegions != null) {
+  // restore the mob region in case
+  if (regionNames.contains(mobRegion.getEncodedName())) {
+monitor.rethrowException();
+status.setStatus("Restoring mob region...");
+        List<RegionInfo> mobRegions = new ArrayList<>(1);
+        mobRegions.add(mobRegion);
+restoreHdfsMobRegions(exec, regionManifests, mobRegions);
+regionNames.remove(mobRegion.getEncodedName());
+status.setStatus("Finished restoring mob region.");
+  }
+}
+if (regionNames.contains(mobRegion.getEncodedName())) {
+  // add the mob region
+  monitor.rethrowException();
+  status.setStatus("Cloning mob region...");
+  cloneHdfsMobRegion(regionManifests, mobRegion);
+  regionNames.remove(mobRegion.getEncodedName());
+  status.setStatus("Finished cloning mob region.");
+}
+
 // Identify which region are still available and which not.
 // NOTE: we rely upon the region name as: "table name, start key, end key"
-    List<RegionInfo> tableRegions = getTableRegions();
 if (tableRegions != null) {
   monitor.rethrowException();
   for (RegionInfo regionInfo: tableRegions) {
@@ -213,50 +235,40 @@ public class RestoreSnapshotHelper {
   metaChanges.addRegionToRemove(regionInfo);
 }
   }
-
-  // Restore regions using the snapshot data
-  monitor.rethrowException();
-  status.setStatus("Restoring table regions...");
-  if (regionNames.contains(mobRegion.getEncodedName())) {
-// restore the mob region in case
-List mobRegions = new ArrayList<>(1);
-mobRegions.add(mobRegion);
-restoreHdfsMobRegions(exec, regionManifests, mobRegions);
-regionNames.remove(mobRegion.getEncodedName());
-  }
-  restoreHdfsRegions(exec, regionManifests, 
metaChanges.getRegionsToRestore());
-  status.setStatus("Finished restoring all table regions.");
-
-  // Remove regions from the current table
-  monitor.rethrowException();
-  status.setStatus("Starting to delete excess regions from table");
-  removeHdfsRegions(exec, metaChanges.getRegionsToRemove());
-  status.setStatus("Finished deleting excess regions from table.");
 }
 
 // Regions to Add: present in the snapshot but not in the current table
+    List<RegionInfo> regionsToAdd = new ArrayList<>(regionNames.size());
     if (regionNames.size() > 0) {
-      List<RegionInfo> regionsToAdd = new ArrayList<>(regionNames.size());
-
   monitor.rethrowException();
-  // add the mob region
-  if (regionNames.contains(mobRegion.getEncodedName())) {
-cloneHdfsMobRegion(regionManifests, mobRegion);
-regionNames.remove(mobRegion.getEncodedName());
-  }
   for (String regionName: regionNames) {
 LOG.info("region to add: " + regionName);
-
regionsToAdd.add(ProtobufUtil.toRegionInfo(regionManifests.get(regionName).getRegionInfo()));
+

hbase git commit: HBASE-19980 NullPointerException when restoring a snapshot after splitting a region

2018-02-14 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 8d26736bc -> d0f2d18ca


HBASE-19980 NullPointerException when restoring a snapshot after splitting a 
region

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d0f2d18c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d0f2d18c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d0f2d18c

Branch: refs/heads/master
Commit: d0f2d18ca73737764550b319f749a51c876cca39
Parents: 8d26736
Author: Toshihiro Suzuki 
Authored: Wed Feb 14 19:55:59 2018 +0900
Committer: tedyu 
Committed: Wed Feb 14 09:37:16 2018 -0800

--
 .../hbase/snapshot/RestoreSnapshotHelper.java   | 89 
 .../client/TestRestoreSnapshotFromClient.java   | 20 +
 2 files changed, 73 insertions(+), 36 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d0f2d18c/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
index 404f8ff..c4f0e25 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
@@ -195,11 +195,33 @@ public class RestoreSnapshotHelper {
 // this instance, by removing the regions already present in the restore 
dir.
     Set<String> regionNames = new HashSet<>(regionManifests.keySet());
 
+    List<RegionInfo> tableRegions = getTableRegions();
+
 RegionInfo mobRegion = 
MobUtils.getMobRegionInfo(snapshotManifest.getTableDescriptor()
 .getTableName());
+if (tableRegions != null) {
+  // restore the mob region in case
+  if (regionNames.contains(mobRegion.getEncodedName())) {
+monitor.rethrowException();
+status.setStatus("Restoring mob region...");
+        List<RegionInfo> mobRegions = new ArrayList<>(1);
+        mobRegions.add(mobRegion);
+restoreHdfsMobRegions(exec, regionManifests, mobRegions);
+regionNames.remove(mobRegion.getEncodedName());
+status.setStatus("Finished restoring mob region.");
+  }
+}
+if (regionNames.contains(mobRegion.getEncodedName())) {
+  // add the mob region
+  monitor.rethrowException();
+  status.setStatus("Cloning mob region...");
+  cloneHdfsMobRegion(regionManifests, mobRegion);
+  regionNames.remove(mobRegion.getEncodedName());
+  status.setStatus("Finished cloning mob region.");
+}
+
 // Identify which region are still available and which not.
 // NOTE: we rely upon the region name as: "table name, start key, end key"
-    List<RegionInfo> tableRegions = getTableRegions();
 if (tableRegions != null) {
   monitor.rethrowException();
   for (RegionInfo regionInfo: tableRegions) {
@@ -213,50 +235,40 @@ public class RestoreSnapshotHelper {
   metaChanges.addRegionToRemove(regionInfo);
 }
   }
-
-  // Restore regions using the snapshot data
-  monitor.rethrowException();
-  status.setStatus("Restoring table regions...");
-  if (regionNames.contains(mobRegion.getEncodedName())) {
-// restore the mob region in case
-List mobRegions = new ArrayList<>(1);
-mobRegions.add(mobRegion);
-restoreHdfsMobRegions(exec, regionManifests, mobRegions);
-regionNames.remove(mobRegion.getEncodedName());
-  }
-  restoreHdfsRegions(exec, regionManifests, 
metaChanges.getRegionsToRestore());
-  status.setStatus("Finished restoring all table regions.");
-
-  // Remove regions from the current table
-  monitor.rethrowException();
-  status.setStatus("Starting to delete excess regions from table");
-  removeHdfsRegions(exec, metaChanges.getRegionsToRemove());
-  status.setStatus("Finished deleting excess regions from table.");
 }
 
 // Regions to Add: present in the snapshot but not in the current table
+    List<RegionInfo> regionsToAdd = new ArrayList<>(regionNames.size());
     if (regionNames.size() > 0) {
-      List<RegionInfo> regionsToAdd = new ArrayList<>(regionNames.size());
-
   monitor.rethrowException();
-  // add the mob region
-  if (regionNames.contains(mobRegion.getEncodedName())) {
-cloneHdfsMobRegion(regionManifests, mobRegion);
-regionNames.remove(mobRegion.getEncodedName());
-  }
   for (String regionName: regionNames) {
 LOG.info("region to add: " + regionName);
-
regionsToAdd.add(ProtobufUtil.toRegionInfo(regionManifests.get(regionName).getRegionInfo()));
+

hbase git commit: HBASE-19996 Some nonce procs might not be cleaned up (follow up HBASE-19756)

2018-02-14 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 335b8a8e1 -> e65004aee


HBASE-19996 Some nonce procs might not be cleaned up (follow up HBASE-19756)

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e65004ae
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e65004ae
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e65004ae

Branch: refs/heads/branch-1
Commit: e65004aeea09e751624729a896409eef57f19a1e
Parents: 335b8a8
Author: Thiruvel Thirumoolan 
Authored: Tue Feb 13 17:38:16 2018 -0800
Committer: tedyu 
Committed: Wed Feb 14 09:17:13 2018 -0800

--
 .../hbase/procedure2/ProcedureExecutor.java |  30 +++--
 .../client/TestNonceProcCleanerOnFailure.java   | 101 --
 .../hbase/procedure/TestFailedProcCleanup.java  | 135 +++
 3 files changed, 157 insertions(+), 109 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e65004ae/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index f04a409..6e517aa 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -50,6 +50,7 @@ import 
org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator
 import org.apache.hadoop.hbase.procedure2.util.StringUtils;
 import org.apache.hadoop.hbase.procedure2.util.TimeoutBlockingQueue;
 import 
org.apache.hadoop.hbase.procedure2.util.TimeoutBlockingQueue.TimeoutRetriever;
+import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos;
 import 
org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -178,17 +179,19 @@ public class ProcedureExecutor {
 // TODO: Select TTL based on Procedure type
 if ((procInfo.hasClientAckTime() && (now - 
procInfo.getClientAckTime()) >= evictAckTtl) ||
 (now - procInfo.getLastUpdate()) >= evictTtl) {
-  if (isDebugEnabled) {
-LOG.debug("Evict completed procedure: " + procInfo);
+  // Failed Procedures aren't persisted in WAL.
+  if (!(procInfo instanceof FailedProcedureInfo)) {
+store.delete(entry.getKey());
   }
+  it.remove();
+
   NonceKey nonceKey = procInfo.getNonceKey();
-  // Nonce procedures aren't persisted in WAL.
-  if (nonceKey == null) {
-store.delete(entry.getKey());
-  } else {
+  if (nonceKey != null) {
 nonceKeysToProcIdsMap.remove(nonceKey);
   }
-  it.remove();
+  if (isDebugEnabled) {
+LOG.debug("Evict completed procedure: " + procInfo);
+  }
 }
   }
 }
@@ -696,7 +699,7 @@ public class ProcedureExecutor {
 if (procId == null || completed.containsKey(procId)) return;
 
 final long currentTime = EnvironmentEdgeManager.currentTime();
-final ProcedureInfo result = new ProcedureInfo(
+final ProcedureInfo result = new FailedProcedureInfo(
   procId.longValue(),
   procName,
   procOwner != null ? procOwner.getShortName() : null,
@@ -710,6 +713,17 @@ public class ProcedureExecutor {
 completed.putIfAbsent(procId, result);
   }
 
+  public static class FailedProcedureInfo extends ProcedureInfo {
+
+public FailedProcedureInfo(long procId, String procName, String procOwner,
+ProcedureState procState, long parentId, NonceKey nonceKey,
+ErrorHandlingProtos.ForeignExceptionMessage exception, long 
lastUpdate, long startTime,
+byte[] result) {
+  super(procId, procName, procOwner, procState, parentId, nonceKey, 
exception, lastUpdate,
+  startTime, result);
+}
+  }
+
   // ==
   //  Submit/Abort Procedure
   // ==

http://git-wip-us.apache.org/repos/asf/hbase/blob/e65004ae/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestNonceProcCleanerOnFailure.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestNonceProcCleanerOnFailure.java
 

hbase git commit: HBASE-19996 Some nonce procs might not be cleaned up (follow up HBASE-19756)

2018-02-14 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 21152acbe -> 7446b8eaf


HBASE-19996 Some nonce procs might not be cleaned up (follow up HBASE-19756)

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7446b8ea
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7446b8ea
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7446b8ea

Branch: refs/heads/branch-1.4
Commit: 7446b8eaf21a2ef57a9352e2000c5b192b0512f9
Parents: 21152ac
Author: Thiruvel Thirumoolan 
Authored: Tue Feb 13 17:38:16 2018 -0800
Committer: tedyu 
Committed: Wed Feb 14 09:16:44 2018 -0800

--
 .../hbase/procedure2/ProcedureExecutor.java |  30 +++--
 .../client/TestNonceProcCleanerOnFailure.java   | 101 --
 .../hbase/procedure/TestFailedProcCleanup.java  | 135 +++
 3 files changed, 157 insertions(+), 109 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7446b8ea/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index f04a409..6e517aa 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -50,6 +50,7 @@ import 
org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator
 import org.apache.hadoop.hbase.procedure2.util.StringUtils;
 import org.apache.hadoop.hbase.procedure2.util.TimeoutBlockingQueue;
 import 
org.apache.hadoop.hbase.procedure2.util.TimeoutBlockingQueue.TimeoutRetriever;
+import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos;
 import 
org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -178,17 +179,19 @@ public class ProcedureExecutor {
 // TODO: Select TTL based on Procedure type
 if ((procInfo.hasClientAckTime() && (now - 
procInfo.getClientAckTime()) >= evictAckTtl) ||
 (now - procInfo.getLastUpdate()) >= evictTtl) {
-  if (isDebugEnabled) {
-LOG.debug("Evict completed procedure: " + procInfo);
+  // Failed Procedures aren't persisted in WAL.
+  if (!(procInfo instanceof FailedProcedureInfo)) {
+store.delete(entry.getKey());
   }
+  it.remove();
+
   NonceKey nonceKey = procInfo.getNonceKey();
-  // Nonce procedures aren't persisted in WAL.
-  if (nonceKey == null) {
-store.delete(entry.getKey());
-  } else {
+  if (nonceKey != null) {
 nonceKeysToProcIdsMap.remove(nonceKey);
   }
-  it.remove();
+  if (isDebugEnabled) {
+LOG.debug("Evict completed procedure: " + procInfo);
+  }
 }
   }
 }
@@ -696,7 +699,7 @@ public class ProcedureExecutor {
 if (procId == null || completed.containsKey(procId)) return;
 
 final long currentTime = EnvironmentEdgeManager.currentTime();
-final ProcedureInfo result = new ProcedureInfo(
+final ProcedureInfo result = new FailedProcedureInfo(
   procId.longValue(),
   procName,
   procOwner != null ? procOwner.getShortName() : null,
@@ -710,6 +713,17 @@ public class ProcedureExecutor {
 completed.putIfAbsent(procId, result);
   }
 
+  public static class FailedProcedureInfo extends ProcedureInfo {
+
+public FailedProcedureInfo(long procId, String procName, String procOwner,
+ProcedureState procState, long parentId, NonceKey nonceKey,
+ErrorHandlingProtos.ForeignExceptionMessage exception, long 
lastUpdate, long startTime,
+byte[] result) {
+  super(procId, procName, procOwner, procState, parentId, nonceKey, 
exception, lastUpdate,
+  startTime, result);
+}
+  }
+
   // ==
   //  Submit/Abort Procedure
   // ==

http://git-wip-us.apache.org/repos/asf/hbase/blob/7446b8ea/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestNonceProcCleanerOnFailure.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestNonceProcCleanerOnFailure.java
 

hbase git commit: HBASE-19116 Currently the tail of hfiles with CellComparator* classname makes it so hbase1 can't open hbase2 written hfiles; fix

2018-02-14 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 2f1b3eab6 -> 8d26736bc


HBASE-19116 Currently the tail of hfiles with CellComparator* classname makes 
it so hbase1 can't open hbase2 written hfiles; fix

Serializing, if appropriate, write the hbase-1.x version of the
Comparator to the hfile trailer so hbase-1.x files can read hbase-2.x
hfiles (they are the same format).


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8d26736b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8d26736b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8d26736b

Branch: refs/heads/master
Commit: 8d26736bc2b0c28efd5caa3be7d8c9037dba633a
Parents: 2f1b3ea
Author: Michael Stack 
Authored: Tue Feb 13 10:28:45 2018 -0800
Committer: Michael Stack 
Committed: Wed Feb 14 07:46:30 2018 -0800

--
 .../hadoop/hbase/io/hfile/FixedFileTrailer.java | 86 ++--
 .../hbase/io/hfile/TestFixedFileTrailer.java| 18 +++-
 2 files changed, 77 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8d26736b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
index a0d3df3..55b2ee0 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.io.hfile;
 
+
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInput;
@@ -185,34 +186,37 @@ public class FixedFileTrailer {
 baos.writeTo(outputStream);
   }
 
-  /**
-   * Write trailer data as protobuf
-   * @param outputStream
-   * @throws IOException
-   */
-  void serializeAsPB(DataOutputStream output) throws IOException {
-ByteArrayOutputStream baos = new ByteArrayOutputStream();
+  @org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting
+  HFileProtos.FileTrailerProto toProtobuf() {
 HFileProtos.FileTrailerProto.Builder builder = 
HFileProtos.FileTrailerProto.newBuilder()
-  .setFileInfoOffset(fileInfoOffset)
-  .setLoadOnOpenDataOffset(loadOnOpenDataOffset)
-  .setUncompressedDataIndexSize(uncompressedDataIndexSize)
-  .setTotalUncompressedBytes(totalUncompressedBytes)
-  .setDataIndexCount(dataIndexCount)
-  .setMetaIndexCount(metaIndexCount)
-  .setEntryCount(entryCount)
-  .setNumDataIndexLevels(numDataIndexLevels)
-  .setFirstDataBlockOffset(firstDataBlockOffset)
-  .setLastDataBlockOffset(lastDataBlockOffset)
-  // TODO this is a classname encoded into an  HFile's trailer. We are 
going to need to have
-  // some compat code here.
-  .setComparatorClassName(comparatorClassName)
-  .setCompressionCodec(compressionCodec.ordinal());
+.setFileInfoOffset(fileInfoOffset)
+.setLoadOnOpenDataOffset(loadOnOpenDataOffset)
+.setUncompressedDataIndexSize(uncompressedDataIndexSize)
+.setTotalUncompressedBytes(totalUncompressedBytes)
+.setDataIndexCount(dataIndexCount)
+.setMetaIndexCount(metaIndexCount)
+.setEntryCount(entryCount)
+.setNumDataIndexLevels(numDataIndexLevels)
+.setFirstDataBlockOffset(firstDataBlockOffset)
+.setLastDataBlockOffset(lastDataBlockOffset)
+.setComparatorClassName(getHBase1CompatibleName(comparatorClassName))
+.setCompressionCodec(compressionCodec.ordinal());
 if (encryptionKey != null) {
   builder.setEncryptionKey(UnsafeByteOperations.unsafeWrap(encryptionKey));
 }
+return builder.build();
+  }
+
+  /**
+   * Write trailer data as protobuf.
+   * NOTE: we run a translation on the comparator name and will serialize the 
old hbase-1.x where
+   * it makes sense. See {@link #getHBase1CompatibleName(String)}.
+   */
+  void serializeAsPB(DataOutputStream output) throws IOException {
+ByteArrayOutputStream baos = new ByteArrayOutputStream();
 // We need this extra copy unfortunately to determine the final size of the
 // delimited output, see use of baos.size() below.
-builder.build().writeDelimitedTo(baos);
+toProtobuf().writeDelimitedTo(baos);
 baos.writeTo(output);
 // Pad to make up the difference between variable PB encoding length and 
the
 // length when encoded as writable under earlier V2 formats. Failure to pad
@@ -298,8 +302,6 @@ public class FixedFileTrailer {
   lastDataBlockOffset = trailerProto.getLastDataBlockOffset();
 }
 

hbase git commit: HBASE-19116 Currently the tail of hfiles with CellComparator* classname makes it so hbase1 can't open hbase2 written hfiles; fix

2018-02-14 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 02bba3b78 -> 8a3b4cdc6


HBASE-19116 Currently the tail of hfiles with CellComparator* classname makes 
it so hbase1 can't open hbase2 written hfiles; fix

Serializing, if appropriate, write the hbase-1.x version of the
Comparator to the hfile trailer so hbase-1.x files can read hbase-2.x
hfiles (they are the same format).


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8a3b4cdc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8a3b4cdc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8a3b4cdc

Branch: refs/heads/branch-2
Commit: 8a3b4cdc676953072719d9d45981fdaff657c0c5
Parents: 02bba3b
Author: Michael Stack 
Authored: Tue Feb 13 10:28:45 2018 -0800
Committer: Michael Stack 
Committed: Wed Feb 14 07:46:05 2018 -0800

--
 .../hadoop/hbase/io/hfile/FixedFileTrailer.java | 86 ++--
 .../hbase/io/hfile/TestFixedFileTrailer.java| 18 +++-
 2 files changed, 77 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8a3b4cdc/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
index a0d3df3..55b2ee0 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.io.hfile;
 
+
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInput;
@@ -185,34 +186,37 @@ public class FixedFileTrailer {
 baos.writeTo(outputStream);
   }
 
-  /**
-   * Write trailer data as protobuf
-   * @param outputStream
-   * @throws IOException
-   */
-  void serializeAsPB(DataOutputStream output) throws IOException {
-ByteArrayOutputStream baos = new ByteArrayOutputStream();
+  @org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting
+  HFileProtos.FileTrailerProto toProtobuf() {
 HFileProtos.FileTrailerProto.Builder builder = 
HFileProtos.FileTrailerProto.newBuilder()
-  .setFileInfoOffset(fileInfoOffset)
-  .setLoadOnOpenDataOffset(loadOnOpenDataOffset)
-  .setUncompressedDataIndexSize(uncompressedDataIndexSize)
-  .setTotalUncompressedBytes(totalUncompressedBytes)
-  .setDataIndexCount(dataIndexCount)
-  .setMetaIndexCount(metaIndexCount)
-  .setEntryCount(entryCount)
-  .setNumDataIndexLevels(numDataIndexLevels)
-  .setFirstDataBlockOffset(firstDataBlockOffset)
-  .setLastDataBlockOffset(lastDataBlockOffset)
-  // TODO this is a classname encoded into an  HFile's trailer. We are 
going to need to have
-  // some compat code here.
-  .setComparatorClassName(comparatorClassName)
-  .setCompressionCodec(compressionCodec.ordinal());
+.setFileInfoOffset(fileInfoOffset)
+.setLoadOnOpenDataOffset(loadOnOpenDataOffset)
+.setUncompressedDataIndexSize(uncompressedDataIndexSize)
+.setTotalUncompressedBytes(totalUncompressedBytes)
+.setDataIndexCount(dataIndexCount)
+.setMetaIndexCount(metaIndexCount)
+.setEntryCount(entryCount)
+.setNumDataIndexLevels(numDataIndexLevels)
+.setFirstDataBlockOffset(firstDataBlockOffset)
+.setLastDataBlockOffset(lastDataBlockOffset)
+.setComparatorClassName(getHBase1CompatibleName(comparatorClassName))
+.setCompressionCodec(compressionCodec.ordinal());
 if (encryptionKey != null) {
   builder.setEncryptionKey(UnsafeByteOperations.unsafeWrap(encryptionKey));
 }
+return builder.build();
+  }
+
+  /**
+   * Write trailer data as protobuf.
+   * NOTE: we run a translation on the comparator name and will serialize the 
old hbase-1.x where
+   * it makes sense. See {@link #getHBase1CompatibleName(String)}.
+   */
+  void serializeAsPB(DataOutputStream output) throws IOException {
+ByteArrayOutputStream baos = new ByteArrayOutputStream();
 // We need this extra copy unfortunately to determine the final size of the
 // delimited output, see use of baos.size() below.
-builder.build().writeDelimitedTo(baos);
+toProtobuf().writeDelimitedTo(baos);
 baos.writeTo(output);
 // Pad to make up the difference between variable PB encoding length and 
the
 // length when encoded as writable under earlier V2 formats. Failure to pad
@@ -298,8 +302,6 @@ public class FixedFileTrailer {
   lastDataBlockOffset = trailerProto.getLastDataBlockOffset();
 }
  

[29/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/CompactType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/CompactType.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/CompactType.html
index c7fba62..eca6413 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/CompactType.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/CompactType.html
@@ -127,30 +127,28 @@ the order they are declared.
 
 
 
-private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
-RawAsyncHBaseAdmin.compact(TableNametableName,
+private void
+HBaseAdmin.compact(TableNametableName,
byte[]columnFamily,
booleanmajor,
CompactTypecompactType)
-Compact column family of a table, Asynchronous operation 
even if CompletableFuture.get()
+Compact a table.
 
 
 
-private void
-HBaseAdmin.compact(TableNametableName,
+private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
+RawAsyncHBaseAdmin.compact(TableNametableName,
byte[]columnFamily,
booleanmajor,
CompactTypecompactType)
-Compact a table.
+Compact column family of a table, Asynchronous operation 
even if CompletableFuture.get()
 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
-AsyncAdmin.compact(TableNametableName,
+AsyncHBaseAdmin.compact(TableNametableName,
byte[]columnFamily,
-   CompactTypecompactType)
-Compact a column family within a table.
-
+   CompactTypecompactType)
 
 
 void
@@ -161,14 +159,16 @@ the order they are declared.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
-RawAsyncHBaseAdmin.compact(TableNametableName,
+void
+HBaseAdmin.compact(TableNametableName,
byte[]columnFamily,
-   CompactTypecompactType)
+   CompactTypecompactType)
+Compact a column family within a table.
+
 
 
-void
-HBaseAdmin.compact(TableNametableName,
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
+AsyncAdmin.compact(TableNametableName,
byte[]columnFamily,
CompactTypecompactType)
 Compact a column family within a table.
@@ -176,16 +176,14 @@ the order they are declared.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
-AsyncHBaseAdmin.compact(TableNametableName,
+RawAsyncHBaseAdmin.compact(TableNametableName,
byte[]columnFamily,
CompactTypecompactType)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
-AsyncAdmin.compact(TableNametableName,
-   CompactTypecompactType)
-Compact a table.
-
+AsyncHBaseAdmin.compact(TableNametableName,
+   CompactTypecompactType)
 
 
 void
@@ -195,28 +193,28 @@ the order they are declared.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
-RawAsyncHBaseAdmin.compact(TableNametableName,
-   CompactTypecompactType)
-
-
 void
 HBaseAdmin.compact(TableNametableName,
CompactTypecompactType)
 Compact a table.
 
 
+

[22/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/RpcRetryingCallerFactory.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RpcRetryingCallerFactory.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RpcRetryingCallerFactory.html
index 5e1590b..d481372 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RpcRetryingCallerFactory.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RpcRetryingCallerFactory.html
@@ -126,15 +126,15 @@
 
 
 private RpcRetryingCallerFactory
-ConnectionImplementation.rpcCallerFactory
+RegionCoprocessorRpcChannel.rpcCallerFactory
 
 
 private RpcRetryingCallerFactory
-HTable.rpcCallerFactory
+ConnectionImplementation.rpcCallerFactory
 
 
 private RpcRetryingCallerFactory
-RegionCoprocessorRpcChannel.rpcCallerFactory
+HTable.rpcCallerFactory
 
 
 private RpcRetryingCallerFactory
@@ -155,21 +155,21 @@
 
 
 RpcRetryingCallerFactory
-ClusterConnection.getNewRpcRetryingCallerFactory(org.apache.hadoop.conf.Configurationconf)
-Returns a new RpcRetryingCallerFactory from the given 
Configuration.
-
+ConnectionImplementation.getNewRpcRetryingCallerFactory(org.apache.hadoop.conf.Configurationconf)
 
 
 RpcRetryingCallerFactory
-ConnectionImplementation.getNewRpcRetryingCallerFactory(org.apache.hadoop.conf.Configurationconf)
+ClusterConnection.getNewRpcRetryingCallerFactory(org.apache.hadoop.conf.Configurationconf)
+Returns a new RpcRetryingCallerFactory from the given 
Configuration.
+
 
 
 RpcRetryingCallerFactory
-ClusterConnection.getRpcRetryingCallerFactory()
+ConnectionImplementation.getRpcRetryingCallerFactory()
 
 
 RpcRetryingCallerFactory
-ConnectionImplementation.getRpcRetryingCallerFactory()
+ClusterConnection.getRpcRetryingCallerFactory()
 
 
 static RpcRetryingCallerFactory

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
index 018438c..6384833 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
@@ -283,14 +283,6 @@ service.
 
 
 private Scan
-AsyncScanSingleRegionRpcRetryingCaller.scan
-
-
-protected Scan
-ScannerCallable.scan
-
-
-private Scan
 ScannerCallableWithReplicas.scan
 
 
@@ -307,6 +299,14 @@ service.
 
 
 private Scan
+AsyncScanSingleRegionRpcRetryingCaller.scan
+
+
+protected Scan
+ScannerCallable.scan
+
+
+private Scan
 TableSnapshotScanner.scan
 
 
@@ -339,11 +339,11 @@ service.
 
 
 protected Scan
-ScannerCallable.getScan()
+ClientScanner.getScan()
 
 
 protected Scan
-ClientScanner.getScan()
+ScannerCallable.getScan()
 
 
 Scan
@@ -638,29 +638,29 @@ service.
 
 
 ResultScanner
-RawAsyncTableImpl.getScanner(Scanscan)
-
-
-ResultScanner
-HTable.getScanner(Scanscan)
-The underlying HTable must 
not be closed.
+AsyncTable.getScanner(Scanscan)
+Returns a scanner on the current table as specified by the 
Scan 
object.
 
 
-
+
 ResultScanner
 Table.getScanner(Scanscan)
 Returns a scanner on the current table as specified by the 
Scan
  object.
 
 
-
+
 ResultScanner
 AsyncTableImpl.getScanner(Scanscan)
 
+
+ResultScanner
+RawAsyncTableImpl.getScanner(Scanscan)
+
 
 ResultScanner
-AsyncTable.getScanner(Scanscan)
-Returns a scanner on the current table as specified by the 
Scan 
object.
+HTable.getScanner(Scanscan)
+The underlying HTable must 
not be closed.
 
 
 
@@ -703,7 +703,9 @@ service.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListResult
-RawAsyncTableImpl.scanAll(Scanscan)
+AsyncTable.scanAll(Scanscan)
+Return all the results that match the given scan 
object.
+
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListResult
@@ -711,9 +713,7 @@ service.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListResult
-AsyncTable.scanAll(Scanscan)
-Return all the results that match the given scan 
object.
-
+RawAsyncTableImpl.scanAll(Scanscan)
 
 
 private Scan
@@ 

hbase-site git commit: INFRA-10751 Empty commit

2018-02-14 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 828486ae9 -> 8f0525332


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/8f052533
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/8f052533
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/8f052533

Branch: refs/heads/asf-site
Commit: 8f0525332aa9b1e4dea01ab1c16bf54868aa9fa1
Parents: 828486a
Author: jenkins 
Authored: Wed Feb 14 15:14:06 2018 +
Committer: jenkins 
Committed: Wed Feb 14 15:14:06 2018 +

--

--




[43/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/apidocs/overview-tree.html
--
diff --git a/apidocs/overview-tree.html b/apidocs/overview-tree.html
index d21d0ac..32feba1 100644
--- a/apidocs/overview-tree.html
+++ b/apidocs/overview-tree.html
@@ -889,33 +889,33 @@
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
 org.apache.hadoop.hbase.util.Order
-org.apache.hadoop.hbase.KeepDeletedCells
 org.apache.hadoop.hbase.MemoryCompactionPolicy
+org.apache.hadoop.hbase.KeepDeletedCells
 org.apache.hadoop.hbase.CompareOperator
 org.apache.hadoop.hbase.ProcedureState
 org.apache.hadoop.hbase.CellBuilderType
-org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
 org.apache.hadoop.hbase.filter.FilterList.Operator
 org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
-org.apache.hadoop.hbase.filter.Filter.ReturnCode
+org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
 org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
+org.apache.hadoop.hbase.filter.Filter.ReturnCode
 org.apache.hadoop.hbase.io.encoding.DataBlockEncoding
 org.apache.hadoop.hbase.regionserver.BloomType
+org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
 org.apache.hadoop.hbase.quotas.ThrottlingException.Type
 org.apache.hadoop.hbase.quotas.QuotaScope
-org.apache.hadoop.hbase.quotas.ThrottleType
 org.apache.hadoop.hbase.quotas.QuotaType
-org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
-org.apache.hadoop.hbase.client.Durability
+org.apache.hadoop.hbase.quotas.ThrottleType
 org.apache.hadoop.hbase.client.SnapshotType
-org.apache.hadoop.hbase.client.MasterSwitchType
-org.apache.hadoop.hbase.client.CompactType
+org.apache.hadoop.hbase.client.Durability
 org.apache.hadoop.hbase.client.MobCompactPartitionPolicy
-org.apache.hadoop.hbase.client.CompactionState
-org.apache.hadoop.hbase.client.Scan.ReadType
-org.apache.hadoop.hbase.client.RequestController.ReturnCode
 org.apache.hadoop.hbase.client.IsolationLevel
+org.apache.hadoop.hbase.client.RequestController.ReturnCode
+org.apache.hadoop.hbase.client.Scan.ReadType
+org.apache.hadoop.hbase.client.CompactionState
+org.apache.hadoop.hbase.client.MasterSwitchType
 org.apache.hadoop.hbase.client.Consistency
+org.apache.hadoop.hbase.client.CompactType
 org.apache.hadoop.hbase.client.security.SecurityCapability
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/book.html
--
diff --git a/book.html b/book.html
index 96d4058..416 100644
--- a/book.html
+++ b/book.html
@@ -37303,7 +37303,7 @@ The server will return cellblocks compressed using this 
same compressor as long
 
 
 Version 3.0.0-SNAPSHOT
-Last updated 2018-02-13 14:29:54 UTC
+Last updated 2018-02-14 14:29:42 UTC
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/bulk-loads.html
--
diff --git a/bulk-loads.html b/bulk-loads.html
index f219a8a..83c7795 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase   
   Bulk Loads in Apache HBase (TM)
@@ -311,7 +311,7 @@ under the License. -->
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-02-13
+  Last Published: 
2018-02-14
 
 
 



[16/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index 65795ae..463f4fa 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -488,15 +488,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Filter
-ColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
+SingleColumnValueExcludeFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
 static Filter
-ColumnCountGetFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
+ValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
 static Filter
-RowFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
+FamilyFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
 static Filter
@@ -506,63 +506,63 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Filter
-FirstKeyOnlyFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
+ColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
 static Filter
-TimestampsFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
+PageFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
 static Filter
-ValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
+RowFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
 static Filter
-KeyOnlyFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
+ColumnRangeFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
 static Filter
-FamilyFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
+ColumnCountGetFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
 static Filter
-QualifierFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
+MultipleColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
 static Filter
-ColumnRangeFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
+ColumnPaginationFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
 static Filter

[12/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
index c7d05d1..abcb738 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
@@ -143,18 +143,18 @@
 
 
 void
-HFileDataBlockEncoderImpl.saveMetadata(HFile.Writerwriter)
-
-
-void
 NoOpDataBlockEncoder.saveMetadata(HFile.Writerwriter)
 
-
+
 void
 HFileDataBlockEncoder.saveMetadata(HFile.Writerwriter)
 Save metadata in HFile which will be written to disk
 
 
+
+void
+HFileDataBlockEncoderImpl.saveMetadata(HFile.Writerwriter)
+
 
 
 
@@ -203,18 +203,18 @@
 
 
 
-void
-RowColBloomContext.addLastBloomKey(HFile.Writerwriter)
+abstract void
+BloomContext.addLastBloomKey(HFile.Writerwriter)
+Adds the last bloom key to the HFile Writer as part of 
StorefileWriter close.
+
 
 
 void
 RowBloomContext.addLastBloomKey(HFile.Writerwriter)
 
 
-abstract void
-BloomContext.addLastBloomKey(HFile.Writerwriter)
-Adds the last bloom key to the HFile Writer as part of 
StorefileWriter close.
-
+void
+RowColBloomContext.addLastBloomKey(HFile.Writerwriter)
 
 
 static BloomFilterWriter

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
index 479b9d3..274bfad 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
@@ -106,15 +106,15 @@
 
 
 
+private HFileBlock.Writer
+HFileBlockIndex.BlockIndexWriter.blockWriter
+
+
 protected HFileBlock.Writer
 HFileWriterImpl.blockWriter
 block writer
 
 
-
-private HFileBlock.Writer
-HFileBlockIndex.BlockIndexWriter.blockWriter
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
index 0c892c8..b293c97 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
@@ -136,15 +136,15 @@
 
 
 HFileContext
-HFileBlockDecodingContext.getHFileContext()
+HFileBlockEncodingContext.getHFileContext()
 
 
 HFileContext
-HFileBlockDefaultDecodingContext.getHFileContext()
+HFileBlockDecodingContext.getHFileContext()
 
 
 HFileContext
-HFileBlockEncodingContext.getHFileContext()
+HFileBlockDefaultDecodingContext.getHFileContext()
 
 
 HFileContext
@@ -224,23 +224,23 @@
 
 
 private HFileContext
+HFile.WriterFactory.fileContext
+
+
+private HFileContext
 HFileBlock.fileContext
 Meta data that holds meta information on the 
hfileblock.
 
 
-
+
 private HFileContext
 HFileBlock.Writer.fileContext
 Meta data that holds information about the hfileblock
 
 
-
-private HFileContext
-HFileBlock.FSReaderImpl.fileContext
-
 
 private HFileContext
-HFile.WriterFactory.fileContext
+HFileBlock.FSReaderImpl.fileContext
 
 
 private HFileContext
@@ -277,20 +277,20 @@
 
 
 HFileContext
-HFileWriterImpl.getFileContext()
-
-
-HFileContext
 HFile.Writer.getFileContext()
 Return the file context for the HFile this writer belongs 
to
 
 
-
+
 HFileContext
 HFile.Reader.getFileContext()
 Return the file context of the HFile this reader belongs 
to
 
 
+
+HFileContext
+HFileWriterImpl.getFileContext()
+
 
 HFileContext
 HFileReaderImpl.getFileContext()
@@ -323,35 +323,35 @@
 
 
 HFileBlockDecodingContext
-HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContextfileContext)
-
-
-HFileBlockDecodingContext
 NoOpDataBlockEncoder.newDataBlockDecodingContext(HFileContextmeta)
 
-
+
 HFileBlockDecodingContext
 HFileDataBlockEncoder.newDataBlockDecodingContext(HFileContextfileContext)
 create a encoder specific decoding context for 
reading.
 
 
-
-HFileBlockEncodingContext
-HFileDataBlockEncoderImpl.newDataBlockEncodingContext(byte[]dummyHeader,
-   HFileContextfileContext)
-
 
+HFileBlockDecodingContext
+HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContextfileContext)
+
+
 HFileBlockEncodingContext
 NoOpDataBlockEncoder.newDataBlockEncodingContext(byte[]dummyHeader,
HFileContextmeta)
 
-
+
 HFileBlockEncodingContext
 

[34/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index 428af8c..6410159 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -239,15 +239,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ServerName
-HRegionLocation.serverName
+ServerMetricsBuilder.serverName
 
 
 private ServerName
-ServerMetricsBuilder.serverName
+ServerMetricsBuilder.ServerMetricsImpl.serverName
 
 
 private ServerName
-ServerMetricsBuilder.ServerMetricsImpl.serverName
+HRegionLocation.serverName
 
 
 
@@ -306,9 +306,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ServerName
-ClusterMetrics.getMasterName()
-Returns detailed information about the current master ServerName.
-
+ClusterMetricsBuilder.ClusterMetricsImpl.getMasterName()
 
 
 ServerName
@@ -318,11 +316,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ServerName
-ClusterMetricsBuilder.ClusterMetricsImpl.getMasterName()
+ClusterMetrics.getMasterName()
+Returns detailed information about the current master ServerName.
+
 
 
 ServerName
-HRegionLocation.getServerName()
+ServerLoad.getServerName()
+Deprecated.
+
 
 
 ServerName
@@ -330,13 +332,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ServerName
-ServerLoad.getServerName()
-Deprecated.
-
+ServerMetricsBuilder.ServerMetricsImpl.getServerName()
 
 
 ServerName
-ServerMetricsBuilder.ServerMetricsImpl.getServerName()
+HRegionLocation.getServerName()
 
 
 ServerName
@@ -405,7 +405,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterMetrics.getBackupMasterNames()
+ClusterMetricsBuilder.ClusterMetricsImpl.getBackupMasterNames()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
@@ -415,7 +415,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterMetricsBuilder.ClusterMetricsImpl.getBackupMasterNames()
+ClusterMetrics.getBackupMasterNames()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
@@ -428,7 +428,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterMetrics.getDeadServerNames()
+ClusterMetricsBuilder.ClusterMetricsImpl.getDeadServerNames()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
@@ -438,7 +438,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterMetricsBuilder.ClusterMetricsImpl.getDeadServerNames()
+ClusterMetrics.getDeadServerNames()
 
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerLoad
@@ -448,7 +448,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerMetrics
-ClusterMetrics.getLiveServerMetrics()
+ClusterMetricsBuilder.ClusterMetricsImpl.getLiveServerMetrics()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerMetrics
@@ -458,7 +458,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerMetrics
-ClusterMetricsBuilder.ClusterMetricsImpl.getLiveServerMetrics()
+ClusterMetrics.getLiveServerMetrics()
 
 
 static PairRegionInfo,ServerName
@@ -858,31 +858,31 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ServerName
-AsyncRequestFutureImpl.SingleServerRequestRunnable.server
+FastFailInterceptorContext.server
 
 
 private ServerName
-FastFailInterceptorContext.server

[13/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
index 570fb68..b8ce496 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
@@ -168,39 +168,27 @@
 
 
 void
-CombinedBlockCache.cacheBlock(BlockCacheKeycacheKey,
-  Cacheablebuf)
-
-
-void
 BlockCache.cacheBlock(BlockCacheKeycacheKey,
   Cacheablebuf)
 Add block to cache (defaults to not in-memory).
 
 
-
+
 void
 LruBlockCache.cacheBlock(BlockCacheKeycacheKey,
   Cacheablebuf)
 Cache the block with the specified name and buffer.
 
 
-
-void
-MemcachedBlockCache.cacheBlock(BlockCacheKeycacheKey,
-  Cacheablebuf)
-
 
 void
-CombinedBlockCache.cacheBlock(BlockCacheKeycacheKey,
-  Cacheablebuf,
-  booleaninMemory)
+CombinedBlockCache.cacheBlock(BlockCacheKeycacheKey,
+  Cacheablebuf)
 
 
 void
-InclusiveCombinedBlockCache.cacheBlock(BlockCacheKeycacheKey,
-  Cacheablebuf,
-  booleaninMemory)
+MemcachedBlockCache.cacheBlock(BlockCacheKeycacheKey,
+  Cacheablebuf)
 
 
 void
@@ -220,6 +208,18 @@
 
 
 void
+CombinedBlockCache.cacheBlock(BlockCacheKeycacheKey,
+  Cacheablebuf,
+  booleaninMemory)
+
+
+void
+InclusiveCombinedBlockCache.cacheBlock(BlockCacheKeycacheKey,
+  Cacheablebuf,
+  booleaninMemory)
+
+
+void
 MemcachedBlockCache.cacheBlock(BlockCacheKeycacheKey,
   Cacheablebuf,
   booleaninMemory)
@@ -232,21 +232,21 @@
 
 
 boolean
-CombinedBlockCache.evictBlock(BlockCacheKeycacheKey)
+BlockCache.evictBlock(BlockCacheKeycacheKey)
+Evict block from cache.
+
 
 
 boolean
-InclusiveCombinedBlockCache.evictBlock(BlockCacheKeycacheKey)
+LruBlockCache.evictBlock(BlockCacheKeycacheKey)
 
 
 boolean
-BlockCache.evictBlock(BlockCacheKeycacheKey)
-Evict block from cache.
-
+CombinedBlockCache.evictBlock(BlockCacheKeycacheKey)
 
 
 boolean
-LruBlockCache.evictBlock(BlockCacheKeycacheKey)
+InclusiveCombinedBlockCache.evictBlock(BlockCacheKeycacheKey)
 
 
 boolean
@@ -254,35 +254,35 @@
 
 
 Cacheable
-CombinedBlockCache.getBlock(BlockCacheKeycacheKey,
+BlockCache.getBlock(BlockCacheKeycacheKey,
 booleancaching,
 booleanrepeat,
-booleanupdateCacheMetrics)
+booleanupdateCacheMetrics)
+Fetch block from cache.
+
 
 
 Cacheable
-InclusiveCombinedBlockCache.getBlock(BlockCacheKeycacheKey,
+LruBlockCache.getBlock(BlockCacheKeycacheKey,
 booleancaching,
 booleanrepeat,
-booleanupdateCacheMetrics)
+booleanupdateCacheMetrics)
+Get the buffer of the block with the specified name.
+
 
 
 Cacheable
-BlockCache.getBlock(BlockCacheKeycacheKey,
+CombinedBlockCache.getBlock(BlockCacheKeycacheKey,
 booleancaching,
 booleanrepeat,
-booleanupdateCacheMetrics)
-Fetch block from cache.
-
+booleanupdateCacheMetrics)
 
 
 Cacheable
-LruBlockCache.getBlock(BlockCacheKeycacheKey,
+InclusiveCombinedBlockCache.getBlock(BlockCacheKeycacheKey,
 booleancaching,
 booleanrepeat,
-booleanupdateCacheMetrics)
-Get the buffer of the block with the specified name.
-
+booleanupdateCacheMetrics)
 
 
 Cacheable
@@ -308,11 +308,6 @@
 CombinedBlockCache.getRefCount(BlockCacheKeycacheKey)
 
 
-void
-CombinedBlockCache.returnBlock(BlockCacheKeycacheKey,
-   Cacheableblock)
-
-
 default void
 BlockCache.returnBlock(BlockCacheKeycacheKey,
Cacheableblock)
@@ -320,6 +315,11 @@
  is over.
 
 
+
+void
+CombinedBlockCache.returnBlock(BlockCacheKeycacheKey,
+   Cacheableblock)
+
 
 
 
@@ -497,14 +497,14 @@
 
 
 void
-BucketCache.BucketEntryGroup.add(http://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true;
 title="class or interface in java.util">Map.EntryBlockCacheKey,BucketCache.BucketEntryblock)
-
-
-void
 CachedEntryQueue.add(http://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true;
 title="class or interface in java.util">Map.EntryBlockCacheKey,BucketCache.BucketEntryentry)
 Attempt to add the specified entry to this queue.
 
 
+
+void
+BucketCache.BucketEntryGroup.add(http://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true;
 title="class or interface in java.util">Map.EntryBlockCacheKey,BucketCache.BucketEntryblock)
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html 

[15/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index 6e37f0b..49f85aa 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -162,11 +162,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ImmutableBytesWritable
-TableSnapshotInputFormat.TableSnapshotRecordReader.createKey()
+TableRecordReader.createKey()
 
 
 ImmutableBytesWritable
-TableRecordReader.createKey()
+TableSnapshotInputFormat.TableSnapshotRecordReader.createKey()
 
 
 ImmutableBytesWritable
@@ -183,9 +183,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReaderImmutableBytesWritable,Result
-TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
org.apache.hadoop.mapred.JobConfjob,
-   
org.apache.hadoop.mapred.Reporterreporter)
+   org.apache.hadoop.mapred.Reporterreporter)
+Builds a TableRecordReader.
+
 
 
 org.apache.hadoop.mapred.RecordReaderImmutableBytesWritable,Result
@@ -195,11 +197,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReaderImmutableBytesWritable,Result
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
+TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
org.apache.hadoop.mapred.JobConfjob,
-   org.apache.hadoop.mapred.Reporterreporter)
-Builds a TableRecordReader.
-
+   
org.apache.hadoop.mapred.Reporterreporter)
 
 
 
@@ -218,10 +218,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritablerow,
-   Resultvalues,
+IdentityTableMap.map(ImmutableBytesWritablekey,
+   Resultvalue,
org.apache.hadoop.mapred.OutputCollectorImmutableBytesWritable,Resultoutput,
-   org.apache.hadoop.mapred.Reporterreporter)
+   org.apache.hadoop.mapred.Reporterreporter)
+Pass the key, value to reduce
+
 
 
 void
@@ -234,21 +236,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritablekey,
-   Resultvalue,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritablerow,
+   Resultvalues,
org.apache.hadoop.mapred.OutputCollectorImmutableBytesWritable,Resultoutput,
-   org.apache.hadoop.mapred.Reporterreporter)
-Pass the key, value to reduce
-
+   org.apache.hadoop.mapred.Reporterreporter)
 
 
 boolean
-TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritablekey,
+TableRecordReader.next(ImmutableBytesWritablekey,
 Resultvalue)
 
 
 boolean
-TableRecordReader.next(ImmutableBytesWritablekey,
+TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritablekey,
 Resultvalue)
 
 
@@ -281,10 +281,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritablerow,
-   Resultvalues,
+IdentityTableMap.map(ImmutableBytesWritablekey,
+   Resultvalue,
org.apache.hadoop.mapred.OutputCollectorImmutableBytesWritable,Resultoutput,
-   org.apache.hadoop.mapred.Reporterreporter)
+   org.apache.hadoop.mapred.Reporterreporter)
+Pass the key, value to reduce
+
 
 
 void
@@ -297,12 +299,10 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritablekey,
-   Resultvalue,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritablerow,
+   Resultvalues,
org.apache.hadoop.mapred.OutputCollectorImmutableBytesWritable,Resultoutput,
-   org.apache.hadoop.mapred.Reporterreporter)
-Pass the key, value to reduce
-
+   org.apache.hadoop.mapred.Reporterreporter)
 
 
 void
@@ -349,7 +349,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ImmutableBytesWritable
-TableRecordReaderImpl.key
+MultithreadedTableMapper.SubMapRecordReader.key
 
 
 private ImmutableBytesWritable
@@ -357,7 +357,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ImmutableBytesWritable
-MultithreadedTableMapper.SubMapRecordReader.key
+TableRecordReaderImpl.key
 
 
 (package private) ImmutableBytesWritable
@@ -427,33 +427,33 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ImmutableBytesWritable
-TableSnapshotInputFormat.TableSnapshotRegionRecordReader.getCurrentKey()

[50/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 38529ee..71e5d2c 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -1105,15 +1105,15 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 
-Increment
-Increment.add(Cellcell)
-Add the specified KeyValue to this operation.
+Append
+Append.add(Cellcell)
+Add column and value to this Append operation.
 
 
 
-Delete
-Delete.add(Cellcell)
-Add an existing delete marker to this Delete object.
+Increment
+Increment.add(Cellcell)
+Add the specified KeyValue to this operation.
 
 
 
@@ -1123,9 +1123,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Append
-Append.add(Cellcell)
-Add column and value to this Append operation.
+Delete
+Delete.add(Cellcell)
+Add an existing delete marker to this Delete object.
 
 
 
@@ -1208,20 +1208,20 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
   booleanmayHaveMoreCellsInRow)
 
 
-Increment
-Increment.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
+Append
+Append.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
 Deprecated.
 As of release 2.0.0, this 
will be removed in HBase 3.0.0.
- Use Increment.Increment(byte[],
 long, NavigableMap) instead
+ Use Append.Append(byte[],
 long, NavigableMap) instead
 
 
 
 
-Delete
-Delete.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
+Increment
+Increment.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
 Deprecated.
 As of release 2.0.0, this 
will be removed in HBase 3.0.0.
- Use Delete.Delete(byte[],
 long, NavigableMap) instead
+ Use Increment.Increment(byte[],
 long, NavigableMap) instead
 
 
 
@@ -1244,11 +1244,11 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 
-Append
-Append.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
+Delete
+Delete.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
 Deprecated.
 As of release 2.0.0, this 
will be removed in HBase 3.0.0.
- Use Append.Append(byte[],
 long, NavigableMap) instead
+ Use Delete.Delete(byte[],
 long, NavigableMap) instead
 
 
 
@@ -1311,67 +1311,67 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 Cell
-ColumnPrefixFilter.getNextCellHint(Cellcell)
+FilterList.getNextCellHint(CellcurrentCell)
 
 
 Cell
-TimestampsFilter.getNextCellHint(CellcurrentCell)
-Pick the next cell that the scanner should seek to.
-
+MultipleColumnPrefixFilter.getNextCellHint(Cellcell)
 
 
 Cell
-MultiRowRangeFilter.getNextCellHint(CellcurrentKV)
+ColumnRangeFilter.getNextCellHint(Cellcell)
 
 
-Cell
-ColumnPaginationFilter.getNextCellHint(Cellcell)
+abstract Cell
+Filter.getNextCellHint(CellcurrentCell)
+If the filter returns the match code SEEK_NEXT_USING_HINT, 
then it should also tell which is
+ the next key it must seek to.
+
 
 
 Cell
-ColumnRangeFilter.getNextCellHint(Cellcell)
+ColumnPaginationFilter.getNextCellHint(Cellcell)
 
 
 Cell
-FilterList.getNextCellHint(CellcurrentCell)
+FuzzyRowFilter.getNextCellHint(CellcurrentCell)
 
 
 Cell
-MultipleColumnPrefixFilter.getNextCellHint(Cellcell)

[23/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html
index 35f0e35..e3d9f70 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/ResultScanner.html
@@ -208,9 +208,9 @@ service.
 
 
 
-ResultScanner
-HTable.getScanner(byte[]family)
-The underlying HTable must 
not be closed.
+default ResultScanner
+AsyncTable.getScanner(byte[]family)
+Gets a scanner on the current table for the given 
family.
 
 
 
@@ -220,16 +220,16 @@ service.
 
 
 
-default ResultScanner
-AsyncTable.getScanner(byte[]family)
-Gets a scanner on the current table for the given 
family.
+ResultScanner
+HTable.getScanner(byte[]family)
+The underlying HTable must 
not be closed.
 
 
 
-ResultScanner
-HTable.getScanner(byte[]family,
+default ResultScanner
+AsyncTable.getScanner(byte[]family,
   byte[]qualifier)
-The underlying HTable must 
not be closed.
+Gets a scanner on the current table for the given family 
and qualifier.
 
 
 
@@ -240,37 +240,37 @@ service.
 
 
 
-default ResultScanner
-AsyncTable.getScanner(byte[]family,
+ResultScanner
+HTable.getScanner(byte[]family,
   byte[]qualifier)
-Gets a scanner on the current table for the given family 
and qualifier.
+The underlying HTable must 
not be closed.
 
 
 
 ResultScanner
-RawAsyncTableImpl.getScanner(Scanscan)
-
-
-ResultScanner
-HTable.getScanner(Scanscan)
-The underlying HTable must 
not be closed.
+AsyncTable.getScanner(Scanscan)
+Returns a scanner on the current table as specified by the 
Scan 
object.
 
 
-
+
 ResultScanner
 Table.getScanner(Scanscan)
 Returns a scanner on the current table as specified by the 
Scan
  object.
 
 
-
+
 ResultScanner
 AsyncTableImpl.getScanner(Scanscan)
 
+
+ResultScanner
+RawAsyncTableImpl.getScanner(Scanscan)
+
 
 ResultScanner
-AsyncTable.getScanner(Scanscan)
-Returns a scanner on the current table as specified by the 
Scan 
object.
+HTable.getScanner(Scanscan)
+The underlying HTable must 
not be closed.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/RetriesExhaustedWithDetailsException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RetriesExhaustedWithDetailsException.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RetriesExhaustedWithDetailsException.html
index d730879..b1d1cef 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RetriesExhaustedWithDetailsException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RetriesExhaustedWithDetailsException.html
@@ -106,11 +106,11 @@
 
 
 RetriesExhaustedWithDetailsException
-AsyncRequestFutureImpl.getErrors()
+AsyncRequestFuture.getErrors()
 
 
 RetriesExhaustedWithDetailsException
-AsyncRequestFuture.getErrors()
+AsyncRequestFutureImpl.getErrors()
 
 
 (package private) RetriesExhaustedWithDetailsException

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallable.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallable.html
index 9642faa..0a290e1 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallable.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallable.html
@@ -234,36 +234,28 @@
 
 
 
-T
-RpcRetryingCallerImpl.callWithoutRetries(RetryingCallableTcallable,
-  intcallTimeout)
-
-
 T
 RpcRetryingCaller.callWithoutRetries(RetryingCallableTcallable,
   intcallTimeout)
 Call the server once only.
 
 
-
+
 T
-RpcRetryingCallerImpl.callWithRetries(RetryingCallableTcallable,
-   intcallTimeout)
+RpcRetryingCallerImpl.callWithoutRetries(RetryingCallableTcallable,
+  intcallTimeout)
 
-
+
 T
 RpcRetryingCaller.callWithRetries(RetryingCallableTcallable,
intcallTimeout)
 Retries if invocation fails.
 
 
-
-RetryingCallerInterceptorContext
-NoOpRetryingInterceptorContext.prepare(RetryingCallable?callable)
-
 
-FastFailInterceptorContext
-FastFailInterceptorContext.prepare(RetryingCallable?callable)
+T
+RpcRetryingCallerImpl.callWithRetries(RetryingCallableTcallable,
+   intcallTimeout)
 
 
 abstract RetryingCallerInterceptorContext
@@ -275,13 +267,11 @@
 
 
 RetryingCallerInterceptorContext
-NoOpRetryingInterceptorContext.prepare(RetryingCallable?callable,
-   inttries)

[24/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
index 78d979d..81b1f23 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
@@ -292,7 +292,7 @@ service.
 
 
 private static HRegionLocation
-MetaTableAccessor.getRegionLocation(Resultr,
+AsyncMetaTableAccessor.getRegionLocation(Resultr,
  RegionInforegionInfo,
  intreplicaId)
 Returns the HRegionLocation parsed from the given meta row 
Result
@@ -301,7 +301,7 @@ service.
 
 
 private static HRegionLocation
-AsyncMetaTableAccessor.getRegionLocation(Resultr,
+MetaTableAccessor.getRegionLocation(Resultr,
  RegionInforegionInfo,
  intreplicaId)
 Returns the HRegionLocation parsed from the given meta row 
Result
@@ -309,14 +309,14 @@ service.
 
 
 
-static RegionLocations
-MetaTableAccessor.getRegionLocations(Resultr)
+private static http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">OptionalRegionLocations
+AsyncMetaTableAccessor.getRegionLocations(Resultr)
 Returns an HRegionLocationList extracted from the 
result.
 
 
 
-private static http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">OptionalRegionLocations
-AsyncMetaTableAccessor.getRegionLocations(Resultr)
+static RegionLocations
+MetaTableAccessor.getRegionLocations(Resultr)
 Returns an HRegionLocationList extracted from the 
result.
 
 
@@ -326,42 +326,42 @@ service.
 
 
 private static long
-MetaTableAccessor.getSeqNumDuringOpen(Resultr,
+AsyncMetaTableAccessor.getSeqNumDuringOpen(Resultr,
intreplicaId)
 The latest seqnum that the server writing to meta observed 
when opening the region.
 
 
 
 private static long
-AsyncMetaTableAccessor.getSeqNumDuringOpen(Resultr,
+MetaTableAccessor.getSeqNumDuringOpen(Resultr,
intreplicaId)
 The latest seqnum that the server writing to meta observed 
when opening the region.
 
 
 
-static ServerName
-MetaTableAccessor.getServerName(Resultr,
+private static http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">OptionalServerName
+AsyncMetaTableAccessor.getServerName(Resultr,
  intreplicaId)
 Returns a ServerName from catalog table Result.
 
 
 
-private static http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">OptionalServerName
-AsyncMetaTableAccessor.getServerName(Resultr,
+static ServerName
+MetaTableAccessor.getServerName(Resultr,
  intreplicaId)
 Returns a ServerName from catalog table Result.
 
 
 
+private static http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">OptionalTableState
+AsyncMetaTableAccessor.getTableState(Resultr)
+
+
 static TableState
 MetaTableAccessor.getTableState(Resultr)
 Decode table state from META Result.
 
 
-
-private static http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">OptionalTableState
-AsyncMetaTableAccessor.getTableState(Resultr)
-
 
 void
 AsyncMetaTableAccessor.MetaTableScanResultConsumer.onNext(Result[]results,
@@ -457,13 +457,13 @@ service.
 ClientScanner.cache
 
 
-private http://docs.oracle.com/javase/8/docs/api/java/util/Deque.html?is-external=true;
 title="class or interface in java.util">DequeResult
-BatchScanResultCache.partialResults
-
-
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListResult
 CompleteScanResultCache.partialResults
 
+
+private http://docs.oracle.com/javase/8/docs/api/java/util/Deque.html?is-external=true;
 title="class or interface in java.util">DequeResult
+BatchScanResultCache.partialResults
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/Queue.html?is-external=true;
 title="class or interface in java.util">QueueResult
 AsyncTableResultScanner.queue
@@ -486,7 +486,7 @@ service.
 
 
 Result[]
-BatchScanResultCache.addAndGet(Result[]results,
+AllowPartialScanResultCache.addAndGet(Result[]results,
  booleanisHeartbeatMessage)
 
 
@@ -496,20 +496,24 @@ service.
 
 
 Result[]
-AllowPartialScanResultCache.addAndGet(Result[]results,
+BatchScanResultCache.addAndGet(Result[]results,
  booleanisHeartbeatMessage)
 
 
 Result
-HTable.append(Appendappend)
-
-
-Result
 Table.append(Appendappend)
 Appends values to one or more columns within a single 
row.
 
 
+

[36/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
index d92043a..fa63017 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
@@ -423,7 +423,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HRegionInfo
-RegionServerCallable.getHRegionInfo()
+ScannerCallableWithReplicas.getHRegionInfo()
 
 
 HRegionInfo
@@ -435,7 +435,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HRegionInfo
-ScannerCallableWithReplicas.getHRegionInfo()
+RegionServerCallable.getHRegionInfo()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
index 5fb5aab..bb30224 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
@@ -162,7 +162,7 @@ service.
 
 
 private static HRegionLocation
-MetaTableAccessor.getRegionLocation(Resultr,
+AsyncMetaTableAccessor.getRegionLocation(Resultr,
  RegionInforegionInfo,
  intreplicaId)
 Returns the HRegionLocation parsed from the given meta row 
Result
@@ -171,7 +171,7 @@ service.
 
 
 private static HRegionLocation
-AsyncMetaTableAccessor.getRegionLocation(Resultr,
+MetaTableAccessor.getRegionLocation(Resultr,
  RegionInforegionInfo,
  intreplicaId)
 Returns the HRegionLocation parsed from the given meta row 
Result
@@ -304,6 +304,14 @@ service.
 HTableMultiplexer.FlushWorker.addr
 
 
+HRegionLocation
+AsyncClientScanner.OpenScannerResponse.loc
+
+
+private HRegionLocation
+AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.loc
+
+
 private HRegionLocation
 AsyncScanSingleRegionRpcRetryingCaller.loc
 
@@ -312,23 +320,15 @@ service.
 AsyncBatchRpcRetryingCaller.RegionRequest.loc
 
 
-HRegionLocation
-AsyncClientScanner.OpenScannerResponse.loc
+protected HRegionLocation
+RegionAdminServiceCallable.location
 
 
-private HRegionLocation
-AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.loc
-
-
 protected HRegionLocation
 RegionServerCallable.location
 Some subclasses want to set their own location.
 
 
-
-protected HRegionLocation
-RegionAdminServiceCallable.location
-
 
 
 
@@ -371,11 +371,11 @@ service.
 
 
 protected HRegionLocation
-RegionServerCallable.getLocation()
+MultiServerCallable.getLocation()
 
 
 protected HRegionLocation
-MultiServerCallable.getLocation()
+RegionServerCallable.getLocation()
 
 
 HRegionLocation
@@ -383,43 +383,43 @@ service.
 
 
 HRegionLocation
-RegionLocator.getRegionLocation(byte[]row)
+HRegionLocator.getRegionLocation(byte[]row)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-HRegionLocator.getRegionLocation(byte[]row)
+RegionLocator.getRegionLocation(byte[]row)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-RegionLocator.getRegionLocation(byte[]row,
+HRegionLocator.getRegionLocation(byte[]row,
  booleanreload)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-HRegionLocator.getRegionLocation(byte[]row,
+RegionLocator.getRegionLocation(byte[]row,
  booleanreload)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-ClusterConnection.getRegionLocation(TableNametableName,
+ConnectionImplementation.getRegionLocation(TableNametableName,
  byte[]row,
- booleanreload)
-Find region location hosting passed row
-
+ booleanreload)
 
 
 HRegionLocation
-ConnectionImplementation.getRegionLocation(TableNametableName,
+ClusterConnection.getRegionLocation(TableNametableName,
  byte[]row,
- booleanreload)
+ booleanreload)
+Find region location hosting passed row
+
 
 
 private HRegionLocation
@@ -434,15 +434,20 @@ service.
 
 
 HRegionLocation
+ConnectionImplementation.locateRegion(byte[]regionName)
+
+
+HRegionLocation
 ClusterConnection.locateRegion(byte[]regionName)
 Gets the location of the region of regionName.
 
 
-
+
 HRegionLocation
-ConnectionImplementation.locateRegion(byte[]regionName)
+ConnectionImplementation.locateRegion(TableNametableName,
+byte[]row)
 
-
+
 HRegionLocation
 ClusterConnection.locateRegion(TableNametableName,
 byte[]row)
@@ -450,11 +455,6 @@ service.
  lives in.
 

[30/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
index 5ba2deb..024eca4 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
@@ -106,11 +106,11 @@
 
 
 private AsyncConnectionImpl
-RawAsyncTableImpl.conn
+AsyncClientScanner.conn
 
 
 private AsyncConnectionImpl
-AsyncBatchRpcRetryingCaller.conn
+AsyncRpcRetryingCallerFactory.conn
 
 
 private AsyncConnectionImpl
@@ -118,19 +118,19 @@
 
 
 private AsyncConnectionImpl
-RegionCoprocessorRpcChannelImpl.conn
+RawAsyncTableImpl.conn
 
 
-protected AsyncConnectionImpl
-AsyncRpcRetryingCaller.conn
+private AsyncConnectionImpl
+RegionCoprocessorRpcChannelImpl.conn
 
 
 private AsyncConnectionImpl
-AsyncClientScanner.conn
+AsyncBatchRpcRetryingCaller.conn
 
 
-private AsyncConnectionImpl
-AsyncRpcRetryingCallerFactory.conn
+protected AsyncConnectionImpl
+AsyncRpcRetryingCaller.conn
 
 
 private AsyncConnectionImpl

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
index e71ca45..d6b1759 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
@@ -105,13 +105,13 @@
 
 
 
-private AsyncMasterRequestRpcRetryingCaller.CallableT
-AsyncMasterRequestRpcRetryingCaller.callable
-
-
 private AsyncMasterRequestRpcRetryingCaller.CallableT
 AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder.callable
 
+
+private AsyncMasterRequestRpcRetryingCaller.CallableT
+AsyncMasterRequestRpcRetryingCaller.callable
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
index 60fbcff..f31564e 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
@@ -106,11 +106,11 @@
 
 
 private AsyncProcess
-BufferedMutatorImpl.ap
+HTableMultiplexer.FlushWorker.ap
 
 
 private AsyncProcess
-HTableMultiplexer.FlushWorker.ap
+BufferedMutatorImpl.ap
 
 
 private AsyncProcess
@@ -137,11 +137,11 @@
 
 
 AsyncProcess
-ClusterConnection.getAsyncProcess()
+ConnectionImplementation.getAsyncProcess()
 
 
 AsyncProcess
-ConnectionImplementation.getAsyncProcess()
+ClusterConnection.getAsyncProcess()
 
 
 (package private) AsyncProcess

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html
index c610e19..9a8d746 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html
@@ -106,11 +106,11 @@
 
 
 private AsyncRegionLocator
-AsyncConnectionImpl.locator
+AsyncTableRegionLocatorImpl.locator
 
 
 private AsyncRegionLocator
-AsyncTableRegionLocatorImpl.locator
+AsyncConnectionImpl.locator
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html
index a970ce5..06fd193 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html
@@ -126,13 +126,13 @@
 
 
 
-(package private) AsyncRegistry
-AsyncConnectionImpl.registry
-
-
 private AsyncRegistry
 AsyncMetaRegionLocator.registry
 
+
+(package private) AsyncRegistry
+AsyncConnectionImpl.registry
+
 
 
 


[07/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/PeerProcedureInterface.PeerOperationType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/PeerProcedureInterface.PeerOperationType.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/PeerProcedureInterface.PeerOperationType.html
index 3628d68..bd2f966 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/PeerProcedureInterface.PeerOperationType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/PeerProcedureInterface.PeerOperationType.html
@@ -152,27 +152,27 @@ the order they are declared.
 
 
 PeerProcedureInterface.PeerOperationType
-RefreshPeerProcedure.getPeerOperationType()
+DisablePeerProcedure.getPeerOperationType()
 
 
 PeerProcedureInterface.PeerOperationType
-DisablePeerProcedure.getPeerOperationType()
+RemovePeerProcedure.getPeerOperationType()
 
 
 PeerProcedureInterface.PeerOperationType
-UpdatePeerConfigProcedure.getPeerOperationType()
+EnablePeerProcedure.getPeerOperationType()
 
 
 PeerProcedureInterface.PeerOperationType
-AddPeerProcedure.getPeerOperationType()
+RefreshPeerProcedure.getPeerOperationType()
 
 
 PeerProcedureInterface.PeerOperationType
-EnablePeerProcedure.getPeerOperationType()
+AddPeerProcedure.getPeerOperationType()
 
 
 PeerProcedureInterface.PeerOperationType
-RemovePeerProcedure.getPeerOperationType()
+UpdatePeerConfigProcedure.getPeerOperationType()
 
 
 private static PeerProcedureInterface.PeerOperationType

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ProcedurePrepareLatch.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ProcedurePrepareLatch.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ProcedurePrepareLatch.html
index 269bc46..f7a6279 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ProcedurePrepareLatch.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ProcedurePrepareLatch.html
@@ -125,11 +125,11 @@
 
 
 private ProcedurePrepareLatch
-RecoverMetaProcedure.syncLatch
+AbstractStateMachineTableProcedure.syncLatch
 
 
 private ProcedurePrepareLatch
-AbstractStateMachineTableProcedure.syncLatch
+RecoverMetaProcedure.syncLatch
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ServerProcedureInterface.ServerOperationType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ServerProcedureInterface.ServerOperationType.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ServerProcedureInterface.ServerOperationType.html
index 5e8085c..8b6ceb7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ServerProcedureInterface.ServerOperationType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ServerProcedureInterface.ServerOperationType.html
@@ -104,14 +104,14 @@
 
 
 ServerProcedureInterface.ServerOperationType
-ServerCrashProcedure.getServerOperationType()
-
-
-ServerProcedureInterface.ServerOperationType
 ServerProcedureInterface.getServerOperationType()
 Given an operation type we can take decisions about what to 
do with pending operations.
 
 
+
+ServerProcedureInterface.ServerOperationType
+ServerCrashProcedure.getServerOperationType()
+
 
 static ServerProcedureInterface.ServerOperationType
 ServerProcedureInterface.ServerOperationType.valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
index 046295e..e736f37 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
@@ -112,19 +112,19 @@
 
 
 TableProcedureInterface.TableOperationType
-MoveRegionProcedure.getTableOperationType()
+UnassignProcedure.getTableOperationType()
 
 
 TableProcedureInterface.TableOperationType
-GCMergedRegionsProcedure.getTableOperationType()

[48/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/apidocs/org/apache/hadoop/hbase/client/class-use/Get.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Get.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Get.html
index 2c02c0d..c897bbc 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Get.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Get.html
@@ -257,26 +257,26 @@
 
 
 
-boolean
-Table.exists(Getget)
+default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+AsyncTable.exists(Getget)
 Test for the existence of columns in the table, as 
specified by the Get.
 
 
 
-default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
-AsyncTable.exists(Getget)
+boolean
+Table.exists(Getget)
 Test for the existence of columns in the table, as 
specified by the Get.
 
 
 
-Result
-Table.get(Getget)
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureResult
+AsyncTable.get(Getget)
 Extracts certain cells from a given row.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureResult
-AsyncTable.get(Getget)
+Result
+Table.get(Getget)
 Extracts certain cells from a given row.
 
 
@@ -290,18 +290,24 @@
 
 
 
-boolean[]
-Table.exists(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListGetgets)
+default http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+AsyncTable.exists(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListGetgets)
 Test for the existence of columns in the table, as 
specified by the Gets.
 
 
 
-default http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
-AsyncTable.exists(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListGetgets)
+boolean[]
+Table.exists(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListGetgets)
 Test for the existence of columns in the table, as 
specified by the Gets.
 
 
 
+default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+AsyncTable.existsAll(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListGetgets)
+A simple version for batch exists.
+
+
+
 default boolean[]
 Table.existsAll(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListGetgets)
 Deprecated.
@@ -310,24 +316,18 @@
 
 
 
-
-default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean

[39/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
index e63037e..11ca943 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -535,11 +535,6 @@ service.
 
 
 static Cell
-PrivateCellUtil.createCell(Cellcell,
-  byte[]tags)
-
-
-static Cell
 CellUtil.createCell(Cellcell,
   byte[]tags)
 Deprecated.
@@ -547,13 +542,12 @@ service.
 
 
 
-
+
 static Cell
-PrivateCellUtil.createCell(Cellcell,
-  byte[]value,
+PrivateCellUtil.createCell(Cellcell,
   byte[]tags)
 
-
+
 static Cell
 CellUtil.createCell(Cellcell,
   byte[]value,
@@ -563,12 +557,13 @@ service.
 
 
 
-
+
 static Cell
-PrivateCellUtil.createCell(Cellcell,
-  http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTagtags)
+PrivateCellUtil.createCell(Cellcell,
+  byte[]value,
+  byte[]tags)
 
-
+
 static Cell
 CellUtil.createCell(Cellcell,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTagtags)
@@ -577,6 +572,11 @@ service.
 
 
 
+
+static Cell
+PrivateCellUtil.createCell(Cellcell,
+  http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTagtags)
+
 
 static Cell
 PrivateCellUtil.createFirstDeleteFamilyCellOnRow(byte[]row,
@@ -757,10 +757,6 @@ service.
 
 
 static byte[]
-PrivateCellUtil.cloneTags(Cellcell)
-
-
-static byte[]
 CellUtil.cloneTags(Cellcell)
 Deprecated.
 As of HBase-2.0. Will be 
removed in HBase-3.0.
@@ -768,6 +764,10 @@ service.
 
 
 
+
+static byte[]
+PrivateCellUtil.cloneTags(Cellcell)
+
 
 static byte[]
 CellUtil.cloneValue(Cellcell)
@@ -781,11 +781,6 @@ service.
 
 
 int
-CellComparatorImpl.compare(Cella,
-   Cellb)
-
-
-int
 KeyValue.MetaComparator.compare(Cellleft,
Cellright)
 Deprecated.
@@ -793,7 +788,7 @@ service.
  table.
 
 
-
+
 int
 KeyValue.KVComparator.compare(Cellleft,
Cellright)
@@ -802,6 +797,11 @@ service.
  rowkey, colfam/qual, timestamp, type, mvcc
 
 
+
+int
+CellComparatorImpl.compare(Cella,
+   Cellb)
+
 
 int
 CellComparatorImpl.compare(Cella,
@@ -812,27 +812,27 @@ service.
 
 
 static int
-PrivateCellUtil.compare(CellComparatorcomparator,
+CellUtil.compare(CellComparatorcomparator,
Cellleft,
byte[]key,
intoffset,
intlength)
-Used when a cell needs to be compared with a key byte[] 
such as cases of finding the index from
- the index block, bloom keys from the bloom blocks This byte[] is expected to 
be serialized in
- the KeyValue serialization format If the KeyValue (Cell's) serialization 
format changes this
- method cannot be used.
+Deprecated.
+As of HBase-2.0. Will be 
removed in HBase-3.0
+
 
 
 
 static int
-CellUtil.compare(CellComparatorcomparator,
+PrivateCellUtil.compare(CellComparatorcomparator,
Cellleft,
byte[]key,
intoffset,
intlength)
-Deprecated.
-As of HBase-2.0. Will be 
removed in HBase-3.0
-
+Used when a cell needs to be compared with a key byte[] 
such as cases of finding the index from
+ the index block, bloom keys from the bloom blocks This byte[] is expected to 
be serialized in
+ the KeyValue serialization format If the KeyValue (Cell's) serialization 
format changes this
+ method cannot be used.
 
 
 
@@ -1035,23 +1035,23 @@ service.
 
 
 int
+KeyValue.KVComparator.compareRows(Cellleft,
+   Cellright)
+Deprecated.
+
+
+
+int
 CellComparatorImpl.compareRows(Cellleft,
Cellright)
 Compares the rows of the left and right cell.
 
 
-
+
 int
 CellComparatorImpl.MetaCellComparator.compareRows(Cellleft,
Cellright)
 
-
-int
-KeyValue.KVComparator.compareRows(Cellleft,
-   Cellright)
-Deprecated.
-
-
 
 int
 CellComparator.compareTimestamps(CellleftCell,
@@ -1061,17 +1061,17 @@ service.
 
 
 int
-CellComparatorImpl.compareTimestamps(Cellleft,
+KeyValue.KVComparator.compareTimestamps(Cellleft,
  Cellright)
-Compares cell's timestamps in DESCENDING order.
-
+Deprecated.
+
 
 
 int
-KeyValue.KVComparator.compareTimestamps(Cellleft,
+CellComparatorImpl.compareTimestamps(Cellleft,
  Cellright)
-Deprecated.
-
+Compares cell's timestamps in DESCENDING order.
+
 
 
 static int
@@ -1258,11 +1258,6 @@ service.
 
 
 static Cell
-PrivateCellUtil.createCell(Cellcell,
-  byte[]tags)
-
-
-static Cell
 CellUtil.createCell(Cellcell,
   byte[]tags)
 Deprecated.
@@ -1270,13 +1265,12 @@ service.
 
 
 
-
+
 static Cell
-PrivateCellUtil.createCell(Cellcell,
-  byte[]value,

[20/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
index 56a2ea1..98104cb 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
@@ -449,14 +449,14 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 TableDescriptor
-HTable.getDescriptor()
-
-
-TableDescriptor
 Table.getDescriptor()
 Gets the table 
descriptor for this table.
 
 
+
+TableDescriptor
+HTable.getDescriptor()
+
 
 TableDescriptor
 Admin.getDescriptor(TableNametableName)
@@ -509,51 +509,51 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureTableDescriptor
-AsyncAdmin.getDescriptor(TableNametableName)
-Method for getting the tableDescriptor
-
+AsyncHBaseAdmin.getDescriptor(TableNametableName)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureTableDescriptor
-RawAsyncHBaseAdmin.getDescriptor(TableNametableName)
+AsyncAdmin.getDescriptor(TableNametableName)
+Method for getting the tableDescriptor
+
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureTableDescriptor
-AsyncHBaseAdmin.getDescriptor(TableNametableName)
+RawAsyncHBaseAdmin.getDescriptor(TableNametableName)
 
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
 RawAsyncHBaseAdmin.getTableDescriptors(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequestrequest)
 
 
-default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
-AsyncAdmin.listTableDescriptors()
-List all the userspace tables.
-
-
-
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
 Admin.listTableDescriptors()
 List all the userspace tables.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
 HBaseAdmin.listTableDescriptors()
 
+
+default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
+AsyncAdmin.listTableDescriptors()
+List all the userspace tables.
+
+
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
-AsyncAdmin.listTableDescriptors(booleanincludeSysTables)
-List all the tables.
-
+AsyncHBaseAdmin.listTableDescriptors(booleanincludeSysTables)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
-RawAsyncHBaseAdmin.listTableDescriptors(booleanincludeSysTables)
+AsyncAdmin.listTableDescriptors(booleanincludeSysTables)
+List all the tables.
+
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
-AsyncHBaseAdmin.listTableDescriptors(booleanincludeSysTables)
+RawAsyncHBaseAdmin.listTableDescriptors(booleanincludeSysTables)
 
 
 

[14/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
index fe5ef34..7161108 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
@@ -166,27 +166,27 @@
 
 
 DataBlockEncoder.EncodedSeeker
-RowIndexCodecV1.createSeeker(CellComparatorcomparator,
+CopyKeyDataBlockEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-CopyKeyDataBlockEncoder.createSeeker(CellComparatorcomparator,
+PrefixKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-DiffKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
+FastDiffDeltaEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-FastDiffDeltaEncoder.createSeeker(CellComparatorcomparator,
+DiffKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-PrefixKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
+RowIndexCodecV1.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
@@ -198,13 +198,13 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
-RowIndexCodecV1.decodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,
-   HFileBlockDecodingContextdecodingCtx)
+BufferedDataBlockEncoder.decodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,
+   HFileBlockDecodingContextblkDecodingCtx)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
-BufferedDataBlockEncoder.decodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,
-   HFileBlockDecodingContextblkDecodingCtx)
+RowIndexCodecV1.decodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,
+   HFileBlockDecodingContextdecodingCtx)
 
 
 
@@ -279,18 +279,18 @@
 
 
 HFileBlockDecodingContext
-HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContextfileContext)
-
-
-HFileBlockDecodingContext
 NoOpDataBlockEncoder.newDataBlockDecodingContext(HFileContextmeta)
 
-
+
 HFileBlockDecodingContext
 HFileDataBlockEncoder.newDataBlockDecodingContext(HFileContextfileContext)
 create a encoder specific decoding context for 
reading.
 
 
+
+HFileBlockDecodingContext
+HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContextfileContext)
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
index 66443b9..79b047f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
@@ -116,36 +116,36 @@
  HFileBlockDefaultDecodingContextdecodingCtx)
 
 
-protected http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
-CopyKeyDataBlockEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,
+protected abstract http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
+BufferedDataBlockEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,

[26/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
index aeaf9fe..232a8b4 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
@@ -495,7 +495,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private static HRegionLocation
-MetaTableAccessor.getRegionLocation(Resultr,
+AsyncMetaTableAccessor.getRegionLocation(Resultr,
  RegionInforegionInfo,
  intreplicaId)
 Returns the HRegionLocation parsed from the given meta row 
Result
@@ -504,7 +504,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private static HRegionLocation
-AsyncMetaTableAccessor.getRegionLocation(Resultr,
+MetaTableAccessor.getRegionLocation(Resultr,
  RegionInforegionInfo,
  intreplicaId)
 Returns the HRegionLocation parsed from the given meta row 
Result
@@ -944,9 +944,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
-AsyncAdmin.getRegions(ServerNameserverName)
-Get all the online regions on a region server.
-
+AsyncHBaseAdmin.getRegions(ServerNameserverName)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
@@ -955,22 +953,22 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
-RawAsyncHBaseAdmin.getRegions(ServerNameserverName)
-
-
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
 HBaseAdmin.getRegions(ServerNamesn)
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
+AsyncAdmin.getRegions(ServerNameserverName)
+Get all the online regions on a region server.
+
+
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
-AsyncHBaseAdmin.getRegions(ServerNameserverName)
+RawAsyncHBaseAdmin.getRegions(ServerNameserverName)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
-AsyncAdmin.getRegions(TableNametableName)
-Get the regions of a given table.
-
+AsyncHBaseAdmin.getRegions(TableNametableName)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
@@ -979,16 +977,18 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
-RawAsyncHBaseAdmin.getRegions(TableNametableName)
-
-
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
 HBaseAdmin.getRegions(TableNametableName)
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo

[05/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html
index 29b9507..bd8ccff 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html
@@ -132,13 +132,13 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListProcedure?
-MasterServices.getProcedures()
-Get procedures
-
+HMaster.getProcedures()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListProcedure?
-HMaster.getProcedures()
+MasterServices.getProcedures()
+Get procedures
+
 
 
 
@@ -920,44 +920,44 @@
 
 
 
-protected Procedure
-SimpleProcedureScheduler.dequeue()
-
-
 protected abstract Procedure
 AbstractProcedureScheduler.dequeue()
 Fetch one Procedure from the queue
  NOTE: this method is called with the sched lock held.
 
 
-
-protected Procedure[]
-SequentialProcedure.doExecute(TEnvironmentenv)
-
 
+protected Procedure
+SimpleProcedureScheduler.dequeue()
+
+
 protected ProcedureTEnvironment[]
 Procedure.doExecute(TEnvironmentenv)
 Internal method called by the ProcedureExecutor that starts 
the user-level code execute().
 
 
-
-protected ProcedureTEnvironment[]
-ProcedureExecutor.FailedProcedure.execute(TEnvironmentenv)
-
 
 protected Procedure[]
+SequentialProcedure.doExecute(TEnvironmentenv)
+
+
+protected Procedure[]
 StateMachineProcedure.execute(TEnvironmentenv)
 
+
+protected ProcedureTEnvironment[]
+ProcedureInMemoryChore.execute(TEnvironmentenv)
+
 
+protected ProcedureTEnvironment[]
+ProcedureExecutor.FailedProcedure.execute(TEnvironmentenv)
+
+
 protected abstract ProcedureTEnvironment[]
 Procedure.execute(TEnvironmentenv)
 The main code of the procedure.
 
 
-
-protected ProcedureTEnvironment[]
-ProcedureInMemoryChore.execute(TEnvironmentenv)
-
 
 Procedure?
 LockedResource.getExclusiveLockOwnerProcedure()
@@ -1115,14 +1115,14 @@
 
 
 void
-SimpleProcedureScheduler.completionCleanup(Procedureproc)
-
-
-void
 ProcedureScheduler.completionCleanup(Procedureproc)
 The procedure in execution completed.
 
 
+
+void
+SimpleProcedureScheduler.completionCleanup(Procedureproc)
+
 
 static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure
 ProcedureUtil.convertToProtoProcedure(Procedureproc)
@@ -1135,17 +1135,17 @@
  Procedureprocedure)
 
 
-protected void
-SimpleProcedureScheduler.enqueue(Procedureprocedure,
-   booleanaddFront)
-
-
 protected abstract void
 AbstractProcedureScheduler.enqueue(Procedureprocedure,
booleanaddFront)
 Add the procedure to the queue.
 
 
+
+protected void
+SimpleProcedureScheduler.enqueue(Procedureprocedure,
+   booleanaddFront)
+
 
 private void
 ProcedureExecutor.execCompletionCleanup(Procedureproc)
@@ -1327,14 +1327,14 @@
 
 
 void
-SimpleProcedureScheduler.yield(Procedureproc)
-
-
-void
 ProcedureScheduler.yield(Procedureproc)
 The procedure can't run at the moment.
 
 
+
+void
+SimpleProcedureScheduler.yield(Procedureproc)
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureEvent.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureEvent.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureEvent.html
index a9a3870..5f35947 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureEvent.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureEvent.html
@@ -141,11 +141,11 @@
 
 
 ProcedureEvent?
-MasterServices.getInitializedEvent()
+HMaster.getInitializedEvent()
 
 
 ProcedureEvent?
-HMaster.getInitializedEvent()
+MasterServices.getInitializedEvent()
 
 
 ProcedureEvent?

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.html
index 9c6d034..8c7413e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.html
@@ -121,11 +121,11 @@
 
 
 ProcedureExecutorMasterProcedureEnv
-MasterServices.getMasterProcedureExecutor()
+HMaster.getMasterProcedureExecutor()
 
 
 ProcedureExecutorMasterProcedureEnv

[47/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index fca7f0f..dc49ff7 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -107,27 +107,27 @@
 
 
 Filter.ReturnCode
-ValueFilter.filterCell(Cellc)
+FilterList.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterCell(Cellc)
+WhileMatchFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterCell(Cellcell)
+PageFilter.filterCell(Cellignored)
 
 
 Filter.ReturnCode
-TimestampsFilter.filterCell(Cellc)
+MultipleColumnPrefixFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterCell(Cellc)
+InclusiveStopFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterCell(Cellc)
+KeyOnlyFilter.filterCell(Cellignored)
 
 
 Filter.ReturnCode
@@ -135,33 +135,35 @@
 
 
 Filter.ReturnCode
-RandomRowFilter.filterCell(Cellc)
+ColumnRangeFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterCell(Cellignored)
+FamilyFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterCell(Cellc)
+RandomRowFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterCell(Cellc)
+FirstKeyValueMatchingQualifiersFilter.filterCell(Cellc)
+Deprecated.
+
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterCell(Cellc)
+SkipFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-QualifierFilter.filterCell(Cellc)
+DependentColumnFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterCell(Cellc)
-Deprecated.
-
+Filter.filterCell(Cellc)
+A way to filter based on the column family, column 
qualifier and/or the column value.
+
 
 
 Filter.ReturnCode
@@ -169,87 +171,85 @@
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterCell(Cellc)
+ValueFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FilterList.filterCell(Cellc)
+ColumnCountGetFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterCell(Cellc)
+QualifierFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-SkipFilter.filterCell(Cellc)
+PrefixFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-PageFilter.filterCell(Cellignored)
+FuzzyRowFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-PrefixFilter.filterCell(Cellc)
+TimestampsFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FamilyFilter.filterCell(Cellc)
+ColumnPrefixFilter.filterCell(Cellcell)
 
 
 Filter.ReturnCode
-Filter.filterCell(Cellc)
-A way to filter based on the column family, column 
qualifier and/or the column value.
-
+MultiRowRangeFilter.filterCell(Cellignored)
 
 
 Filter.ReturnCode
-FuzzyRowFilter.filterCell(Cellc)
+SingleColumnValueFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterCell(Cellignored)
+FirstKeyOnlyFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterColumn(Cellcell)
+MultipleColumnPrefixFilter.filterColumn(Cellcell)
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterColumn(Cellcell)
+ColumnPrefixFilter.filterColumn(Cellcell)
 
 
 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cellc)
+FilterList.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterKeyValue(Cellc)
+WhileMatchFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cellc)
+PageFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-TimestampsFilter.filterKeyValue(Cellc)
+MultipleColumnPrefixFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cellc)
+InclusiveStopFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterKeyValue(Cellc)
+KeyOnlyFilter.filterKeyValue(Cellignored)
 Deprecated.
 
 
@@ -261,44 +261,47 @@
 
 
 Filter.ReturnCode
-RandomRowFilter.filterKeyValue(Cellc)
+ColumnRangeFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterKeyValue(Cellignored)
+FamilyFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterKeyValue(Cellc)
+RandomRowFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterKeyValue(Cellc)
+FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterKeyValue(Cellc)
+SkipFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-QualifierFilter.filterKeyValue(Cellc)
+DependentColumnFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cellc)
-Deprecated.
+Filter.filterKeyValue(Cellc)
+Deprecated.
+As of release 

[33/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/class-use/Size.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Size.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Size.html
index 20b7674..6c0e3b8 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Size.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Size.html
@@ -189,130 +189,130 @@
 
 
 Size
+RegionLoad.getBloomFilterSize()
+Deprecated.
+
+
+
+Size
 RegionMetrics.getBloomFilterSize()
 
+
+Size
+RegionMetricsBuilder.RegionMetricsImpl.getBloomFilterSize()
+
 
 Size
-RegionLoad.getBloomFilterSize()
+ServerLoad.getMaxHeapSize()
 Deprecated.
 
 
 
 Size
-RegionMetricsBuilder.RegionMetricsImpl.getBloomFilterSize()
+ServerMetrics.getMaxHeapSize()
 
 
 Size
-ServerMetrics.getMaxHeapSize()
+ServerMetricsBuilder.ServerMetricsImpl.getMaxHeapSize()
 
 
 Size
-ServerLoad.getMaxHeapSize()
+RegionLoad.getMemStoreSize()
 Deprecated.
 
 
 
 Size
-ServerMetricsBuilder.ServerMetricsImpl.getMaxHeapSize()
+RegionMetrics.getMemStoreSize()
 
 
 Size
-RegionMetrics.getMemStoreSize()
+RegionMetricsBuilder.RegionMetricsImpl.getMemStoreSize()
 
 
 Size
-RegionLoad.getMemStoreSize()
+RegionLoad.getStoreFileIndexSize()
 Deprecated.
 
 
 
 Size
-RegionMetricsBuilder.RegionMetricsImpl.getMemStoreSize()
-
-
-Size
 RegionMetrics.getStoreFileIndexSize()
 TODO: why we pass the same value to different counters? 
Currently, the value from
  getStoreFileIndexSize() is same with getStoreFileRootLevelIndexSize()
  see HRegionServer#createRegionLoad.
 
 
+
+Size
+RegionMetricsBuilder.RegionMetricsImpl.getStoreFileIndexSize()
+
 
 Size
-RegionLoad.getStoreFileIndexSize()
+RegionLoad.getStoreFileRootLevelIndexSize()
 Deprecated.
 
 
 
 Size
-RegionMetricsBuilder.RegionMetricsImpl.getStoreFileIndexSize()
+RegionMetrics.getStoreFileRootLevelIndexSize()
 
 
 Size
-RegionMetrics.getStoreFileRootLevelIndexSize()
+RegionMetricsBuilder.RegionMetricsImpl.getStoreFileRootLevelIndexSize()
 
 
 Size
-RegionLoad.getStoreFileRootLevelIndexSize()
+RegionLoad.getStoreFileSize()
 Deprecated.
 
 
 
 Size
-RegionMetricsBuilder.RegionMetricsImpl.getStoreFileRootLevelIndexSize()
+RegionMetrics.getStoreFileSize()
 
 
 Size
-RegionMetrics.getStoreFileSize()
+RegionMetricsBuilder.RegionMetricsImpl.getStoreFileSize()
 
 
 Size
-RegionLoad.getStoreFileSize()
+RegionLoad.getStoreFileUncompressedDataIndexSize()
 Deprecated.
 
 
 
 Size
-RegionMetricsBuilder.RegionMetricsImpl.getStoreFileSize()
+RegionMetrics.getStoreFileUncompressedDataIndexSize()
 
 
 Size
-RegionMetrics.getStoreFileUncompressedDataIndexSize()
+RegionMetricsBuilder.RegionMetricsImpl.getStoreFileUncompressedDataIndexSize()
 
 
 Size
-RegionLoad.getStoreFileUncompressedDataIndexSize()
+RegionLoad.getUncompressedStoreFileSize()
 Deprecated.
 
 
 
 Size
-RegionMetricsBuilder.RegionMetricsImpl.getStoreFileUncompressedDataIndexSize()
+RegionMetrics.getUncompressedStoreFileSize()
 
 
 Size
-RegionMetrics.getUncompressedStoreFileSize()
+RegionMetricsBuilder.RegionMetricsImpl.getUncompressedStoreFileSize()
 
 
 Size
-RegionLoad.getUncompressedStoreFileSize()
+ServerLoad.getUsedHeapSize()
 Deprecated.
 
 
 
 Size
-RegionMetricsBuilder.RegionMetricsImpl.getUncompressedStoreFileSize()
-
-
-Size
 ServerMetrics.getUsedHeapSize()
 
-
-Size
-ServerLoad.getUsedHeapSize()
-Deprecated.
-
-
 
 Size
 ServerMetricsBuilder.ServerMetricsImpl.getUsedHeapSize()

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
index 7f3e934..320e7bc 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
@@ -122,11 +122,11 @@
 
 
 TableDescriptors
-MasterServices.getTableDescriptors()
+HMaster.getTableDescriptors()
 
 
 TableDescriptors
-HMaster.getTableDescriptors()
+MasterServices.getTableDescriptors()
 
 
 



[11/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
index 5f7ce59..7244ce2 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
@@ -114,15 +114,15 @@
 
 
 private PriorityFunction
-RpcExecutor.priority
+SimpleRpcScheduler.priority
 
 
 private PriorityFunction
-RpcExecutor.CallPriorityComparator.priority
+RpcExecutor.priority
 
 
 private PriorityFunction
-SimpleRpcScheduler.priority
+RpcExecutor.CallPriorityComparator.priority
 
 
 
@@ -319,7 +319,7 @@
 
 
 RpcScheduler
-FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
+RpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
   PriorityFunctionpriority)
 Deprecated.
 
@@ -333,16 +333,18 @@
 
 
 RpcScheduler
-RpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
+FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
   PriorityFunctionpriority)
 Deprecated.
 
 
 
 RpcScheduler
-FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
+RpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
   PriorityFunctionpriority,
-  Abortableserver)
+  Abortableserver)
+Constructs a RpcScheduler.
+
 
 
 RpcScheduler
@@ -352,11 +354,9 @@
 
 
 RpcScheduler
-RpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
+FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
   PriorityFunctionpriority,
-  Abortableserver)
-Constructs a RpcScheduler.
-
+  Abortableserver)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html
index 6d59fb7..4a25f5c 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html
@@ -123,13 +123,13 @@
 
 
 void
-RpcCallContext.setCallBack(RpcCallbackcallback)
-Sets a callback which has to be executed at the end of this 
RPC call.
-
+ServerCall.setCallBack(RpcCallbackcallback)
 
 
 void
-ServerCall.setCallBack(RpcCallbackcallback)
+RpcCallContext.setCallBack(RpcCallbackcallback)
+Sets a callback which has to be executed at the end of this 
RPC call.
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html
index baa4e5e..fab4d7a 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html
@@ -131,24 +131,32 @@
 
 
 
-protected RpcControllerFactory
-RegionAdminServiceCallable.rpcControllerFactory
-
-
 private RpcControllerFactory
 ConnectionImplementation.rpcControllerFactory
 
+
+protected RpcControllerFactory
+ClientScanner.rpcControllerFactory
+
 
+protected RpcControllerFactory
+RegionAdminServiceCallable.rpcControllerFactory
+
+
 (package private) RpcControllerFactory
 AsyncConnectionImpl.rpcControllerFactory
 
-
+
 private RpcControllerFactory
 HTable.rpcControllerFactory
 
+
+private RpcControllerFactory
+HBaseAdmin.rpcControllerFactory
+
 
 private RpcControllerFactory
-RpcRetryingCallerWithReadReplicas.rpcControllerFactory
+SecureBulkLoadClient.rpcControllerFactory
 
 
 protected RpcControllerFactory
@@ -156,15 +164,7 @@
 
 
 private RpcControllerFactory
-HBaseAdmin.rpcControllerFactory
-
-
-private RpcControllerFactory
-SecureBulkLoadClient.rpcControllerFactory
-
-
-protected RpcControllerFactory
-ClientScanner.rpcControllerFactory
+RpcRetryingCallerWithReadReplicas.rpcControllerFactory
 
 
 (package private) RpcControllerFactory
@@ -181,11 +181,11 @@
 
 
 RpcControllerFactory
-ClusterConnection.getRpcControllerFactory()
+ConnectionImplementation.getRpcControllerFactory()
 
 
 RpcControllerFactory
-ConnectionImplementation.getRpcControllerFactory()
+ClusterConnection.getRpcControllerFactory()
 
 
 private RpcControllerFactory

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcExecutor.Handler.html
--
diff --git 

[27/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/Mutation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Mutation.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Mutation.html
index 53d69df..597d1ba 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Mutation.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Mutation.html
@@ -355,24 +355,24 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-BufferedMutatorImpl.mutate(Mutationm)
-
-
-void
 BufferedMutator.mutate(Mutationmutation)
 Sends a Mutation to 
the table.
 
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
+AsyncBufferedMutatorImpl.mutate(Mutationmutation)
+
 
+void
+BufferedMutatorImpl.mutate(Mutationm)
+
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
 AsyncBufferedMutator.mutate(Mutationmutation)
 Sends a Mutation to 
the table.
 
 
-
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
-AsyncBufferedMutatorImpl.mutate(Mutationmutation)
-
 
 
 
@@ -390,24 +390,24 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-BufferedMutatorImpl.mutate(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Mutationms)
-
-
-void
 BufferedMutator.mutate(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Mutationmutations)
 Send some Mutations to 
the table.
 
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
+AsyncBufferedMutatorImpl.mutate(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Mutationmutations)
+
 
+void
+BufferedMutatorImpl.mutate(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Mutationms)
+
+
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
 AsyncBufferedMutator.mutate(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Mutationmutations)
 Send some Mutations to 
the table.
 
 
-
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
-AsyncBufferedMutatorImpl.mutate(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Mutationmutations)
-
 
 static RowMutations
 RowMutations.of(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Mutationmutations)
@@ -543,15 +543,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 MutationSerialization.getDeserializer(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">ClassMutationc)
 
 

[02/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HRegion.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HRegion.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HRegion.html
index 9856943..36977cd 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HRegion.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HRegion.html
@@ -257,31 +257,23 @@
 
 
 private HRegion
-MemStoreFlusher.FlushRegionEntry.region
+MetricsRegionWrapperImpl.region
 
 
 private HRegion
-RegionServerServices.PostOpenDeployContext.region
+RegionServicesForStores.region
 
 
 private HRegion
 CompactSplit.CompactionRunner.region
 
 
-protected HRegion
-FlushPolicy.region
-The region configured for this flush policy.
+(package private) HRegion
+RegionCoprocessorHost.region
+The region
 
 
 
-private HRegion
-RegionServicesForStores.region
-
-
-private HRegion
-MetricsRegionWrapperImpl.region
-
-
 protected HRegion
 RegionSplitPolicy.region
 The region configured for this split policy.
@@ -296,19 +288,27 @@
 HRegion.RegionScannerImpl.region
 
 
-(package private) HRegion
-RegionCoprocessorHost.region
-The region
-
+private HRegion
+RegionServerServices.PostOpenDeployContext.region
 
 
-protected HRegion
-HStore.region
+private HRegion
+MemStoreFlusher.FlushRegionEntry.region
 
 
+protected HRegion
+FlushPolicy.region
+The region configured for this flush policy.
+
+
+
 private HRegion
 BusyRegionSplitPolicy.region
 
+
+protected HRegion
+HStore.region
+
 
 
 
@@ -563,14 +563,14 @@
 
 
 void
-HRegionServer.addRegion(HRegionregion)
-
-
-void
 MutableOnlineRegions.addRegion(HRegionr)
 Add to online regions.
 
 
+
+void
+HRegionServer.addRegion(HRegionregion)
+
 
 private RSRpcServices.RegionScannerHolder
 RSRpcServices.addScanner(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringscannerName,
@@ -624,57 +624,57 @@
 
 
 protected void
-FlushPolicy.configureForRegion(HRegionregion)
-Upon construction, this method will be called with the 
region to be governed.
-
+KeyPrefixRegionSplitPolicy.configureForRegion(HRegionregion)
 
 
 protected void
-FlushAllLargeStoresPolicy.configureForRegion(HRegionregion)
+RegionSplitPolicy.configureForRegion(HRegionregion)
+Upon construction, this method will be called with the 
region
+ to be governed.
+
 
 
 protected void
-ConstantSizeRegionSplitPolicy.configureForRegion(HRegionregion)
+DelimitedKeyPrefixRegionSplitPolicy.configureForRegion(HRegionregion)
 
 
 protected void
-RegionSplitPolicy.configureForRegion(HRegionregion)
-Upon construction, this method will be called with the 
region
- to be governed.
-
+IncreasingToUpperBoundRegionSplitPolicy.configureForRegion(HRegionregion)
 
 
 protected void
-FlushNonSloppyStoresFirstPolicy.configureForRegion(HRegionregion)
+FlushAllLargeStoresPolicy.configureForRegion(HRegionregion)
 
 
 protected void
-DelimitedKeyPrefixRegionSplitPolicy.configureForRegion(HRegionregion)
+FlushPolicy.configureForRegion(HRegionregion)
+Upon construction, this method will be called with the 
region to be governed.
+
 
 
 protected void
-KeyPrefixRegionSplitPolicy.configureForRegion(HRegionregion)
+ConstantSizeRegionSplitPolicy.configureForRegion(HRegionregion)
 
 
 protected void
-IncreasingToUpperBoundRegionSplitPolicy.configureForRegion(HRegionregion)
+FlushNonSloppyStoresFirstPolicy.configureForRegion(HRegionregion)
 
 
 protected void
 BusyRegionSplitPolicy.configureForRegion(HRegionregion)
 
 
-static RegionSplitPolicy
-RegionSplitPolicy.create(HRegionregion,
+static FlushPolicy
+FlushPolicyFactory.create(HRegionregion,
   org.apache.hadoop.conf.Configurationconf)
-Create the RegionSplitPolicy configured for the given 
table.
+Create the FlushPolicy configured for the given table.
 
 
 
-static FlushPolicy
-FlushPolicyFactory.create(HRegionregion,
+static RegionSplitPolicy
+RegionSplitPolicy.create(HRegionregion,
   org.apache.hadoop.conf.Configurationconf)
-Create the FlushPolicy configured for the given table.
+Create the RegionSplitPolicy configured for the given 
table.
 
 
 
@@ -766,13 +766,13 @@
 
 
 protected void
-ReversedRegionScannerImpl.initializeKVHeap(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListKeyValueScannerscanners,
+HRegion.RegionScannerImpl.initializeKVHeap(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListKeyValueScannerscanners,
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListKeyValueScannerjoinedScanners,
 HRegionregion)
 
 
 protected void

[17/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index bb2794a..0c342b2 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -151,115 +151,115 @@
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterCell(Cellcell)
+FilterListWithAND.filterCell(Cellc)
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterCell(Cellc)
+ValueFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-RowFilter.filterCell(Cellv)
+SkipFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FuzzyRowFilter.filterCell(Cellc)
+FamilyFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-Filter.filterCell(Cellc)
-A way to filter based on the column family, column 
qualifier and/or the column value.
-
+ColumnPrefixFilter.filterCell(Cellcell)
 
 
 Filter.ReturnCode
-RandomRowFilter.filterCell(Cellc)
+PageFilter.filterCell(Cellignored)
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterCell(Cellc)
+RowFilter.filterCell(Cellv)
 
 
 Filter.ReturnCode
-SkipFilter.filterCell(Cellc)
+ColumnRangeFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-TimestampsFilter.filterCell(Cellc)
+ColumnCountGetFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-ValueFilter.filterCell(Cellc)
+MultipleColumnPrefixFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterCell(Cellignored)
+ColumnPaginationFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FamilyFilter.filterCell(Cellc)
+DependentColumnFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-QualifierFilter.filterCell(Cellc)
+FilterListWithOR.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FilterList.filterCell(Cellc)
+InclusiveStopFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterCell(Cellc)
+KeyOnlyFilter.filterCell(Cellignored)
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterCell(Cellc)
+MultiRowRangeFilter.filterCell(Cellignored)
 
 
 Filter.ReturnCode
-FilterListWithAND.filterCell(Cellc)
+Filter.filterCell(Cellc)
+A way to filter based on the column family, column 
qualifier and/or the column value.
+
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterCell(Cellc)
+FirstKeyOnlyFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterCell(Cellignored)
+WhileMatchFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-PrefixFilter.filterCell(Cellc)
+FirstKeyValueMatchingQualifiersFilter.filterCell(Cellc)
+Deprecated.
+
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterCell(Cellc)
+TimestampsFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterCell(Cellc)
-Deprecated.
-
+FuzzyRowFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-PageFilter.filterCell(Cellignored)
+FilterList.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FilterListWithOR.filterCell(Cellc)
+RandomRowFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterCell(Cellc)
+PrefixFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterCell(Cellc)
+SingleColumnValueFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterCell(Cellc)
+QualifierFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
@@ -275,158 +275,158 @@
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cellc)
+ValueFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cellc)
+SkipFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-RowFilter.filterKeyValue(Cellc)
-Deprecated.
-
+FilterListBase.filterKeyValue(Cellc)
 
 
 Filter.ReturnCode
-FuzzyRowFilter.filterKeyValue(Cellc)
+FamilyFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-Filter.filterKeyValue(Cellc)
-Deprecated.
-As of release 2.0.0, this 
will be removed in HBase 3.0.0.
- Instead use filterCell(Cell)
-
+ColumnPrefixFilter.filterKeyValue(Cellc)
+Deprecated.
 
 
 
 Filter.ReturnCode
-RandomRowFilter.filterKeyValue(Cellc)
+PageFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterKeyValue(Cellc)
+RowFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cellc)
+ColumnRangeFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-TimestampsFilter.filterKeyValue(Cellc)
+ColumnCountGetFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cellc)
+MultipleColumnPrefixFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterKeyValue(Cellignored)
+ColumnPaginationFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cellc)
+DependentColumnFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 

[18/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
index 80108a2..a07a830 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
@@ -144,17 +144,15 @@
 
 
 
-static HColumnDescriptor
-HColumnDescriptor.parseFrom(byte[]bytes)
-Deprecated.
-
-
-
 static HTableDescriptor
 HTableDescriptor.parseFrom(byte[]bytes)
 Deprecated.
 
 
+
+static ClusterId
+ClusterId.parseFrom(byte[]bytes)
+
 
 static HRegionInfo
 HRegionInfo.parseFrom(byte[]bytes)
@@ -165,8 +163,10 @@
 
 
 
-static ClusterId
-ClusterId.parseFrom(byte[]bytes)
+static HColumnDescriptor
+HColumnDescriptor.parseFrom(byte[]bytes)
+Deprecated.
+
 
 
 static SplitLogTask
@@ -220,17 +220,17 @@
 TableDescriptorBuilder.ModifyableTableDescriptor.parseFrom(byte[]bytes)
 
 
+static RegionInfo
+RegionInfo.parseFrom(byte[]bytes)
+
+
 static ColumnFamilyDescriptor
 ColumnFamilyDescriptorBuilder.parseFrom(byte[]pbBytes)
 
-
+
 private static ColumnFamilyDescriptor
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.parseFrom(byte[]bytes)
 
-
-static RegionInfo
-RegionInfo.parseFrom(byte[]bytes)
-
 
 static RegionInfo
 RegionInfo.parseFrom(byte[]bytes,
@@ -305,111 +305,111 @@
 ByteArrayComparable.parseFrom(byte[]pbBytes)
 
 
-static ColumnPrefixFilter
-ColumnPrefixFilter.parseFrom(byte[]pbBytes)
+static SingleColumnValueExcludeFilter
+SingleColumnValueExcludeFilter.parseFrom(byte[]pbBytes)
 
 
-static ColumnCountGetFilter
-ColumnCountGetFilter.parseFrom(byte[]pbBytes)
+static ValueFilter
+ValueFilter.parseFrom(byte[]pbBytes)
 
 
-static RowFilter
-RowFilter.parseFrom(byte[]pbBytes)
+static SkipFilter
+SkipFilter.parseFrom(byte[]pbBytes)
 
 
-static FuzzyRowFilter
-FuzzyRowFilter.parseFrom(byte[]pbBytes)
+static FamilyFilter
+FamilyFilter.parseFrom(byte[]pbBytes)
 
 
-static BinaryComparator
-BinaryComparator.parseFrom(byte[]pbBytes)
+static BinaryPrefixComparator
+BinaryPrefixComparator.parseFrom(byte[]pbBytes)
 
 
-static RegexStringComparator
-RegexStringComparator.parseFrom(byte[]pbBytes)
+static NullComparator
+NullComparator.parseFrom(byte[]pbBytes)
 
 
-static Filter
-Filter.parseFrom(byte[]pbBytes)
-Concrete implementers can signal a failure condition in 
their code by throwing an
- http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException.
-
+static BigDecimalComparator
+BigDecimalComparator.parseFrom(byte[]pbBytes)
 
 
-static RandomRowFilter
-RandomRowFilter.parseFrom(byte[]pbBytes)
+static ColumnPrefixFilter
+ColumnPrefixFilter.parseFrom(byte[]pbBytes)
 
 
-static FirstKeyOnlyFilter
-FirstKeyOnlyFilter.parseFrom(byte[]pbBytes)
+static PageFilter
+PageFilter.parseFrom(byte[]pbBytes)
 
 
-static SkipFilter
-SkipFilter.parseFrom(byte[]pbBytes)
+static BitComparator
+BitComparator.parseFrom(byte[]pbBytes)
 
 
-static BinaryPrefixComparator
-BinaryPrefixComparator.parseFrom(byte[]pbBytes)
+static RowFilter
+RowFilter.parseFrom(byte[]pbBytes)
 
 
-static TimestampsFilter
-TimestampsFilter.parseFrom(byte[]pbBytes)
+static ColumnRangeFilter
+ColumnRangeFilter.parseFrom(byte[]pbBytes)
 
 
-static ValueFilter
-ValueFilter.parseFrom(byte[]pbBytes)
+static ColumnCountGetFilter
+ColumnCountGetFilter.parseFrom(byte[]pbBytes)
 
 
-static KeyOnlyFilter
-KeyOnlyFilter.parseFrom(byte[]pbBytes)
+static SubstringComparator
+SubstringComparator.parseFrom(byte[]pbBytes)
 
 
-static FamilyFilter
-FamilyFilter.parseFrom(byte[]pbBytes)
+static MultipleColumnPrefixFilter
+MultipleColumnPrefixFilter.parseFrom(byte[]pbBytes)
 
 
-static QualifierFilter
-QualifierFilter.parseFrom(byte[]pbBytes)
+static ColumnPaginationFilter
+ColumnPaginationFilter.parseFrom(byte[]pbBytes)
 
 
-static FilterList
-FilterList.parseFrom(byte[]pbBytes)
+static DependentColumnFilter
+DependentColumnFilter.parseFrom(byte[]pbBytes)
 
 
-static BigDecimalComparator
-BigDecimalComparator.parseFrom(byte[]pbBytes)
+static BinaryComparator
+BinaryComparator.parseFrom(byte[]pbBytes)
 
 
-static ColumnRangeFilter
-ColumnRangeFilter.parseFrom(byte[]pbBytes)
+static InclusiveStopFilter
+InclusiveStopFilter.parseFrom(byte[]pbBytes)
 
 
-static ColumnPaginationFilter
-ColumnPaginationFilter.parseFrom(byte[]pbBytes)
+static KeyOnlyFilter
+KeyOnlyFilter.parseFrom(byte[]pbBytes)
 
 
-static SubstringComparator
-SubstringComparator.parseFrom(byte[]pbBytes)
+static MultiRowRangeFilter
+MultiRowRangeFilter.parseFrom(byte[]pbBytes)
 
 
-static WhileMatchFilter
-WhileMatchFilter.parseFrom(byte[]pbBytes)
+static Filter
+Filter.parseFrom(byte[]pbBytes)

[19/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
 
b/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
index 61695fd..bf8d672 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
@@ -113,17 +113,17 @@
 
 
 
+private Batch.CallbackCResult
+AsyncRequestFutureImpl.callback
+
+
 private Batch.CallbackT
 AsyncProcessTask.callback
 
-
+
 private Batch.CallbackT
 AsyncProcessTask.Builder.callback
 
-
-private Batch.CallbackCResult
-AsyncRequestFutureImpl.callback
-
 
 
 
@@ -148,50 +148,42 @@
 
 
 Rvoid
-HTable.batchCallback(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Rowactions,
- http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object[]results,
- Batch.CallbackRcallback)
-
-
-Rvoid
 Table.batchCallback(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Rowactions,
  http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object[]results,
  Batch.CallbackRcallback)
 Same as Table.batch(List,
 Object[]), but with a callback.
 
 
+
+Rvoid
+HTable.batchCallback(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Rowactions,
+ http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object[]results,
+ Batch.CallbackRcallback)
+
 
 R extends 
com.google.protobuf.Messagevoid
-HTable.batchCoprocessorService(com.google.protobuf.Descriptors.MethodDescriptormethodDescriptor,
+Table.batchCoprocessorService(com.google.protobuf.Descriptors.MethodDescriptormethodDescriptor,
com.google.protobuf.Messagerequest,
byte[]startKey,
byte[]endKey,
RresponsePrototype,
-   Batch.CallbackRcallback)
+   Batch.CallbackRcallback)
+Creates an instance of the given Service 
subclass for each table
+ region spanning the range from the startKey row to 
endKey row (inclusive), all
+ the invocations to the same region server will be batched into one call.
+
 
 
 R extends 
com.google.protobuf.Messagevoid
-Table.batchCoprocessorService(com.google.protobuf.Descriptors.MethodDescriptormethodDescriptor,
+HTable.batchCoprocessorService(com.google.protobuf.Descriptors.MethodDescriptormethodDescriptor,
com.google.protobuf.Messagerequest,
byte[]startKey,
byte[]endKey,
RresponsePrototype,
-   Batch.CallbackRcallback)
-Creates an instance of the given Service 
subclass for each table
- region spanning the range from the startKey row to 
endKey row (inclusive), all
- the invocations to the same region server will be batched into one call.
-
+   Batch.CallbackRcallback)
 
 
 T extends 
com.google.protobuf.Service,Rvoid
-HTable.coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">ClassTservice,
-  byte[]startKey,
-  byte[]endKey,
-  Batch.CallT,Rcallable,
-  Batch.CallbackRcallback)
-
-
-T extends 
com.google.protobuf.Service,Rvoid
 Table.coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">ClassTservice,
   byte[]startKey,
   byte[]endKey,
@@ -203,6 +195,14 @@
  with each Service instance.
 
 
+
+T extends 
com.google.protobuf.Service,Rvoid
+HTable.coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">ClassTservice,
+  byte[]startKey,
+  byte[]endKey,
+  Batch.CallT,Rcallable,
+  Batch.CallbackRcallback)
+
 
 static Rvoid
 HTable.doBatchWithCallback(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Rowactions,


[37/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/class-use/ClusterMetrics.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ClusterMetrics.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ClusterMetrics.html
index 08aeced..0c3406e 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ClusterMetrics.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ClusterMetrics.html
@@ -242,27 +242,27 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-AsyncAdmin.getClusterMetrics()
+AsyncHBaseAdmin.getClusterMetrics()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-RawAsyncHBaseAdmin.getClusterMetrics()
+AsyncAdmin.getClusterMetrics()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-AsyncHBaseAdmin.getClusterMetrics()
+RawAsyncHBaseAdmin.getClusterMetrics()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-AsyncAdmin.getClusterMetrics(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
+AsyncHBaseAdmin.getClusterMetrics(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-RawAsyncHBaseAdmin.getClusterMetrics(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
+AsyncAdmin.getClusterMetrics(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-AsyncHBaseAdmin.getClusterMetrics(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
+RawAsyncHBaseAdmin.getClusterMetrics(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
 
 
 
@@ -408,11 +408,11 @@
 
 
 void
-RegionLocationFinder.setClusterMetrics(ClusterMetricsstatus)
+BaseLoadBalancer.setClusterMetrics(ClusterMetricsst)
 
 
 void
-BaseLoadBalancer.setClusterMetrics(ClusterMetricsst)
+RegionLocationFinder.setClusterMetrics(ClusterMetricsstatus)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
index f686104..31595fa 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
@@ -186,94 +186,94 @@ the order they are declared.
 
 
 boolean
-HTable.checkAndDelete(byte[]row,
+Table.checkAndDelete(byte[]row,
   byte[]family,
   byte[]qualifier,
   CompareOperatorop,
   byte[]value,
   Deletedelete)
-Deprecated.
+Deprecated.
+Since 2.0.0. Will be 
removed in 3.0.0. Use Table.checkAndMutate(byte[],
 byte[])
+
 
 
 
 boolean
-Table.checkAndDelete(byte[]row,
+HTable.checkAndDelete(byte[]row,
   byte[]family,
   byte[]qualifier,
   CompareOperatorop,
   byte[]value,
   Deletedelete)
-Deprecated.
-Since 2.0.0. Will be 
removed in 3.0.0. Use Table.checkAndMutate(byte[],
 byte[])
-
+Deprecated.
 
 
 
 boolean
-HTable.checkAndMutate(byte[]row,
+Table.checkAndMutate(byte[]row,
   byte[]family,
   byte[]qualifier,
   CompareOperatorop,
   byte[]value,
-  RowMutationsrm)
-Deprecated.
+  RowMutationsmutation)
+Deprecated.
+Since 2.0.0. Will be 
removed in 3.0.0. Use Table.checkAndMutate(byte[],
 

[32/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 84b554e..0c9079d 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -2055,119 +2055,119 @@ service.
 
 
 private TableName
-SnapshotDescription.table
+RegionCoprocessorRpcChannel.table
 
 
 private TableName
-RegionCoprocessorRpcChannel.table
+SnapshotDescription.table
 
 
 private TableName
-RawAsyncTableImpl.tableName
+HRegionLocator.tableName
 
 
 private TableName
-RegionServerCallable.tableName
+ScannerCallableWithReplicas.tableName
 
 
 protected TableName
-RegionAdminServiceCallable.tableName
+ClientScanner.tableName
 
 
 private TableName
-BufferedMutatorImpl.tableName
+AsyncClientScanner.tableName
 
 
 private TableName
-AsyncProcessTask.tableName
+AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName
 
 
 private TableName
-AsyncProcessTask.Builder.tableName
+AsyncRpcRetryingCallerFactory.BatchCallerBuilder.tableName
 
 
 private TableName
-AsyncRequestFutureImpl.tableName
+RegionInfoBuilder.tableName
 
 
-protected TableName
-TableBuilderBase.tableName
+private TableName
+RegionInfoBuilder.MutableRegionInfo.tableName
 
 
 private TableName
-AsyncBatchRpcRetryingCaller.tableName
+RawAsyncTableImpl.tableName
 
 
 private TableName
-RegionInfoBuilder.tableName
+RegionCoprocessorRpcChannelImpl.tableName
 
 
 private TableName
-RegionInfoBuilder.MutableRegionInfo.tableName
+AsyncTableRegionLocatorImpl.tableName
 
 
-private TableName
-HTable.tableName
+protected TableName
+RegionAdminServiceCallable.tableName
 
 
 private TableName
-TableState.tableName
+HTable.tableName
 
 
-protected TableName
-RpcRetryingCallerWithReadReplicas.tableName
+private TableName
+BufferedMutatorImpl.tableName
 
 
-protected TableName
-AsyncTableBuilderBase.tableName
+private TableName
+AsyncBatchRpcRetryingCaller.tableName
 
 
 private TableName
-AsyncSingleRequestRpcRetryingCaller.tableName
+BufferedMutatorParams.tableName
 
 
 private TableName
-ScannerCallableWithReplicas.tableName
+HBaseAdmin.TableFuture.tableName
 
 
-protected TableName
-RawAsyncHBaseAdmin.TableProcedureBiConsumer.tableName
+private TableName
+AsyncRequestFutureImpl.tableName
 
 
 private TableName
-AsyncTableRegionLocatorImpl.tableName
+AsyncProcessTask.tableName
 
 
 private TableName
-HBaseAdmin.TableFuture.tableName
+AsyncProcessTask.Builder.tableName
 
 
-private TableName
-RegionCoprocessorRpcChannelImpl.tableName
+protected TableName
+RawAsyncHBaseAdmin.TableProcedureBiConsumer.tableName
 
 
-protected TableName
-ClientScanner.tableName
+private TableName
+RegionServerCallable.tableName
 
 
 private TableName
-BufferedMutatorParams.tableName
+AsyncSingleRequestRpcRetryingCaller.tableName
 
 
-private TableName
-AsyncClientScanner.tableName
+protected TableName
+TableBuilderBase.tableName
 
 
-private TableName
-AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName
+protected TableName
+RpcRetryingCallerWithReadReplicas.tableName
 
 
-private TableName
-AsyncRpcRetryingCallerFactory.BatchCallerBuilder.tableName
+protected TableName
+AsyncTableBuilderBase.tableName
 
 
 private TableName
-HRegionLocator.tableName
+TableState.tableName
 
 
 
@@ -2209,83 +2209,83 @@ service.
 
 
 TableName
-RawAsyncTableImpl.getName()
+AsyncTable.getName()
+Gets the fully qualified table name instance of this 
table.
+
 
 
 TableName
-RegionLocator.getName()
+Table.getName()
 Gets the fully qualified table name instance of this 
table.
 
 
 
 TableName
-BufferedMutatorImpl.getName()
+HRegionLocator.getName()
 
 
 TableName
-BufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
+AsyncTableRegionLocator.getName()
+Gets the fully qualified table name instance of the table 
whose region we want to locate.
 
 
 
 TableName
-HTable.getName()
+AsyncTableImpl.getName()
 
 
 TableName
-AsyncBufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this
- AsyncBufferedMutator writes to.
-
+RawAsyncTableImpl.getName()
 
 
 TableName
-Table.getName()
-Gets the fully qualified table name instance of this 
table.
-
+AsyncTableRegionLocatorImpl.getName()
 
 
 TableName
-AsyncTableImpl.getName()
+BufferedMutator.getName()
+Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
+
 
 
 TableName
-AsyncTable.getName()
+RegionLocator.getName()
 Gets the fully qualified table name instance of this 
table.
 
 
 
 TableName
-AsyncTableRegionLocatorImpl.getName()
+AsyncBufferedMutatorImpl.getName()
 
 
 TableName
-AsyncTableRegionLocator.getName()
-Gets the fully qualified table name instance of the 

[25/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocateType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocateType.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocateType.html
index 90f52b0..2ac1b78 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocateType.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocateType.html
@@ -106,7 +106,7 @@
 
 
 private RegionLocateType
-AsyncSingleRequestRpcRetryingCaller.locateType
+AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.locateType
 
 
 RegionLocateType
@@ -114,7 +114,7 @@
 
 
 private RegionLocateType
-AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.locateType
+AsyncSingleRequestRpcRetryingCaller.locateType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
index e062eb5..fbe0658 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
@@ -230,14 +230,14 @@ service.
 
 
 private RegionLocator
-HFileOutputFormat2.TableInfo.regionLocator
-
-
-private RegionLocator
 TableInputFormatBase.regionLocator
 The RegionLocator of the 
table.
 
 
+
+private RegionLocator
+HFileOutputFormat2.TableInfo.regionLocator
+
 
 
 
@@ -248,15 +248,15 @@ service.
 
 
 
-RegionLocator
-HFileOutputFormat2.TableInfo.getRegionLocator()
-
-
 protected RegionLocator
 TableInputFormatBase.getRegionLocator()
 Allows subclasses to get the RegionLocator.
 
 
+
+RegionLocator
+HFileOutputFormat2.TableInfo.getRegionLocator()
+
 
 
 



[35/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
index f9cacbc..b05384b 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
@@ -270,31 +270,31 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureNamespaceDescriptor
-AsyncAdmin.getNamespaceDescriptor(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
-Get a namespace descriptor by name
-
+AsyncHBaseAdmin.getNamespaceDescriptor(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureNamespaceDescriptor
-RawAsyncHBaseAdmin.getNamespaceDescriptor(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+AsyncAdmin.getNamespaceDescriptor(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+Get a namespace descriptor by name
+
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureNamespaceDescriptor
-AsyncHBaseAdmin.getNamespaceDescriptor(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+RawAsyncHBaseAdmin.getNamespaceDescriptor(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListNamespaceDescriptor
-AsyncAdmin.listNamespaceDescriptors()
-List available namespace descriptors
-
+AsyncHBaseAdmin.listNamespaceDescriptors()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListNamespaceDescriptor
-RawAsyncHBaseAdmin.listNamespaceDescriptors()
+AsyncAdmin.listNamespaceDescriptors()
+List available namespace descriptors
+
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListNamespaceDescriptor
-AsyncHBaseAdmin.listNamespaceDescriptors()
+RawAsyncHBaseAdmin.listNamespaceDescriptors()
 
 
 
@@ -307,9 +307,7 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
-AsyncAdmin.createNamespace(NamespaceDescriptordescriptor)
-Create a new namespace.
-
+AsyncHBaseAdmin.createNamespace(NamespaceDescriptordescriptor)
 
 
 void
@@ -318,16 +316,18 @@
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
-RawAsyncHBaseAdmin.createNamespace(NamespaceDescriptordescriptor)
-
-
 void
 HBaseAdmin.createNamespace(NamespaceDescriptordescriptor)
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
+AsyncAdmin.createNamespace(NamespaceDescriptordescriptor)
+Create a new namespace.
+
+
 
 

[08/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
index 9c13a58..4d04e3e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
@@ -133,11 +133,11 @@
 
 
 ProcedureExecutorMasterProcedureEnv
-MasterServices.getMasterProcedureExecutor()
+HMaster.getMasterProcedureExecutor()
 
 
 ProcedureExecutorMasterProcedureEnv
-HMaster.getMasterProcedureExecutor()
+MasterServices.getMasterProcedureExecutor()
 
 
 private RemoteProcedureDispatcher.RemoteProcedureMasterProcedureEnv,?
@@ -194,15 +194,15 @@
 
 
 protected Procedure.LockState
-GCRegionProcedure.acquireLock(MasterProcedureEnvenv)
+RegionTransitionProcedure.acquireLock(MasterProcedureEnvenv)
 
 
 protected Procedure.LockState
-MergeTableRegionsProcedure.acquireLock(MasterProcedureEnvenv)
+GCRegionProcedure.acquireLock(MasterProcedureEnvenv)
 
 
 protected Procedure.LockState
-RegionTransitionProcedure.acquireLock(MasterProcedureEnvenv)
+MergeTableRegionsProcedure.acquireLock(MasterProcedureEnvenv)
 
 
 protected boolean
@@ -295,7 +295,7 @@
 
 
 protected void
-AssignProcedure.finishTransition(MasterProcedureEnvenv,
+UnassignProcedure.finishTransition(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode)
 
 
@@ -305,7 +305,7 @@
 
 
 protected void
-UnassignProcedure.finishTransition(MasterProcedureEnvenv,
+AssignProcedure.finishTransition(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode)
 
 
@@ -314,7 +314,7 @@
 
 
 protected ProcedureMetrics
-AssignProcedure.getProcedureMetrics(MasterProcedureEnvenv)
+UnassignProcedure.getProcedureMetrics(MasterProcedureEnvenv)
 
 
 protected ProcedureMetrics
@@ -326,7 +326,7 @@
 
 
 protected ProcedureMetrics
-UnassignProcedure.getProcedureMetrics(MasterProcedureEnvenv)
+AssignProcedure.getProcedureMetrics(MasterProcedureEnvenv)
 
 
 (package private) static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse
@@ -357,7 +357,7 @@
 
 
 ServerName
-AssignProcedure.getServer(MasterProcedureEnvenv)
+UnassignProcedure.getServer(MasterProcedureEnvenv)
 
 
 abstract ServerName
@@ -367,7 +367,7 @@
 
 
 ServerName
-UnassignProcedure.getServer(MasterProcedureEnvenv)
+AssignProcedure.getServer(MasterProcedureEnvenv)
 
 
 private ServerName
@@ -384,19 +384,19 @@
 
 
 protected boolean
-MergeTableRegionsProcedure.hasLock(MasterProcedureEnvenv)
+RegionTransitionProcedure.hasLock(MasterProcedureEnvenv)
 
 
 protected boolean
-RegionTransitionProcedure.hasLock(MasterProcedureEnvenv)
+MergeTableRegionsProcedure.hasLock(MasterProcedureEnvenv)
 
 
 protected boolean
-MergeTableRegionsProcedure.holdLock(MasterProcedureEnvenv)
+RegionTransitionProcedure.holdLock(MasterProcedureEnvenv)
 
 
 protected boolean
-RegionTransitionProcedure.holdLock(MasterProcedureEnvenv)
+MergeTableRegionsProcedure.holdLock(MasterProcedureEnvenv)
 
 
 private boolean
@@ -510,15 +510,15 @@
 
 
 protected void
-MergeTableRegionsProcedure.releaseLock(MasterProcedureEnvenv)
+RegionTransitionProcedure.releaseLock(MasterProcedureEnvenv)
 
 
 protected void
-RegionTransitionProcedure.releaseLock(MasterProcedureEnvenv)
+MergeTableRegionsProcedure.releaseLock(MasterProcedureEnvenv)
 
 
 RemoteProcedureDispatcher.RemoteOperation
-AssignProcedure.remoteCallBuild(MasterProcedureEnvenv,
+UnassignProcedure.remoteCallBuild(MasterProcedureEnvenv,
ServerNameserverName)
 
 
@@ -528,12 +528,12 @@
 
 
 RemoteProcedureDispatcher.RemoteOperation
-UnassignProcedure.remoteCallBuild(MasterProcedureEnvenv,
+AssignProcedure.remoteCallBuild(MasterProcedureEnvenv,
ServerNameserverName)
 
 
 protected boolean
-AssignProcedure.remoteCallFailed(MasterProcedureEnvenv,
+UnassignProcedure.remoteCallFailed(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode,
 http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in 
java.io">IOExceptionexception)
 
@@ -545,7 +545,7 @@
 
 
 protected boolean
-UnassignProcedure.remoteCallFailed(MasterProcedureEnvenv,
+AssignProcedure.remoteCallFailed(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode,
 http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in 
java.io">IOExceptionexception)
 
@@ -566,10 +566,10 @@
 
 
 protected void
-AssignProcedure.reportTransition(MasterProcedureEnvenv,
+UnassignProcedure.reportTransition(MasterProcedureEnvenv,
 

[46/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html 
b/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index d01d840..e1c02b8 100644
--- a/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ b/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -175,23 +175,23 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReaderImmutableBytesWritable,Result
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
+TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
org.apache.hadoop.mapred.JobConfjob,
-   org.apache.hadoop.mapred.Reporterreporter)
-Builds a TableRecordReader.
-
+   
org.apache.hadoop.mapred.Reporterreporter)
 
 
 org.apache.hadoop.mapred.RecordReaderImmutableBytesWritable,Result
-TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
+MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
org.apache.hadoop.mapred.JobConfjob,

org.apache.hadoop.mapred.Reporterreporter)
 
 
 org.apache.hadoop.mapred.RecordReaderImmutableBytesWritable,Result
-MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
org.apache.hadoop.mapred.JobConfjob,
-   
org.apache.hadoop.mapred.Reporterreporter)
+   org.apache.hadoop.mapred.Reporterreporter)
+Builds a TableRecordReader.
+
 
 
 
@@ -324,9 +324,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapreduce.RecordReaderImmutableBytesWritable,Result
-TableInputFormatBase.createRecordReader(org.apache.hadoop.mapreduce.InputSplitsplit,
+MultiTableInputFormatBase.createRecordReader(org.apache.hadoop.mapreduce.InputSplitsplit,
   
org.apache.hadoop.mapreduce.TaskAttemptContextcontext)
-Builds a TableRecordReader.
+Builds a TableRecordReader.
 
 
 
@@ -336,19 +336,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapreduce.RecordReaderImmutableBytesWritable,Result
-MultiTableInputFormatBase.createRecordReader(org.apache.hadoop.mapreduce.InputSplitsplit,
+TableInputFormatBase.createRecordReader(org.apache.hadoop.mapreduce.InputSplitsplit,
   
org.apache.hadoop.mapreduce.TaskAttemptContextcontext)
-Builds a TableRecordReader.
+Builds a TableRecordReader.
 
 
 
-org.apache.hadoop.mapreduce.RecordWriterImmutableBytesWritable,Cell
-HFileOutputFormat2.getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContextcontext)
-
-
 org.apache.hadoop.mapreduce.RecordWriterImmutableBytesWritable,Mutation
 MultiTableOutputFormat.getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContextcontext)
 
+
+org.apache.hadoop.mapreduce.RecordWriterImmutableBytesWritable,Cell
+HFileOutputFormat2.getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContextcontext)
+
 
 
 
@@ -375,6 +375,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 int
+SimpleTotalOrderPartitioner.getPartition(ImmutableBytesWritablekey,
+VALUEvalue,
+intreduces)
+
+
+int
 HRegionPartitioner.getPartition(ImmutableBytesWritablekey,
 VALUEvalue,
 intnumPartitions)
@@ -382,12 +388,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  number of partitions i.e.
 
 
-
-int
-SimpleTotalOrderPartitioner.getPartition(ImmutableBytesWritablekey,
-VALUEvalue,
-intreduces)
-
 
 void
 IdentityTableMapper.map(ImmutableBytesWritablekey,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/apidocs/org/apache/hadoop/hbase/io/class-use/TimeRange.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/io/class-use/TimeRange.html 
b/apidocs/org/apache/hadoop/hbase/io/class-use/TimeRange.html
index 97f7556..c58acad 100644
--- a/apidocs/org/apache/hadoop/hbase/io/class-use/TimeRange.html
+++ b/apidocs/org/apache/hadoop/hbase/io/class-use/TimeRange.html
@@ -123,25 +123,25 @@
 
 
 TimeRange
-Increment.getTimeRange()
-Gets the TimeRange used for this increment.
+Get.getTimeRange()
+Method for retrieving the get's TimeRange
 
 
 
 TimeRange
-Scan.getTimeRange()
+Append.getTimeRange()
+Gets the TimeRange used for this append.
+
 
 
 TimeRange
-Append.getTimeRange()
-Gets the TimeRange used for this append.
+Increment.getTimeRange()
+Gets the TimeRange used for this increment.
 
 
 
 TimeRange

[49/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/apidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html 
b/apidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
index b660332..bb01f46 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
@@ -201,11 +201,11 @@ the order they are declared.
 
 
 protected CompareOperator
-SingleColumnValueFilter.op
+CompareFilter.op
 
 
 protected CompareOperator
-CompareFilter.op
+SingleColumnValueFilter.op
 
 
 
@@ -227,11 +227,11 @@ the order they are declared.
 
 
 CompareOperator
-SingleColumnValueFilter.getCompareOperator()
+CompareFilter.getCompareOperator()
 
 
 CompareOperator
-CompareFilter.getCompareOperator()
+SingleColumnValueFilter.getCompareOperator()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/apidocs/org/apache/hadoop/hbase/class-use/ServerMetrics.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/ServerMetrics.html 
b/apidocs/org/apache/hadoop/hbase/class-use/ServerMetrics.html
index 9a2bd97..0056f60 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/ServerMetrics.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/ServerMetrics.html
@@ -122,13 +122,13 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerMetrics
-ClusterStatus.getLiveServerMetrics()
-Deprecated.
-
+ClusterMetrics.getLiveServerMetrics()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerMetrics
-ClusterMetrics.getLiveServerMetrics()
+ClusterStatus.getLiveServerMetrics()
+Deprecated.
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index fa07da3..6ea4d25 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -140,23 +140,23 @@
 
 
 ServerName
-ClusterStatus.getMasterName()
-Deprecated.
-
-
-
-ServerName
 ClusterMetrics.getMasterName()
 Returns detailed information about the current master ServerName.
 
 
+
+ServerName
+ClusterStatus.getMasterName()
+Deprecated.
+
+
 
 ServerName
-ServerMetrics.getServerName()
+HRegionLocation.getServerName()
 
 
 ServerName
-HRegionLocation.getServerName()
+ServerMetrics.getServerName()
 
 
 ServerName
@@ -207,13 +207,13 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterStatus.getBackupMasterNames()
-Deprecated.
-
+ClusterMetrics.getBackupMasterNames()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterMetrics.getBackupMasterNames()
+ClusterStatus.getBackupMasterNames()
+Deprecated.
+
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
@@ -226,23 +226,23 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterStatus.getDeadServerNames()
-Deprecated.
-
+ClusterMetrics.getDeadServerNames()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterMetrics.getDeadServerNames()
+ClusterStatus.getDeadServerNames()
+Deprecated.
+
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerMetrics
-ClusterStatus.getLiveServerMetrics()
-Deprecated.
-
+ClusterMetrics.getLiveServerMetrics()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerMetrics
-ClusterMetrics.getLiveServerMetrics()
+ClusterStatus.getLiveServerMetrics()
+Deprecated.
+
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionServerName

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 

[09/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionPlan.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionPlan.html 
b/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionPlan.html
index d2c9cca..146b426 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionPlan.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionPlan.html
@@ -282,7 +282,10 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionPlan
-FavoredStochasticBalancer.balanceCluster(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfoclusterState)
+SimpleLoadBalancer.balanceCluster(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfoclusterMap)
+Generate a global load balancing plan according to the 
specified map of
+ server information to the most loaded regions of each server.
+
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionPlan
@@ -292,19 +295,16 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionPlan
-SimpleLoadBalancer.balanceCluster(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfoclusterMap)
-Generate a global load balancing plan according to the 
specified map of
- server information to the most loaded regions of each server.
-
+FavoredStochasticBalancer.balanceCluster(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfoclusterState)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionPlan
-StochasticLoadBalancer.balanceCluster(TableNametableName,
+SimpleLoadBalancer.balanceCluster(TableNametableName,
   http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfoclusterState)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionPlan
-SimpleLoadBalancer.balanceCluster(TableNametableName,
+StochasticLoadBalancer.balanceCluster(TableNametableName,
   http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfoclusterState)
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionState.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionState.html 
b/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionState.html
index 00f46c3..75603cc 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionState.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionState.html
@@ -143,7 +143,7 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionState
-ClusterMetrics.getRegionStatesInTransition()
+ClusterMetricsBuilder.ClusterMetricsImpl.getRegionStatesInTransition()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionState
@@ -153,7 +153,7 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionState
-ClusterMetricsBuilder.ClusterMetricsImpl.getRegionStatesInTransition()
+ClusterMetrics.getRegionStatesInTransition()
 
 
 


[45/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/apidocs/org/apache/hadoop/hbase/util/class-use/Bytes.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/Bytes.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/Bytes.html
index 21f867f..23f3748 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/Bytes.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/Bytes.html
@@ -129,13 +129,13 @@
 
 
 Bytes
-HColumnDescriptor.getValue(Byteskey)
+HTableDescriptor.getValue(Byteskey)
 Deprecated.
 
 
 
 Bytes
-HTableDescriptor.getValue(Byteskey)
+HColumnDescriptor.getValue(Byteskey)
 Deprecated.
 
 
@@ -150,25 +150,25 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapBytes,Bytes
-HColumnDescriptor.getValues()
+HTableDescriptor.getValues()
 Deprecated.
 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapBytes,Bytes
-HColumnDescriptor.getValues()
+HTableDescriptor.getValues()
 Deprecated.
 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapBytes,Bytes
-HTableDescriptor.getValues()
+HColumnDescriptor.getValues()
 Deprecated.
 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapBytes,Bytes
-HTableDescriptor.getValues()
+HColumnDescriptor.getValues()
 Deprecated.
 
 
@@ -183,13 +183,13 @@
 
 
 Bytes
-HColumnDescriptor.getValue(Byteskey)
+HTableDescriptor.getValue(Byteskey)
 Deprecated.
 
 
 
 Bytes
-HTableDescriptor.getValue(Byteskey)
+HColumnDescriptor.getValue(Byteskey)
 Deprecated.
 
 
@@ -240,13 +240,13 @@
 
 
 Bytes
-TableDescriptor.getValue(Byteskey)
-Getter for accessing the metadata associated with the 
key.
-
+ColumnFamilyDescriptor.getValue(Byteskey)
 
 
 Bytes
-ColumnFamilyDescriptor.getValue(Byteskey)
+TableDescriptor.getValue(Byteskey)
+Getter for accessing the metadata associated with the 
key.
+
 
 
 
@@ -259,14 +259,6 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapBytes,Bytes
-TableDescriptor.getValues()
-
-
-http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapBytes,Bytes
-TableDescriptor.getValues()
-
-
-http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapBytes,Bytes
 ColumnFamilyDescriptor.getValues()
 It clone all bytes of all elements.
 
@@ -277,6 +269,14 @@
 It clone all bytes of all elements.
 
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapBytes,Bytes
+TableDescriptor.getValues()
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapBytes,Bytes
+TableDescriptor.getValues()
+
 
 
 
@@ -288,13 +288,13 @@
 
 
 Bytes
-TableDescriptor.getValue(Byteskey)
-Getter for accessing the metadata associated with the 
key.
-
+ColumnFamilyDescriptor.getValue(Byteskey)
 
 
 Bytes
-ColumnFamilyDescriptor.getValue(Byteskey)
+TableDescriptor.getValue(Byteskey)
+Getter for accessing the metadata associated with the 
key.
+
 
 
 TableDescriptorBuilder

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
index 9142a21..109b7e5 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
@@ -112,15 +112,15 @@
 
 
 protected Order
-RawBytes.order
+RawString.order
 
 
 protected Order
-OrderedBytesBase.order
+RawBytes.order
 
 
 protected Order
-RawString.order
+OrderedBytesBase.order
 
 
 
@@ -133,7 +133,7 @@
 
 
 Order
-RawBytes.getOrder()
+RawByte.getOrder()
 
 
 Order
@@ -141,66 +141,66 @@
 
 
 Order
-RawShort.getOrder()
+RawFloat.getOrder()
 
 
 Order
-TerminatedWrapper.getOrder()
+PBType.getOrder()
 
 
 Order
-OrderedBytesBase.getOrder()
+RawInteger.getOrder()
 
 
 Order
-RawFloat.getOrder()
+DataType.getOrder()
+Retrieve the sort Order imposed by this data type, 
or null when
+ natural ordering is not preserved.
+
 
 
 Order
-Union2.getOrder()
+RawLong.getOrder()
 
 
 Order
-Struct.getOrder()
+RawShort.getOrder()
 
 
 Order
-RawInteger.getOrder()
+RawString.getOrder()
 
 
 Order
-PBType.getOrder()
+RawBytes.getOrder()
 
 
 Order
-Union3.getOrder()
+Struct.getOrder()
 
 
 Order
-RawString.getOrder()
+Union3.getOrder()
 
 
 Order
-RawByte.getOrder()
+RawDouble.getOrder()
 
 
 Order

[38/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
index afd9d09..f30793d 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
@@ -172,27 +172,27 @@
 
 
 static int
-PrivateCellUtil.compare(CellComparatorcomparator,
+CellUtil.compare(CellComparatorcomparator,
Cellleft,
byte[]key,
intoffset,
intlength)
-Used when a cell needs to be compared with a key byte[] 
such as cases of finding the index from
- the index block, bloom keys from the bloom blocks This byte[] is expected to 
be serialized in
- the KeyValue serialization format If the KeyValue (Cell's) serialization 
format changes this
- method cannot be used.
+Deprecated.
+As of HBase-2.0. Will be 
removed in HBase-3.0
+
 
 
 
 static int
-CellUtil.compare(CellComparatorcomparator,
+PrivateCellUtil.compare(CellComparatorcomparator,
Cellleft,
byte[]key,
intoffset,
intlength)
-Deprecated.
-As of HBase-2.0. Will be 
removed in HBase-3.0
-
+Used when a cell needs to be compared with a key byte[] 
such as cases of finding the index from
+ the index block, bloom keys from the bloom blocks This byte[] is expected to 
be serialized in
+ the KeyValue serialization format If the KeyValue (Cell's) serialization 
format changes this
+ method cannot be used.
 
 
 
@@ -265,12 +265,12 @@
 
 
 int
-RowIndexSeekerV1.compareKey(CellComparatorcomparator,
+BufferedDataBlockEncoder.BufferedEncodedSeeker.compareKey(CellComparatorcomparator,
   Cellkey)
 
 
 int
-BufferedDataBlockEncoder.BufferedEncodedSeeker.compareKey(CellComparatorcomparator,
+RowIndexSeekerV1.compareKey(CellComparatorcomparator,
   Cellkey)
 
 
@@ -282,27 +282,27 @@
 
 
 DataBlockEncoder.EncodedSeeker
-RowIndexCodecV1.createSeeker(CellComparatorcomparator,
+CopyKeyDataBlockEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-CopyKeyDataBlockEncoder.createSeeker(CellComparatorcomparator,
+PrefixKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-DiffKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
+FastDiffDeltaEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-FastDiffDeltaEncoder.createSeeker(CellComparatorcomparator,
+DiffKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-PrefixKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
+RowIndexCodecV1.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
@@ -340,9 +340,9 @@
 
 
 
-protected CellComparator
-HFileWriterImpl.comparator
-Key comparator.
+private CellComparator
+HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
+Needed doing lookup on blocks.
 
 
 
@@ -356,9 +356,9 @@
 
 
 
-private CellComparator
-HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
-Needed doing lookup on blocks.
+protected CellComparator
+HFileWriterImpl.comparator
+Key comparator.
 
 
 
@@ -539,15 +539,15 @@
 
 
 private CellComparator
-StripeStoreFileManager.cellComparator
+DefaultStoreFileManager.cellComparator
 
 
 private CellComparator
-DefaultStoreFileManager.cellComparator
+StripeStoreFileManager.cellComparator
 
 
-protected CellComparator
-StripeMultiFileWriter.comparator
+private CellComparator
+StoreFileWriter.Builder.comparator
 
 
 protected CellComparator
@@ -555,31 +555,31 @@
 
 
 private CellComparator
-Segment.comparator
+StoreScanner.comparator
 
 
 private CellComparator
-ScanInfo.comparator
+AbstractMemStore.comparator
 
 
 private CellComparator
-StoreFileWriter.Builder.comparator
+HStoreFile.comparator
 
 
 private CellComparator
-HStoreFile.comparator
+Segment.comparator
 
 
 protected CellComparator
 HRegion.RegionScannerImpl.comparator
 
 
-private CellComparator
-AbstractMemStore.comparator
+protected CellComparator
+StripeMultiFileWriter.comparator
 
 
 private CellComparator
-StoreScanner.comparator
+ScanInfo.comparator
 
 
 protected CellComparator
@@ -609,48 +609,48 @@
 HRegion.getCellComparator()
 
 
-CellComparator
-StoreFileReader.getComparator()
+(package private) CellComparator
+StoreFileScanner.getComparator()
 
 
 protected CellComparator
-Segment.getComparator()
-Returns the Cell comparator used by this segment
-
+AbstractMemStore.getComparator()
 
 
 CellComparator
-ScanInfo.getComparator()
+StoreFileReader.getComparator()
 
 
 CellComparator

[42/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index c2ed63e..4503cc3 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -286,10 +286,10 @@
 Warnings
 Errors
 
-3533
+3534
 0
 0
-16576
+16577
 
 Files
 
@@ -1707,8613 +1707,8618 @@
 org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
 0
 0
-2
+1
 
 org/apache/hadoop/hbase/client/TestAsyncTableAdminApi2.java
 0
 0
 3
 
+org/apache/hadoop/hbase/client/TestAsyncTableAdminApi3.java
+0
+0
+3
+
 org/apache/hadoop/hbase/client/TestAsyncTableScanMetrics.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestAsyncTableScanner.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestAttributes.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/TestBufferedMutator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestCheckAndMutate.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestClientExponentialBackoff.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestClientNoCluster.java
 0
 0
 33
-
+
 org/apache/hadoop/hbase/client/TestClientPushback.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/client/TestClientScanner.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/client/TestClientTimeouts.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/TestColumnFamilyDescriptorBuilder.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestConnectionImplementation.java
 0
 0
 15
-
+
 org/apache/hadoop/hbase/client/TestEnableTable.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/TestFastFail.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/TestFromClientSide.java
 0
 0
 71
-
+
 org/apache/hadoop/hbase/client/TestFromClientSide3.java
 0
 0
 20
-
+
 org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestFromClientSideScanExcpetion.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestGet.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestImmutableHTableDescriptor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestIncrementsFromClientSide.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestIntraRowPagination.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestLeaseRenewal.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/TestLimitedScanWithFilter.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestMetaCache.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase/client/TestMetricsConnection.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/TestMobCloneSnapshotFromClient.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestMultiParallel.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
 0
 0
 30
-
+
 org/apache/hadoop/hbase/client/TestOperation.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestPutWithDelete.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/TestRawAsyncTableLimitedScanWithFilter.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestRawAsyncTableScan.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/TestReplicasClient.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestResult.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/TestResultSizeEstimation.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/TestReversedScannerCallable.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestScan.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/TestScannerTimeout.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
 0
 0
 19
-
+
 org/apache/hadoop/hbase/client/TestScannersFromClientSide2.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestServerBusyException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestSmallReversedScanner.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/client/TestSnapshotMetadata.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestSplitOrMergeStatus.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java
 0
 0

[03/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.PushType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.PushType.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.PushType.html
index ed15d9b..3d03e17 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.PushType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.PushType.html
@@ -248,7 +248,7 @@ the order they are declared.
 
 
 values
-public staticWALProcedureStore.PushType[]values()
+public staticWALProcedureStore.PushType[]values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -268,7 +268,7 @@ for (WALProcedureStore.PushType c : 
WALProcedureStore.PushType.values())
 
 
 valueOf
-public staticWALProcedureStore.PushTypevalueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+public staticWALProcedureStore.PushTypevalueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/procedure2/util/class-use/DelayedUtil.DelayedWithTimeout.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/util/class-use/DelayedUtil.DelayedWithTimeout.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/util/class-use/DelayedUtil.DelayedWithTimeout.html
index c6f6a46..5bd2115 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/util/class-use/DelayedUtil.DelayedWithTimeout.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/util/class-use/DelayedUtil.DelayedWithTimeout.html
@@ -141,11 +141,11 @@
 
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/DelayQueue.html?is-external=true;
 title="class or interface in java.util.concurrent">DelayQueueDelayedUtil.DelayedWithTimeout
-ProcedureExecutor.TimeoutExecutorThread.queue
+RemoteProcedureDispatcher.TimeoutExecutorThread.queue
 
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/DelayQueue.html?is-external=true;
 title="class or interface in java.util.concurrent">DelayQueueDelayedUtil.DelayedWithTimeout
-RemoteProcedureDispatcher.TimeoutExecutorThread.queue
+ProcedureExecutor.TimeoutExecutorThread.queue
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/quotas/class-use/MasterQuotaManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/quotas/class-use/MasterQuotaManager.html 
b/devapidocs/org/apache/hadoop/hbase/quotas/class-use/MasterQuotaManager.html
index 934c2fa..dd6045b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/quotas/class-use/MasterQuotaManager.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/quotas/class-use/MasterQuotaManager.html
@@ -125,11 +125,11 @@
 
 
 MasterQuotaManager
-MasterServices.getMasterQuotaManager()
+HMaster.getMasterQuotaManager()
 
 
 MasterQuotaManager
-HMaster.getMasterQuotaManager()
+MasterServices.getMasterQuotaManager()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/quotas/class-use/QuotaFilter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/quotas/class-use/QuotaFilter.html 
b/devapidocs/org/apache/hadoop/hbase/quotas/class-use/QuotaFilter.html
index a495cd1..d81fa5e 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/class-use/QuotaFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/class-use/QuotaFilter.html
@@ -110,9 +110,7 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListQuotaSettings
-AsyncAdmin.getQuota(QuotaFilterfilter)
-List the quotas based on the filter.
-
+AsyncHBaseAdmin.getQuota(QuotaFilterfilter)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListQuotaSettings
@@ -121,16 +119,18 @@
 
 
 

[41/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 361fd1b..780d2d7 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 2007 - 2018 The Apache Software Foundation
 
-  File: 3533,
- Errors: 16576,
+  File: 3534,
+ Errors: 16577,
  Warnings: 0,
  Infos: 0
   
@@ -1894,7 +1894,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.DateTieredStoreEngine.java;>org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.mob.MobUtils.java;>org/apache/hadoop/hbase/mob/MobUtils.java
 
 
   0
@@ -1903,12 +1903,12 @@ under the License.
   0
 
 
-  1
+  15
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.mob.MobUtils.java;>org/apache/hadoop/hbase/mob/MobUtils.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.DateTieredStoreEngine.java;>org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java
 
 
   0
@@ -1917,7 +1917,7 @@ under the License.
   0
 
 
-  15
+  1
 
   
   
@@ -3770,7 +3770,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.util.HFileArchiveTestingUtil.java;>org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.quotas.TestQuotaObserverChore.java;>org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java
 
 
   0
@@ -3779,12 +3779,12 @@ under the License.
   0
 
 
-  9
+  0
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.quotas.TestQuotaObserverChore.java;>org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.util.HFileArchiveTestingUtil.java;>org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
 
 
   0
@@ -3793,7 +3793,7 @@ under the License.
   0
 
 
-  0
+  9
 
   
   
@@ -5338,7 +5338,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.TestSwitchToStreamRead.java;>org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.snapshot.TestRegionSnapshotTask.java;>org/apache/hadoop/hbase/snapshot/TestRegionSnapshotTask.java
 
 
   0
@@ -5352,7 +5352,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.snapshot.TestRegionSnapshotTask.java;>org/apache/hadoop/hbase/snapshot/TestRegionSnapshotTask.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.snapshot.TestRestoreSnapshotHelper.java;>org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
 
 
   0
@@ -5361,12 +5361,12 @@ under the License.
   0
 
 
-  0
+  2
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.snapshot.TestRestoreSnapshotHelper.java;>org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.TestSwitchToStreamRead.java;>org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java
 
 
   0
@@ -5375,7 +5375,7 @@ under the License.
   0
 
 
-  

[28/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
index 3c2959e..c233c17 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
@@ -405,16 +405,6 @@ service.
 
 
 boolean
-HTable.checkAndDelete(byte[]row,
-  byte[]family,
-  byte[]qualifier,
-  byte[]value,
-  Deletedelete)
-Deprecated.
-
-
-
-boolean
 Table.checkAndDelete(byte[]row,
   byte[]family,
   byte[]qualifier,
@@ -425,18 +415,17 @@ service.
 
 
 
-
+
 boolean
-HTable.checkAndDelete(byte[]row,
+HTable.checkAndDelete(byte[]row,
   byte[]family,
   byte[]qualifier,
-  CompareFilter.CompareOpcompareOp,
   byte[]value,
   Deletedelete)
 Deprecated.
 
 
-
+
 boolean
 Table.checkAndDelete(byte[]row,
   byte[]family,
@@ -449,18 +438,18 @@ service.
 
 
 
-
+
 boolean
-HTable.checkAndDelete(byte[]row,
+HTable.checkAndDelete(byte[]row,
   byte[]family,
   byte[]qualifier,
-  CompareOperatorop,
+  CompareFilter.CompareOpcompareOp,
   byte[]value,
   Deletedelete)
 Deprecated.
 
 
-
+
 boolean
 Table.checkAndDelete(byte[]row,
   byte[]family,
@@ -473,29 +462,40 @@ service.
 
 
 
+
+boolean
+HTable.checkAndDelete(byte[]row,
+  byte[]family,
+  byte[]qualifier,
+  CompareOperatorop,
+  byte[]value,
+  Deletedelete)
+Deprecated.
+
+
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
-RawAsyncTableImpl.delete(Deletedelete)
+AsyncTable.delete(Deletedelete)
+Deletes the specified cells/row.
+
 
 
 void
-HTable.delete(Deletedelete)
-
-
-void
 Table.delete(Deletedelete)
 Deletes the specified cells/row.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
 AsyncTableImpl.delete(Deletedelete)
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
-AsyncTable.delete(Deletedelete)
-Deletes the specified cells/row.
-
+RawAsyncTableImpl.delete(Deletedelete)
+
+
+void
+HTable.delete(Deletedelete)
 
 
 private boolean
@@ -508,19 +508,19 @@ service.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
-RawAsyncTableImpl.CheckAndMutateBuilderImpl.thenDelete(Deletedelete)
+AsyncTable.CheckAndMutateBuilder.thenDelete(Deletedelete)
 
 
 boolean
-HTable.CheckAndMutateBuilderImpl.thenDelete(Deletedelete)
+Table.CheckAndMutateBuilder.thenDelete(Deletedelete)
 
 
-boolean
-Table.CheckAndMutateBuilder.thenDelete(Deletedelete)
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+RawAsyncTableImpl.CheckAndMutateBuilderImpl.thenDelete(Deletedelete)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
-AsyncTable.CheckAndMutateBuilder.thenDelete(Deletedelete)
+boolean
+HTable.CheckAndMutateBuilderImpl.thenDelete(Deletedelete)
 
 
 
@@ -533,27 +533,27 @@ service.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in 

[10/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
index 9638c2e..ee7e1b8 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
@@ -246,7 +246,7 @@
 
 
 protected void
-AssignProcedure.finishTransition(MasterProcedureEnvenv,
+UnassignProcedure.finishTransition(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode)
 
 
@@ -256,7 +256,7 @@
 
 
 protected void
-UnassignProcedure.finishTransition(MasterProcedureEnvenv,
+AssignProcedure.finishTransition(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode)
 
 
@@ -307,7 +307,7 @@
 
 
 protected boolean
-AssignProcedure.remoteCallFailed(MasterProcedureEnvenv,
+UnassignProcedure.remoteCallFailed(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode,
 http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in 
java.io">IOExceptionexception)
 
@@ -319,7 +319,7 @@
 
 
 protected boolean
-UnassignProcedure.remoteCallFailed(MasterProcedureEnvenv,
+AssignProcedure.remoteCallFailed(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode,
 http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in 
java.io">IOExceptionexception)
 
@@ -344,10 +344,10 @@
 
 
 protected void
-AssignProcedure.reportTransition(MasterProcedureEnvenv,
+UnassignProcedure.reportTransition(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCodecode,
-longopenSeqNum)
+longseqId)
 
 
 protected abstract void
@@ -358,10 +358,10 @@
 
 
 protected void
-UnassignProcedure.reportTransition(MasterProcedureEnvenv,
+AssignProcedure.reportTransition(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCodecode,
-longseqId)
+longopenSeqNum)
 
 
 private boolean
@@ -372,7 +372,7 @@
 
 
 protected boolean
-AssignProcedure.startTransition(MasterProcedureEnvenv,
+UnassignProcedure.startTransition(MasterProcedureEnvenv,
RegionStates.RegionStateNoderegionNode)
 
 
@@ -382,7 +382,7 @@
 
 
 protected boolean
-UnassignProcedure.startTransition(MasterProcedureEnvenv,
+AssignProcedure.startTransition(MasterProcedureEnvenv,
RegionStates.RegionStateNoderegionNode)
 
 
@@ -399,7 +399,7 @@
 
 
 protected boolean
-AssignProcedure.updateTransition(MasterProcedureEnvenv,
+UnassignProcedure.updateTransition(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode)
 
 
@@ -411,7 +411,7 @@
 
 
 protected boolean
-UnassignProcedure.updateTransition(MasterProcedureEnvenv,
+AssignProcedure.updateTransition(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode)
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.Action.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.Action.html
 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.Action.html
index b9977f2..0251f89 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.Action.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.Action.html
@@ -137,14 +137,6 @@
 
 
 
-protected BaseLoadBalancer.Cluster.Action
-FavoredStochasticBalancer.FavoredNodeLocalityPicker.generate(BaseLoadBalancer.Clustercluster)
-
-
-(package private) BaseLoadBalancer.Cluster.Action
-FavoredStochasticBalancer.FavoredNodeLoadPicker.generate(BaseLoadBalancer.Clustercluster)
-
-
 (package private) abstract BaseLoadBalancer.Cluster.Action
 StochasticLoadBalancer.CandidateGenerator.generate(BaseLoadBalancer.Clustercluster)
 
@@ -170,6 +162,14 @@
 
 
 protected BaseLoadBalancer.Cluster.Action
+FavoredStochasticBalancer.FavoredNodeLocalityPicker.generate(BaseLoadBalancer.Clustercluster)
+
+
+(package private) BaseLoadBalancer.Cluster.Action

[31/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
index 7eb7661..1d50582 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
@@ -104,13 +104,13 @@
 
 
 void
-MasterServices.checkTableModifiable(TableNametableName)
-Check table is modifiable; i.e.
-
+HMaster.checkTableModifiable(TableNametableName)
 
 
 void
-HMaster.checkTableModifiable(TableNametableName)
+MasterServices.checkTableModifiable(TableNametableName)
+Check table is modifiable; i.e.
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
index b1a475b..a32bb19 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
@@ -170,13 +170,13 @@
 
 
 void
-MasterServices.checkTableModifiable(TableNametableName)
-Check table is modifiable; i.e.
-
+HMaster.checkTableModifiable(TableNametableName)
 
 
 void
-HMaster.checkTableModifiable(TableNametableName)
+MasterServices.checkTableModifiable(TableNametableName)
+Check table is modifiable; i.e.
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
index 90f6967..594e74b 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
@@ -243,10 +243,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTag
-PrivateCellUtil.getTags(Cellcell)
-
-
-static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTag
 CellUtil.getTags(Cellcell)
 Deprecated.
 As of release 2.0.0, this 
will be removed in HBase 3.0.0.
@@ -254,6 +250,10 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
+
+static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTag
+PrivateCellUtil.getTags(Cellcell)
+
 
 static http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true;
 title="class or interface in java.util">IteratorTag
 CellUtil.tagsIterator(byte[]tags,
@@ -395,11 +395,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Cell
-PrivateCellUtil.createCell(Cellcell,
-  http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTagtags)
-
-
-static Cell
 CellUtil.createCell(Cellcell,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTagtags)
 Deprecated.
@@ -407,6 +402,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
+
+static Cell
+PrivateCellUtil.createCell(Cellcell,
+  http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTagtags)
+
 
 static byte[]
 TagUtil.fromList(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTagtags)
@@ -415,17 +415,17 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-ExtendedCellBuilder
-ExtendedCellBuilderImpl.setTags(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTagtags)
-
-
 RawCellBuilder
 RawCellBuilder.setTags(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTagtags)
 
-
+
 ExtendedCellBuilder
 ExtendedCellBuilder.setTags(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTagtags)
 
+
+ExtendedCellBuilder
+ExtendedCellBuilderImpl.setTags(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 

[04/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureStateSerializer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureStateSerializer.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureStateSerializer.html
index 01a50f5..733e376 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureStateSerializer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureStateSerializer.html
@@ -120,19 +120,19 @@
 
 
 protected void
-MoveRegionProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+UnassignProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-GCMergedRegionsProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+MoveRegionProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-AssignProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+GCRegionProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-GCRegionProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+GCMergedRegionsProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
@@ -144,23 +144,23 @@
 
 
 protected void
-UnassignProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+AssignProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-MoveRegionProcedure.serializeStateData(ProcedureStateSerializerserializer)
+UnassignProcedure.serializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-GCMergedRegionsProcedure.serializeStateData(ProcedureStateSerializerserializer)
+MoveRegionProcedure.serializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-AssignProcedure.serializeStateData(ProcedureStateSerializerserializer)
+GCRegionProcedure.serializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-GCRegionProcedure.serializeStateData(ProcedureStateSerializerserializer)
+GCMergedRegionsProcedure.serializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
@@ -172,7 +172,7 @@
 
 
 protected void
-UnassignProcedure.serializeStateData(ProcedureStateSerializerserializer)
+AssignProcedure.serializeStateData(ProcedureStateSerializerserializer)
 
 
 
@@ -212,115 +212,115 @@
 
 
 protected void
-CloneSnapshotProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+DeleteTableProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-DeleteTableProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+DisableTableProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-TruncateTableProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+DeleteNamespaceProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-DeleteNamespaceProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+CreateNamespaceProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-ServerCrashProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+EnableTableProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-DisableTableProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+CreateTableProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-RecoverMetaProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+ModifyNamespaceProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-CreateNamespaceProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+AbstractStateMachineRegionProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-AbstractStateMachineRegionProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+CloneSnapshotProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-EnableTableProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+ServerCrashProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-CreateTableProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+ModifyTableProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-ModifyNamespaceProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+RecoverMetaProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-ModifyTableProcedure.deserializeStateData(ProcedureStateSerializerserializer)
+RestoreSnapshotProcedure.deserializeStateData(ProcedureStateSerializerserializer)
 
 
 protected void
-RestoreSnapshotProcedure.deserializeStateData(ProcedureStateSerializerserializer)

[44/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
index 3e6ed5d..3d558d8 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
@@ -125,104 +125,104 @@
 
 
 byte[]
-RawBytes.decode(PositionedByteRangesrc)
+OrderedBlobVar.decode(PositionedByteRangesrc)
 
 
-T
-FixedLengthWrapper.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/8/docs/api/java/lang/Number.html?is-external=true;
 title="class or interface in java.lang">Number
+OrderedNumeric.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Short.html?is-external=true;
 title="class or interface in java.lang">Short
-RawShort.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/8/docs/api/java/lang/Byte.html?is-external=true;
 title="class or interface in java.lang">Byte
+RawByte.decode(PositionedByteRangesrc)
 
 
-T
-TerminatedWrapper.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
+OrderedInt32.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Float.html?is-external=true;
 title="class or interface in java.lang">Float
-OrderedFloat32.decode(PositionedByteRangesrc)
+T
+FixedLengthWrapper.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Double.html?is-external=true;
 title="class or interface in java.lang">Double
-OrderedFloat64.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+OrderedString.decode(PositionedByteRangesrc)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/lang/Float.html?is-external=true;
 title="class or interface in java.lang">Float
 RawFloat.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Byte.html?is-external=true;
 title="class or interface in java.lang">Byte
-OrderedInt8.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
+RawInteger.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object[]
-Struct.decode(PositionedByteRangesrc)
+T
+DataType.decode(PositionedByteRangesrc)
+Read an instance of T from the buffer 
src.
+
 
 
-byte[]
-OrderedBlob.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
+RawLong.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
-RawInteger.decode(PositionedByteRangesrc)
-
-
 http://docs.oracle.com/javase/8/docs/api/java/lang/Short.html?is-external=true;
 title="class or interface in java.lang">Short
-OrderedInt16.decode(PositionedByteRangesrc)
+RawShort.decode(PositionedByteRangesrc)
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 RawString.decode(PositionedByteRangesrc)
 
+
+byte[]
+RawBytes.decode(PositionedByteRangesrc)
+
 
 byte[]
-OrderedBlobVar.decode(PositionedByteRangesrc)
+OrderedBlob.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Byte.html?is-external=true;
 title="class or interface in java.lang">Byte
-RawByte.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object[]
+Struct.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-OrderedString.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/8/docs/api/java/lang/Double.html?is-external=true;
 title="class or interface in java.lang">Double
+RawDouble.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
-OrderedInt32.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/8/docs/api/java/lang/Short.html?is-external=true;
 title="class or interface in java.lang">Short
+OrderedInt16.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long

[40/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/deprecated-list.html
--
diff --git a/devapidocs/deprecated-list.html b/devapidocs/deprecated-list.html
index c765780..8b9bf43 100644
--- a/devapidocs/deprecated-list.html
+++ b/devapidocs/deprecated-list.html
@@ -555,87 +555,87 @@
 
 
 
-org.apache.hadoop.hbase.client.HTable.checkAndDelete(byte[],
 byte[], byte[], byte[], Delete)
-
-
 org.apache.hadoop.hbase.client.Table.checkAndDelete(byte[],
 byte[], byte[], byte[], Delete)
 Since 2.0.0. Will be 
removed in 3.0.0. Use Table.checkAndMutate(byte[],
 byte[])
 
 
-
-org.apache.hadoop.hbase.client.HTable.checkAndDelete(byte[],
 byte[], byte[], CompareFilter.CompareOp, byte[], Delete)
-
 
+org.apache.hadoop.hbase.client.HTable.checkAndDelete(byte[],
 byte[], byte[], byte[], Delete)
+
+
 org.apache.hadoop.hbase.client.Table.checkAndDelete(byte[],
 byte[], byte[], CompareFilter.CompareOp, byte[], Delete)
 Since 2.0.0. Will be 
removed in 3.0.0. Use Table.checkAndMutate(byte[],
 byte[])
 
 
+
+org.apache.hadoop.hbase.client.HTable.checkAndDelete(byte[],
 byte[], byte[], CompareFilter.CompareOp, byte[], Delete)
+
 
 org.apache.hadoop.hbase.rest.client.RemoteHTable.checkAndDelete(byte[],
 byte[], byte[], CompareFilter.CompareOp, byte[], Delete)
 
 
-org.apache.hadoop.hbase.client.HTable.checkAndDelete(byte[],
 byte[], byte[], CompareOperator, byte[], Delete)
-
-
 org.apache.hadoop.hbase.client.Table.checkAndDelete(byte[],
 byte[], byte[], CompareOperator, byte[], Delete)
 Since 2.0.0. Will be 
removed in 3.0.0. Use Table.checkAndMutate(byte[],
 byte[])
 
 
+
+org.apache.hadoop.hbase.client.HTable.checkAndDelete(byte[],
 byte[], byte[], CompareOperator, byte[], Delete)
+
 
 org.apache.hadoop.hbase.rest.client.RemoteHTable.checkAndDelete(byte[],
 byte[], byte[], CompareOperator, byte[], Delete)
 
 
-org.apache.hadoop.hbase.client.HTable.checkAndMutate(byte[],
 byte[], byte[], CompareFilter.CompareOp, byte[], RowMutations)
-
-
 org.apache.hadoop.hbase.client.Table.checkAndMutate(byte[],
 byte[], byte[], CompareFilter.CompareOp, byte[], RowMutations)
 Since 2.0.0. Will be 
removed in 3.0.0. Use Table.checkAndMutate(byte[],
 byte[])
 
 
+
+org.apache.hadoop.hbase.client.HTable.checkAndMutate(byte[],
 byte[], byte[], CompareFilter.CompareOp, byte[], RowMutations)
+
 
 org.apache.hadoop.hbase.rest.client.RemoteHTable.checkAndMutate(byte[],
 byte[], byte[], CompareFilter.CompareOp, byte[], RowMutations)
 
 
-org.apache.hadoop.hbase.client.HTable.checkAndMutate(byte[],
 byte[], byte[], CompareOperator, byte[], RowMutations)
-
-
 org.apache.hadoop.hbase.client.Table.checkAndMutate(byte[],
 byte[], byte[], CompareOperator, byte[], RowMutations)
 Since 2.0.0. Will be 
removed in 3.0.0. Use Table.checkAndMutate(byte[],
 byte[])
 
 
+
+org.apache.hadoop.hbase.client.HTable.checkAndMutate(byte[],
 byte[], byte[], CompareOperator, byte[], RowMutations)
+
 
 org.apache.hadoop.hbase.rest.client.RemoteHTable.checkAndMutate(byte[],
 byte[], byte[], CompareOperator, byte[], RowMutations)
 
 
-org.apache.hadoop.hbase.client.HTable.checkAndPut(byte[],
 byte[], byte[], byte[], Put)
-
-
 org.apache.hadoop.hbase.client.Table.checkAndPut(byte[],
 byte[], byte[], byte[], Put)
 Since 2.0.0. Will be 
removed in 3.0.0. Use Table.checkAndMutate(byte[],
 byte[])
 
 
+
+org.apache.hadoop.hbase.client.HTable.checkAndPut(byte[],
 byte[], byte[], byte[], Put)
+
 
 org.apache.hadoop.hbase.rest.client.RemoteHTable.checkAndPut(byte[],
 byte[], byte[], byte[], Put)
 
 
-org.apache.hadoop.hbase.client.HTable.checkAndPut(byte[],
 byte[], byte[], CompareFilter.CompareOp, byte[], Put)
-
-
 org.apache.hadoop.hbase.client.Table.checkAndPut(byte[],
 byte[], byte[], CompareFilter.CompareOp, byte[], Put)
 Since 2.0.0. Will be 
removed in 3.0.0. Use Table.checkAndMutate(byte[],
 byte[])
 
 
+
+org.apache.hadoop.hbase.client.HTable.checkAndPut(byte[],
 byte[], byte[], CompareFilter.CompareOp, byte[], Put)
+
 
 org.apache.hadoop.hbase.rest.client.RemoteHTable.checkAndPut(byte[],
 byte[], byte[], CompareFilter.CompareOp, byte[], Put)
 
 
-org.apache.hadoop.hbase.client.HTable.checkAndPut(byte[],
 byte[], byte[], CompareOperator, byte[], Put)
-
-
 org.apache.hadoop.hbase.client.Table.checkAndPut(byte[],
 byte[], byte[], CompareOperator, byte[], Put)
 Since 2.0.0. Will be 
removed in 3.0.0. Use Table.checkAndMutate(byte[],
 byte[])
 
 
+
+org.apache.hadoop.hbase.client.HTable.checkAndPut(byte[],
 byte[], byte[], CompareOperator, byte[], Put)
+
 
 org.apache.hadoop.hbase.rest.client.RemoteHTable.checkAndPut(byte[],
 byte[], byte[], CompareOperator, byte[], Put)
 
@@ -718,13 +718,13 @@
 
 
 
-org.apache.hadoop.hbase.client.RowMutations.compareTo(Row)
+org.apache.hadoop.hbase.client.Mutation.compareTo(Row)
 As of release 2.0.0, this 
will be removed in HBase 3.0.0.
  Use Row.COMPARATOR
 instead
 
 
 
-org.apache.hadoop.hbase.client.Mutation.compareTo(Row)

[01/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site c4828151c -> 828486ae9


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HStoreFile.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HStoreFile.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HStoreFile.html
index 75077a6..5da3ef8 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HStoreFile.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HStoreFile.html
@@ -521,33 +521,33 @@
 
 
 
-org.apache.hbase.thirdparty.com.google.common.collect.ImmutableCollectionHStoreFile
-StripeStoreFileManager.clearCompactedFiles()
-
-
 http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionHStoreFile
 DefaultStoreFileManager.clearCompactedFiles()
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionHStoreFile
 StoreFileManager.clearCompactedFiles()
 Clears all the compacted files and returns them.
 
 
-
+
 org.apache.hbase.thirdparty.com.google.common.collect.ImmutableCollectionHStoreFile
-StripeStoreFileManager.clearFiles()
+StripeStoreFileManager.clearCompactedFiles()
 
-
+
 org.apache.hbase.thirdparty.com.google.common.collect.ImmutableCollectionHStoreFile
 DefaultStoreFileManager.clearFiles()
 
-
+
 org.apache.hbase.thirdparty.com.google.common.collect.ImmutableCollectionHStoreFile
 StoreFileManager.clearFiles()
 Clears all the files currently in use and returns 
them.
 
 
+
+org.apache.hbase.thirdparty.com.google.common.collect.ImmutableCollectionHStoreFile
+StripeStoreFileManager.clearFiles()
+
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHStoreFile
 HRegion.close()
@@ -597,36 +597,36 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true;
 title="class or interface in java.util">IteratorHStoreFile
-StripeStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValuetargetKey)
-See StoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue)
- for details on this methods.
-
+DefaultStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValuetargetKey)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true;
 title="class or interface in java.util">IteratorHStoreFile
-DefaultStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValuetargetKey)
+StoreFileManager.getCandidateFilesForRowKeyBefore(KeyValuetargetKey)
+Gets initial, full list of candidate store files to check 
for row-key-before.
+
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true;
 title="class or interface in java.util">IteratorHStoreFile
-StoreFileManager.getCandidateFilesForRowKeyBefore(KeyValuetargetKey)
-Gets initial, full list of candidate store files to check 
for row-key-before.
+StripeStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValuetargetKey)
+See StoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue)
+ for details on this methods.
 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionHStoreFile
-StripeStoreFileManager.getCompactedfiles()
-
-
-http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionHStoreFile
 DefaultStoreFileManager.getCompactedfiles()
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionHStoreFile
 StoreFileManager.getCompactedfiles()
 List of compacted files inside this store that needs to be 
excluded in reads
  because further new reads will be using only the newly created files out of 
compaction.
 
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionHStoreFile
+StripeStoreFileManager.getCompactedfiles()
+
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionHStoreFile
 HStore.getCompactedFiles()
@@ -637,26 +637,26 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionHStoreFile
-StripeStoreFileManager.getFilesForScan(byte[]startRow,
+DefaultStoreFileManager.getFilesForScan(byte[]startRow,
booleanincludeStartRow,
byte[]stopRow,
booleanincludeStopRow)
 
 
 

[21/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotDescription.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotDescription.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotDescription.html
index 4584cda..fb9bdb3 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotDescription.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotDescription.html
@@ -137,9 +137,7 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListSnapshotDescription
-AsyncAdmin.listSnapshots()
-List completed snapshots.
-
+AsyncHBaseAdmin.listSnapshots()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListSnapshotDescription
@@ -148,22 +146,22 @@
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListSnapshotDescription
-RawAsyncHBaseAdmin.listSnapshots()
-
-
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListSnapshotDescription
 HBaseAdmin.listSnapshots()
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListSnapshotDescription
+AsyncAdmin.listSnapshots()
+List completed snapshots.
+
+
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListSnapshotDescription
-AsyncHBaseAdmin.listSnapshots()
+RawAsyncHBaseAdmin.listSnapshots()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListSnapshotDescription
-AsyncAdmin.listSnapshots(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern)
-List all the completed snapshots matching the given 
pattern.
-
+AsyncHBaseAdmin.listSnapshots(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in 
java.util.regex">Patternpattern)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListSnapshotDescription
@@ -172,16 +170,18 @@
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListSnapshotDescription
-RawAsyncHBaseAdmin.listSnapshots(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in 
java.util.regex">Patternpattern)
-
-
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListSnapshotDescription
 HBaseAdmin.listSnapshots(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in 
java.util.regex">Patternpattern)
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListSnapshotDescription
+AsyncAdmin.listSnapshots(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern)
+List all the completed snapshots matching the given 
pattern.
+
+
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in 

[51/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
Published site at .


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/828486ae
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/828486ae
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/828486ae

Branch: refs/heads/asf-site
Commit: 828486ae90f80e09263b4021e4418cc23114dcdc
Parents: c482815
Author: jenkins 
Authored: Wed Feb 14 15:13:30 2018 +
Committer: jenkins 
Committed: Wed Feb 14 15:13:30 2018 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 4 +-
 apidocs/deprecated-list.html|94 +-
 .../apache/hadoop/hbase/CompareOperator.html| 4 +-
 .../apache/hadoop/hbase/KeepDeletedCells.html   | 4 +-
 .../hadoop/hbase/MemoryCompactionPolicy.html| 4 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |   304 +-
 .../hadoop/hbase/class-use/CompareOperator.html | 8 +-
 .../hadoop/hbase/class-use/ServerMetrics.html   | 8 +-
 .../hadoop/hbase/class-use/ServerName.html  |40 +-
 .../hadoop/hbase/class-use/TableName.html   |80 +-
 .../apache/hadoop/hbase/client/Consistency.html | 4 +-
 .../apache/hadoop/hbase/client/Durability.html  | 4 +-
 .../hadoop/hbase/client/IsolationLevel.html | 4 +-
 .../hadoop/hbase/client/MasterSwitchType.html   | 4 +-
 .../hbase/client/MobCompactPartitionPolicy.html | 4 +-
 .../client/RequestController.ReturnCode.html| 4 +-
 .../hadoop/hbase/client/Scan.ReadType.html  | 4 +-
 .../hadoop/hbase/client/class-use/Append.html   | 8 +-
 .../hbase/client/class-use/Consistency.html | 8 +-
 .../hadoop/hbase/client/class-use/Delete.html   |20 +-
 .../hbase/client/class-use/Durability.html  |20 +-
 .../hadoop/hbase/client/class-use/Get.html  |46 +-
 .../hbase/client/class-use/Increment.html   | 8 +-
 .../hbase/client/class-use/IsolationLevel.html  | 8 +-
 .../hadoop/hbase/client/class-use/Mutation.html | 8 +-
 .../hadoop/hbase/client/class-use/Put.html  |24 +-
 .../hadoop/hbase/client/class-use/Result.html   |22 +-
 .../hbase/client/class-use/ResultScanner.html   |26 +-
 .../hadoop/hbase/client/class-use/Row.html  |14 +-
 .../hbase/client/class-use/RowMutations.html| 8 +-
 .../hadoop/hbase/client/class-use/Scan.html |22 +-
 .../hadoop/hbase/client/package-tree.html   |12 +-
 .../client/security/SecurityCapability.html | 4 +-
 .../hbase/filter/CompareFilter.CompareOp.html   | 4 +-
 .../filter/class-use/ByteArrayComparable.html   | 8 +-
 .../class-use/CompareFilter.CompareOp.html  | 8 +-
 .../filter/class-use/Filter.ReturnCode.html |   114 +-
 .../hadoop/hbase/filter/class-use/Filter.html   |56 +-
 .../hadoop/hbase/filter/package-tree.html   | 4 +-
 .../io/class-use/ImmutableBytesWritable.html|42 +-
 .../hadoop/hbase/io/class-use/TimeRange.html|16 +-
 .../hbase/io/crypto/class-use/Cipher.html   |18 +-
 .../hbase/io/encoding/DataBlockEncoding.html| 4 +-
 .../mapreduce/class-use/TableRecordReader.html  | 4 +-
 .../org/apache/hadoop/hbase/package-tree.html   | 2 +-
 .../apache/hadoop/hbase/quotas/QuotaType.html   | 4 +-
 .../hbase/quotas/SpaceViolationPolicy.html  | 4 +-
 .../hadoop/hbase/quotas/ThrottleType.html   | 4 +-
 .../hbase/quotas/ThrottlingException.Type.html  | 4 +-
 .../hadoop/hbase/quotas/package-tree.html   | 4 +-
 .../hadoop/hbase/regionserver/BloomType.html| 4 +-
 apidocs/org/apache/hadoop/hbase/util/Order.html | 4 +-
 .../hadoop/hbase/util/class-use/ByteRange.html  |   124 +-
 .../hadoop/hbase/util/class-use/Bytes.html  |48 +-
 .../hadoop/hbase/util/class-use/Order.html  |44 +-
 .../util/class-use/PositionedByteRange.html |   356 +-
 apidocs/overview-tree.html  |22 +-
 book.html   | 2 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 26802 +
 checkstyle.rss  |   270 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/constant-values.html | 8 +-
 devapidocs/deprecated-list.html |   212 +-
 .../hadoop/hbase/ClusterMetrics.Option.html | 4 +-
 .../hbase/MetaTableAccessor.QueryType.html  | 4 +-