hbase git commit: HBASE-17941 CellArrayMap#getCell may throw IndexOutOfBoundsException

2017-04-20 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master ea3a27b18 -> 33dadc1a9


HBASE-17941 CellArrayMap#getCell may throw IndexOutOfBoundsException

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/33dadc1a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/33dadc1a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/33dadc1a

Branch: refs/heads/master
Commit: 33dadc1a941a536742799a46444c67a1ed66d124
Parents: ea3a27b
Author: s9514171 
Authored: Thu Apr 20 14:54:52 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Fri Apr 21 11:35:39 2017 +0800

--
 .../java/org/apache/hadoop/hbase/regionserver/CellArrayMap.java| 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/33dadc1a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellArrayMap.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellArrayMap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellArrayMap.java
index 605fea2..898e469 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellArrayMap.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellArrayMap.java
@@ -48,7 +48,7 @@ public class CellArrayMap extends CellFlatMap {
 
   @Override
   protected Cell getCell(int i) {
-    if( (i < minCellIdx) && (i >= maxCellIdx) ) return null;
+    if( (i < minCellIdx) || (i >= maxCellIdx) ) return null;
     return block[i];
   }
 }
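Why the one-character change fixes the reported exception: no index can be below minCellIdx and at or above maxCellIdx at the same time, so the old '&&' guard was effectively always false and an out-of-range index fell straight through to block[i]. The stand-alone sketch below (SimpleCellArray is a made-up class, not the HBase type, and Strings stand in for Cells) shows the corrected '||' bounds check:

// Hypothetical stand-in for CellArrayMap#getCell, for illustration only.
public class SimpleCellArray {
  private final String[] block;   // stands in for the Cell[] backing array
  private final int minCellIdx;   // first valid index (inclusive)
  private final int maxCellIdx;   // last valid index (exclusive)

  public SimpleCellArray(String[] block, int minCellIdx, int maxCellIdx) {
    this.block = block;
    this.minCellIdx = minCellIdx;
    this.maxCellIdx = maxCellIdx;
  }

  // Reject an index that is out of range on EITHER side. With '&&' the guard
  // never triggers, so block[i] can still be reached with an invalid i and
  // throw ArrayIndexOutOfBoundsException.
  public String getCell(int i) {
    if ((i < minCellIdx) || (i >= maxCellIdx)) {
      return null;
    }
    return block[i];
  }

  public static void main(String[] args) {
    SimpleCellArray map = new SimpleCellArray(new String[] {"a", "b", "c"}, 0, 3);
    System.out.println(map.getCell(1));  // prints "b"
    System.out.println(map.getCell(5));  // prints "null" instead of throwing
  }
}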



hbase git commit: HBASE-17943 The in-memory flush size is different for each CompactingMemStore located in the same region

2017-04-20 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 87f2bb579 -> ea3a27b18


HBASE-17943 The in-memory flush size is different for each CompactingMemStore located in the same region


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ea3a27b1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ea3a27b1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ea3a27b1

Branch: refs/heads/master
Commit: ea3a27b18df875284899b04fbc5fb58a3120e6c7
Parents: 87f2bb5
Author: Chia-Ping Tsai 
Authored: Fri Apr 21 11:20:20 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Fri Apr 21 11:20:20 2017 +0800

--
 .../hadoop/hbase/regionserver/CompactingMemStore.java |  5 +
 .../hbase/regionserver/RegionServicesForStores.java   |  2 +-
 .../TestWalAndCompactingMemStoreFlush.java| 14 +++---
 3 files changed, 17 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ea3a27b1/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
index 0c56693..b244997 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
@@ -541,6 +541,11 @@ public class CompactingMemStore extends AbstractMemStore {
     return lowest;
   }
 
+  @VisibleForTesting
+  long getInmemoryFlushSize() {
+    return inmemoryFlushSize;
+  }
+
   // debug method
   public void debug() {
     String msg = "active size=" + this.active.keySize();

http://git-wip-us.apache.org/repos/asf/hbase/blob/ea3a27b1/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServicesForStores.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServicesForStores.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServicesForStores.java
index ea346ea..8cdfd3b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServicesForStores.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServicesForStores.java
@@ -82,7 +82,7 @@ public class RegionServicesForStores {
   }
 
   public int getNumStores() {
-    return region.getStores().size();
+    return region.getTableDesc().getColumnFamilyCount();
   }
 
   // methods for tests

http://git-wip-us.apache.org/repos/asf/hbase/blob/ea3a27b1/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWalAndCompactingMemStoreFlush.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWalAndCompactingMemStoreFlush.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWalAndCompactingMemStoreFlush.java
index aae0a4d..2c16399 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWalAndCompactingMemStoreFlush.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWalAndCompactingMemStoreFlush.java
@@ -108,6 +108,12 @@ public class TestWalAndCompactingMemStoreFlush {
     return new Get(row);
   }
 
+  private void verifyInMemoryFlushSize(Region region) {
+    assertEquals(
+      ((CompactingMemStore) ((HStore)region.getStore(FAMILY1)).memstore).getInmemoryFlushSize(),
+      ((CompactingMemStore) ((HStore)region.getStore(FAMILY3)).memstore).getInmemoryFlushSize());
+  }
+
   // A helper function to verify edits.
   void verifyEdit(int familyNum, int putNum, Table table) throws IOException {
     Result r = table.get(createGet(familyNum, putNum));
@@ -137,7 +143,7 @@ public class TestWalAndCompactingMemStoreFlush {
 
     // Initialize the region
     Region region = initHRegion("testSelectiveFlushWithEager", conf);
-
+    verifyInMemoryFlushSize(region);
     // Add 1200 entries for CF1, 100 for CF2 and 50 for CF3
     for (int i = 1; i <= 1200; i++) {
       region.put(createPut(1, i));// compacted memstore, all the keys are unique
@@ -378,7 +384,7 @@ public class TestWalAndCompactingMemStoreFlush {
 
     // Initialize the region
     Region region = initHRegion("testSelectiveFlushWithIndexCompaction", conf);
-
+    verifyInMemoryFlushSize(region);
 
     /*--*/
     /* PHASE I - insertions */
     // Add 1200 entries for CF1, 100 for CF2 and 50 
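For readers skimming the diff: a CompactingMemStore's in-memory flush threshold is derived by splitting the region's flush size across its stores, and region.getStores().size() can still be growing while the stores are being created, so memstores created early presumably saw a different divisor than ones created late. Dividing by the column-family count from the table descriptor keeps the divisor identical for every memstore in the region. A rough stand-alone illustration follows; FlushSizeDemo, the 128 MB region flush size and the 0.014 factor are invented values, not HBase code:

import java.util.ArrayList;
import java.util.List;

public class FlushSizeDemo {
  static final long REGION_FLUSH_SIZE = 128L * 1024 * 1024; // assumed 128 MB
  static final double IN_MEMORY_FLUSH_FACTOR = 0.014;       // assumed factor

  public static void main(String[] args) {
    List<String> storesCreatedSoFar = new ArrayList<>();
    int columnFamilyCount = 3; // known up front from the table descriptor

    for (String family : new String[] {"cf1", "cf2", "cf3"}) {
      storesCreatedSoFar.add(family);

      // Buggy: the divisor grows as stores are instantiated one by one,
      // so each memstore computes a different threshold.
      long buggy = (long) (REGION_FLUSH_SIZE / storesCreatedSoFar.size() * IN_MEMORY_FLUSH_FACTOR);

      // Fixed: the column-family count is the same whenever the memstore
      // is created, so every store gets the same threshold.
      long fixed = (long) (REGION_FLUSH_SIZE / columnFamilyCount * IN_MEMORY_FLUSH_FACTOR);

      System.out.println(family + ": buggy=" + buggy + " fixed=" + fixed);
    }
  }
}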

hbase git commit: HBASE-13288 Fix naming of parameter in Delete constructor

2017-04-20 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 49cba2c23 -> 87f2bb579


HBASE-13288 Fix naming of parameter in Delete constructor

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/87f2bb57
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/87f2bb57
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/87f2bb57

Branch: refs/heads/master
Commit: 87f2bb5796bd2a05f2c9db559ddd13a33fc80e36
Parents: 49cba2c
Author: Ashish Singhi 
Authored: Thu Mar 19 22:04:25 2015 +0530
Committer: Chia-Ping Tsai 
Committed: Fri Apr 21 11:09:44 2017 +0800

--
 .../org/apache/hadoop/hbase/client/Delete.java| 18 +-
 1 file changed, 9 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/87f2bb57/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
index 278ea58..0b3769d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
@@ -108,12 +108,12 @@ public class Delete extends Mutation implements Comparable<Row> {
    *
    * This timestamp is ONLY used for a delete row operation.  If specifying
    * families or columns, you must specify each timestamp individually.
-   * @param rowArray We make a local copy of this passed in row.
+   * @param row We make a local copy of this passed in row.
    * @param rowOffset
    * @param rowLength
    */
-  public Delete(final byte [] rowArray, final int rowOffset, final int rowLength) {
-    this(rowArray, rowOffset, rowLength, HConstants.LATEST_TIMESTAMP);
+  public Delete(final byte[] row, final int rowOffset, final int rowLength) {
+    this(row, rowOffset, rowLength, HConstants.LATEST_TIMESTAMP);
   }
 
   /**
@@ -125,15 +125,15 @@ public class Delete extends Mutation implements Comparable<Row> {
    *
    * This timestamp is ONLY used for a delete row operation.  If specifying
    * families or columns, you must specify each timestamp individually.
-   * @param rowArray We make a local copy of this passed in row.
+   * @param row We make a local copy of this passed in row.
    * @param rowOffset
    * @param rowLength
-   * @param ts maximum version timestamp (only for delete row)
+   * @param timestamp maximum version timestamp (only for delete row)
    */
-  public Delete(final byte [] rowArray, final int rowOffset, final int rowLength, long ts) {
-    checkRow(rowArray, rowOffset, rowLength);
-    this.row = Bytes.copy(rowArray, rowOffset, rowLength);
-    setTimestamp(ts);
+  public Delete(final byte[] row, final int rowOffset, final int rowLength, long timestamp) {
+    checkRow(row, rowOffset, rowLength);
+    this.row = Bytes.copy(row, rowOffset, rowLength);
+    setTimestamp(timestamp);
   }
 
   /**
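The rename is purely cosmetic, but for completeness here is a short usage sketch of the offset/length constructor; the table name, row bytes, offsets and connection setup are invented for illustration and are not part of the commit:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class DeleteRowExample {
  public static void main(String[] args) throws Exception {
    byte[] buffer = Bytes.toBytes("prefix-row-42");
    int rowOffset = 7;  // where the row key starts inside the larger buffer
    int rowLength = 6;  // length of "row-42"

    // The constructor copies the row bytes out of the buffer; only the
    // parameter names changed in this commit, not the behaviour.
    Delete delete = new Delete(buffer, rowOffset, rowLength);

    try (Connection connection = ConnectionFactory.createConnection();
         Table table = connection.getTable(TableName.valueOf("my-table"))) {
      table.delete(delete);
    }
  }
}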



hbase git commit: HBASE-17937 Memstore size becomes negative in case of expensive postPut/Delete Coprocessor call

2017-04-20 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-1.1 af9719e45 -> 93ac76ef6


HBASE-17937 Memstore size becomes negative in case of expensive postPut/Delete Coprocessor call

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/93ac76ef
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/93ac76ef
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/93ac76ef

Branch: refs/heads/branch-1.1
Commit: 93ac76ef6164d8eb183f048ed727dc8b4290e0fa
Parents: af9719e
Author: Abhishek Singh Chouhan 
Authored: Wed Apr 19 12:25:58 2017 +0530
Committer: zhangduo 
Committed: Fri Apr 21 08:55:35 2017 +0800

--
 .../hadoop/hbase/regionserver/HRegion.java  |   4 +-
 ...NegativeMemstoreSizeWithSlowCoprocessor.java | 103 +++
 2 files changed, 106 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/93ac76ef/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 7e5da03..3263873 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -3207,6 +3207,9 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
       }
 
       doRollBackMemstore = false;
+      // update memstore size
+      this.addAndGetGlobalMemstoreSize(addedSize);
+
       // calling the post CP hook for batch mutation
       if (!isInReplay && coprocessorHost != null) {
         MiniBatchOperationInProgress miniBatchOp =
@@ -3252,7 +3255,6 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
         rollbackMemstore(memstoreCells);
         if (writeEntry != null) mvcc.cancelMemstoreInsert(writeEntry);
       } else {
-        this.addAndGetGlobalMemstoreSize(addedSize);
         if (writeEntry != null) {
           mvcc.completeMemstoreInsertWithSeqNum(writeEntry, walKey);
         }
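The hunk above moves the global memstore-size update so that it happens before the post-batch coprocessor hooks run instead of afterwards in the else branch. The stand-alone sketch below (GlobalMemstoreAccounting and its fields are invented names, not HBase code) illustrates why the ordering matters when a slow hook allows a concurrent flush to subtract bytes that were never added to the counter:

import java.util.concurrent.atomic.AtomicLong;

public class GlobalMemstoreAccounting {
  private static final AtomicLong globalMemstoreSize = new AtomicLong();

  public static void main(String[] args) throws InterruptedException {
    long addedSize = 1024; // bytes just written into the memstore by a batch

    // Fixed ordering: publish the added size BEFORE running the (possibly
    // slow) post hook, so a concurrent flush always sees a consistent total.
    globalMemstoreSize.addAndGet(addedSize);

    // While the slow postPut/postDelete coprocessor runs, another thread
    // flushes the memstore and subtracts what it flushed.
    Thread flusher = new Thread(() -> globalMemstoreSize.addAndGet(-addedSize));
    flusher.start();
    Thread.sleep(100); // stands in for the expensive coprocessor work
    flusher.join();

    // With the old ordering (add only after the hook finished), the flusher's
    // subtraction could run first and drive the counter negative.
    System.out.println("global memstore size = " + globalMemstoreSize.get());
  }
}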

http://git-wip-us.apache.org/repos/asf/hbase/blob/93ac76ef/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemstoreSizeWithSlowCoprocessor.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemstoreSizeWithSlowCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemstoreSizeWithSlowCoprocessor.java
new file mode 100644
index 000..45600c7
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemstoreSizeWithSlowCoprocessor.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
+ * law or agreed to in writing, software distributed under the License is distributed on an "AS IS"
+ * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License
+ * for the specific language governing permissions and limitations under the License.
+ */
+package org.apache.hadoop.hbase.coprocessor;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Test that verifies we do not have memstore size negative when a postPut/Delete hook is
+ * slow/expensive and a flush is triggered at the same time the 

hbase git commit: HBASE-17937 Memstore size becomes negative in case of expensive postPut/Delete Coprocessor call

2017-04-20 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 d0d162d1f -> 0b5440d6d


HBASE-17937 Memstore size becomes negative in case of expensive postPut/Delete Coprocessor call

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0b5440d6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0b5440d6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0b5440d6

Branch: refs/heads/branch-1.2
Commit: 0b5440d6d1a6fb1943917d68655b3abb8bd483b0
Parents: d0d162d
Author: Abhishek Singh Chouhan 
Authored: Wed Apr 19 12:25:58 2017 +0530
Committer: zhangduo 
Committed: Fri Apr 21 08:52:52 2017 +0800

--
 .../hadoop/hbase/regionserver/HRegion.java  |   4 +-
 ...NegativeMemstoreSizeWithSlowCoprocessor.java | 103 +++
 2 files changed, 106 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0b5440d6/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 8f1a935..c6c611d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -3219,6 +3219,9 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
       }
 
       doRollBackMemstore = false;
+      // update memstore size
+      this.addAndGetGlobalMemstoreSize(addedSize);
+
       // calling the post CP hook for batch mutation
       if (!isInReplay && coprocessorHost != null) {
         MiniBatchOperationInProgress miniBatchOp =
@@ -3276,7 +3279,6 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
         }
         if (writeEntry != null) mvcc.complete(writeEntry);
       } else {
-        this.addAndGetGlobalMemstoreSize(addedSize);
         if (writeEntry != null) {
           mvcc.completeAndWait(writeEntry);
         }

http://git-wip-us.apache.org/repos/asf/hbase/blob/0b5440d6/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemstoreSizeWithSlowCoprocessor.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemstoreSizeWithSlowCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemstoreSizeWithSlowCoprocessor.java
new file mode 100644
index 000..45600c7
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemstoreSizeWithSlowCoprocessor.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
+ * law or agreed to in writing, software distributed under the License is distributed on an "AS IS"
+ * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License
+ * for the specific language governing permissions and limitations under the License.
+ */
+package org.apache.hadoop.hbase.coprocessor;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Test that verifies we do not have memstore size negative when a postPut/Delete hook is
+ * slow/expensive and a flush is triggered at the same time the coprocessor is doing its work. To
+ * simulate this we call flush from 

hbase git commit: HBASE-17937 Memstore size becomes negative in case of expensive postPut/Delete Coprocessor call

2017-04-20 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-1 6ea691524 -> d69a6366f


HBASE-17937 Memstore size becomes negative in case of expensive postPut/Delete Coprocessor call

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d69a6366
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d69a6366
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d69a6366

Branch: refs/heads/branch-1
Commit: d69a6366f6d36ce229df80447998e71ca4654518
Parents: 6ea6915
Author: Abhishek Singh Chouhan 
Authored: Wed Apr 19 12:25:58 2017 +0530
Committer: zhangduo 
Committed: Fri Apr 21 08:50:53 2017 +0800

--
 .../hadoop/hbase/regionserver/HRegion.java  |   4 +-
 ...NegativeMemstoreSizeWithSlowCoprocessor.java | 103 +++
 2 files changed, 106 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d69a6366/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 81547d5..d16af6d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -3503,6 +3503,9 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
       }
 
       doRollBackMemstore = false;
+      // update memstore size
+      this.addAndGetGlobalMemstoreSize(addedSize);
+
       // calling the post CP hook for batch mutation
       if (!isInReplay && coprocessorHost != null) {
         MiniBatchOperationInProgress miniBatchOp =
@@ -3560,7 +3563,6 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
         }
         if (writeEntry != null) mvcc.complete(writeEntry);
       } else {
-        this.addAndGetGlobalMemstoreSize(addedSize);
         if (writeEntry != null) {
           mvcc.completeAndWait(writeEntry);
         }

http://git-wip-us.apache.org/repos/asf/hbase/blob/d69a6366/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemstoreSizeWithSlowCoprocessor.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemstoreSizeWithSlowCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemstoreSizeWithSlowCoprocessor.java
new file mode 100644
index 000..45600c7
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemstoreSizeWithSlowCoprocessor.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
+ * law or agreed to in writing, software distributed under the License is distributed on an "AS IS"
+ * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License
+ * for the specific language governing permissions and limitations under the License.
+ */
+package org.apache.hadoop.hbase.coprocessor;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Test that verifies we do not have memstore size negative when a postPut/Delete hook is
+ * slow/expensive and a flush is triggered at the same time the coprocessor is doing its work. To
+ * simulate this we call flush from the 

hbase git commit: HBASE-17937 Memstore size becomes negative in case of expensive postPut/Delete Coprocessor call

2017-04-20 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 40cc666ac -> 49cba2c23


HBASE-17937 Memstore size becomes negative in case of expensive postPut/Delete Coprocessor call

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/49cba2c2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/49cba2c2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/49cba2c2

Branch: refs/heads/master
Commit: 49cba2c237ecc1b3285d942f1ad176ea50c44cd1
Parents: 40cc666
Author: Abhishek Singh Chouhan 
Authored: Wed Apr 19 11:22:23 2017 +0530
Committer: zhangduo 
Committed: Fri Apr 21 08:50:09 2017 +0800

--
 .../hadoop/hbase/regionserver/HRegion.java  |   4 +-
 ...NegativeMemstoreSizeWithSlowCoprocessor.java | 104 +++
 2 files changed, 107 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/49cba2c2/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index b21a84d..4836dc8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -3392,6 +3392,9 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
         applyFamilyMapToMemstore(familyMaps[i], memstoreSize);
       }
 
+      // update memstore size
+      this.addAndGetMemstoreSize(memstoreSize);
+
       // calling the post CP hook for batch mutation
       if (!replay && coprocessorHost != null) {
         MiniBatchOperationInProgress miniBatchOp =
@@ -3444,7 +3447,6 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
     } finally {
       // Call complete rather than completeAndWait because we probably had error if walKey != null
       if (writeEntry != null) mvcc.complete(writeEntry);
-      this.addAndGetMemstoreSize(memstoreSize);
       if (locked) {
         this.updatesLock.readLock().unlock();
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/49cba2c2/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemstoreSizeWithSlowCoprocessor.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemstoreSizeWithSlowCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemstoreSizeWithSlowCoprocessor.java
new file mode 100644
index 000..ae2f055
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemstoreSizeWithSlowCoprocessor.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
+ * law or agreed to in writing, software distributed under the License is distributed on an "AS IS"
+ * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License
+ * for the specific language governing permissions and limitations under the License.
+ */
+package org.apache.hadoop.hbase.coprocessor;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.MemstoreSize;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Test that verifies we do not have memstore size negative when a postPut/Delete hook 

hbase-site git commit: INFRA-10751 Empty commit

2017-04-20 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 662ea7dcb -> 9c843314d


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/9c843314
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/9c843314
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/9c843314

Branch: refs/heads/asf-site
Commit: 9c843314d220df62b5e76dbd8f66cbbdf4183480
Parents: 662ea7d
Author: jenkins 
Authored: Thu Apr 20 15:01:47 2017 +
Committer: jenkins 
Committed: Thu Apr 20 15:01:47 2017 +

--

--




[43/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
[diff of regenerated Javadoc HTML omitted]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
[diff of regenerated Javadoc HTML omitted]

[37/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
[diff of regenerated Javadoc HTML omitted]

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
[diff of regenerated source cross-reference HTML omitted]

[48/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
[diff of regenerated source cross-reference HTML omitted]

[20/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
[diff of regenerated source cross-reference HTML omitted]

[42/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
[diff of regenerated Javadoc HTML omitted]

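The hunk above shows AsyncAdmin picking up replication-peer management methods. As a rough usage sketch only (the AsyncAdmin instance, the peer id and the cluster key below are assumptions for illustration, not taken from the diff), the new calls compose like any other CompletableFuture-based admin operation:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.concurrent.CompletableFuture;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.AsyncAdmin;
    import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

    public class ReplicationPeerSketch {
      // Adds a peer, then appends a table-cf mapping once the peer exists.
      // "peer1" and the cluster key are illustrative values; setClusterKey is an assumed setter.
      static CompletableFuture<Void> addPeerAndTableCfs(AsyncAdmin admin) {
        ReplicationPeerConfig peerConfig = new ReplicationPeerConfig();
        peerConfig.setClusterKey("slave-zk:2181:/hbase");   // assumed cluster key format
        return admin.addReplicationPeer("peer1", peerConfig)
            .thenCompose(v -> admin.appendReplicationPeerTableCFs("peer1",
                Collections.singletonMap(TableName.valueOf("t1"),
                    Arrays.asList("cf1", "cf2"))));
      }
    }
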
[45/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index a71edcc..3491926 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -26,7 +26,7 @@ under the License.
 2007 - 2017 The Apache Software Foundation
 
   File: 2146,
- Errors: 14215,
+ Errors: 14236,
  Warnings: 0,
  Infos: 0
   
@@ -9995,7 +9995,7 @@ under the License.
   0
 
 
-  28
+  31
 
   
   
@@ -11017,7 +11017,7 @@ under the License.
   0
 
 
-  9
+  10
 
   
   
@@ -15721,7 +15721,7 @@ under the License.
   0
 
 
-  24
+  38
 
   
   
@@ -19109,7 +19109,7 @@ under the License.
   0
 
 
-  13
+  15
 
   
   
@@ -29021,7 +29021,7 @@ under the License.
   0
 
 
-  80
+  81
 
   
   

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/coc.html
--
diff --git a/coc.html b/coc.html
index 3f8a6c6..ce6b621 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Code of Conduct Policy
@@ -380,7 +380,7 @@ email to mailto:priv...@hbase.apache.org;>the priv
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-04-19
+  Last Published: 
2017-04-20
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/cygwin.html
--
diff --git a/cygwin.html b/cygwin.html
index 8c52606..ce2b4ba 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Installing Apache HBase (TM) on Windows using 
Cygwin
 
@@ -679,7 +679,7 @@ Now your HBase server is running, start 
coding and build that next
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-04-19
+  Last Published: 
2017-04-20
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index 0327086..9e35c8f 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependencies
 
@@ -524,7 +524,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-04-19
+  Last Published: 
2017-04-20
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 0b5096f..251ae23 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Reactor Dependency Convergence
 
@@ -293,10 +293,10 @@
 33
 
 Number of dependencies (NOD):
-95
+93
 
 Number of unique artifacts (NOA):
-96
+94
 
 Number of SNAPSHOT artifacts (NOS):
 0
@@ -1834,31 +1834,7 @@
 1.7.7
 
 
-http://hbase.apache.org/hbase-testing-util;>org.apache.hbase:hbase-testing-util
-
-tomcat:jasper-compiler
-
-
-
-
-
-
-5.5.23
-
-
-http://hbase.apache.org/hbase-server;>org.apache.hbase:hbase-server
-
-tomcat:jasper-runtime
-
-
-
-
-
-
-5.5.23
-
-
-http://hbase.apache.org/hbase-server;>org.apache.hbase:hbase-server
+http://hbase.apache.org/hbase-testing-util;>org.apache.hbase:hbase-testing-util
   
   
 
@@ -1871,7 +1847,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-04-19
+  Last Published: 
2017-04-20
 
 
 


[16/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TableOperator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TableOperator.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TableOperator.html
index ac4a9b3..be839b7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TableOperator.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TableOperator.html
@@ -30,212 +30,212 @@
 022import java.io.IOException;
 023import java.util.ArrayList;
 024import java.util.Arrays;
-025import java.util.LinkedList;
-026import java.util.List;
-027import java.util.Optional;
-028import 
java.util.concurrent.CompletableFuture;
-029import java.util.concurrent.TimeUnit;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import java.util.function.BiConsumer;
-032import java.util.regex.Pattern;
-033
-034import 
com.google.common.annotations.VisibleForTesting;
-035import org.apache.commons.logging.Log;
-036import 
org.apache.commons.logging.LogFactory;
-037import 
org.apache.hadoop.hbase.HColumnDescriptor;
-038import 
org.apache.hadoop.hbase.HRegionInfo;
-039import 
org.apache.hadoop.hbase.HRegionLocation;
-040import 
org.apache.hadoop.hbase.HTableDescriptor;
-041import 
org.apache.hadoop.hbase.MetaTableAccessor;
-042import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-043import 
org.apache.hadoop.hbase.NotServingRegionException;
-044import 
org.apache.hadoop.hbase.RegionLocations;
-045import 
org.apache.hadoop.hbase.ServerName;
-046import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-047import 
org.apache.hadoop.hbase.HConstants;
-048import 
org.apache.hadoop.hbase.TableName;
-049import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-050import 
org.apache.hadoop.hbase.TableNotFoundException;
-051import 
org.apache.hadoop.hbase.UnknownRegionException;
-052import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-053import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-054import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-055import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-057import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-058import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-059import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-060import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-061import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-085import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-086import 

[33/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
index e763538..09b6387 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -26,470 +26,562 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import java.util.List;
-021import 
java.util.concurrent.CompletableFuture;
-022import java.util.regex.Pattern;
-023
-024import 
org.apache.hadoop.hbase.HColumnDescriptor;
-025import 
org.apache.hadoop.hbase.HRegionInfo;
-026import 
org.apache.hadoop.hbase.HTableDescriptor;
-027import 
org.apache.hadoop.hbase.ServerName;
-028import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-029import 
org.apache.hadoop.hbase.TableName;
-030import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-031import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-032import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-033import 
org.apache.hadoop.hbase.util.Pair;
-034
-035/**
-036 *  The asynchronous administrative API 
for HBase.
-037 */
-038@InterfaceAudience.Public
-039public interface AsyncAdmin {
-040
-041  /**
-042   * @return Async Connection used by 
this object.
-043   */
-044  AsyncConnectionImpl getConnection();
+021import java.util.Collection;
+022import java.util.Map;
+023import 
java.util.concurrent.CompletableFuture;
+024import java.util.regex.Pattern;
+025
+026import 
org.apache.hadoop.hbase.HColumnDescriptor;
+027import 
org.apache.hadoop.hbase.HRegionInfo;
+028import 
org.apache.hadoop.hbase.HTableDescriptor;
+029import 
org.apache.hadoop.hbase.ServerName;
+030import 
org.apache.hadoop.hbase.NamespaceDescriptor;
+031import 
org.apache.hadoop.hbase.TableName;
+032import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+033import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
+034import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
+035import 
org.apache.hadoop.hbase.client.replication.TableCFs;
+036import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
+037import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
+038import 
org.apache.hadoop.hbase.util.Pair;
+039
+040/**
+041 *  The asynchronous administrative API 
for HBase.
+042 */
+043@InterfaceAudience.Public
+044public interface AsyncAdmin {
 045
 046  /**
-047   * @param tableName Table to check.
-048   * @return True if table exists already. The return value will be wrapped by a
-049   * {@link CompletableFuture}.
-050   */
-051  CompletableFuture<Boolean> tableExists(final TableName tableName);
-052
-053  /**
-054   * List all the userspace tables.
-055   * @return - returns an array of HTableDescriptors wrapped by a {@link CompletableFuture}.
-056   * @see #listTables(Pattern, boolean)
-057   */
-058  CompletableFuture<HTableDescriptor[]> listTables();
-059
-060  /**
-061   * List all the tables matching the given pattern.
-062   * @param regex The regular expression to match against
-063   * @param includeSysTables False to match only against userspace tables
-064   * @return - returns an array of HTableDescriptors wrapped by a {@link CompletableFuture}.
-065   * @see #listTables(Pattern, boolean)
-066   */
-067  CompletableFuture<HTableDescriptor[]> listTables(String regex, boolean includeSysTables);
-068
-069  /**
-070   * List all the tables matching the given pattern.
-071   * @param pattern The compiled regular expression to match against
-072   * @param includeSysTables False to match only against userspace tables
-073   * @return - returns an array of HTableDescriptors wrapped by a {@link CompletableFuture}.
-074   */
-075  CompletableFuture<HTableDescriptor[]> listTables(Pattern pattern, boolean includeSysTables);
-076
-077  /**
-078   * List all of the names of userspace tables.
-079   * @return TableName[] an array of table names wrapped by a {@link CompletableFuture}.
-080   * @see #listTableNames(Pattern, boolean)
-081   */
-082  CompletableFuture<TableName[]> listTableNames();
-083
-084  /**
-085   * List all of the names of userspace tables.
-086   * @param regex The regular expression to match against
-087   * @param includeSysTables False to match only against userspace tables
-088   * @return TableName[] an array of table names wrapped by a {@link CompletableFuture}.
-089   * @see #listTableNames(Pattern, boolean)
-090   */
-091  CompletableFuture<TableName[]> listTableNames(final String regex, final boolean includeSysTables);
-092
-093  /**
-094   * List all of the names of userspace tables.
-095   * @param pattern The regular expression to match against
-096   * @param includeSysTables False to match only against userspace tables
-097   * @return TableName[] an array of 
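
The interface comments above describe tableExists, listTables and listTableNames, all returning values wrapped in CompletableFuture. A minimal sketch of how those pieces are driven from client code (the admin variable and the table/pattern literals are assumptions for illustration):

    import java.util.regex.Pattern;

    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.AsyncAdmin;

    public class ListTablesSketch {
      static void printUserTables(AsyncAdmin admin) {
        admin.tableExists(TableName.valueOf("demo"))
             .thenAccept(exists -> System.out.println("demo exists: " + exists));
        admin.listTables(Pattern.compile("demo.*"), false)   // false = userspace tables only
             .thenAccept(descriptors -> {
               for (HTableDescriptor htd : descriptors) {
                 System.out.println(htd.getTableName());
               }
             });
      }
    }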

[50/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html 
b/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
index 17c9e19..9c387f2 100644
--- a/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -102,7 +102,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public interface AsyncAdmin
+public interface AsyncAdmin
 The asynchronous administrative API for HBase.
 
 
@@ -131,68 +131,82 @@ public interface 
 CompletableFuture<Void>
-assign(byte[] regionName)
+addReplicationPeer(String peerId, ReplicationPeerConfig peerConfig)
+Add a new replication peer for replicating data to slave cluster
+
+CompletableFuture<Void>
+appendReplicationPeerTableCFs(String id, Map<TableName, ? extends Collection<String>> tableCfs)
+Append the replicable table-cf config of the specified peer
+
+CompletableFuture<Void>
+assign(byte[] regionName)
+
 CompletableFuture<Boolean>
 balancer()
 Invoke the balancer.
 
 CompletableFuture<Boolean>
 balancer(boolean force)
 Invoke the balancer.
 
 CompletableFuture<Void>
 closeRegion(byte[] regionname,
             String serverName)
 Close a 
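
The remaining rows in the summary above follow the same pattern; a brief, hypothetical sketch (the region name and server name are placeholders, and blocking with join() is only for the example):

    import org.apache.hadoop.hbase.client.AsyncAdmin;
    import org.apache.hadoop.hbase.util.Bytes;

    public class BalancerSketch {
      static void run(AsyncAdmin admin) {
        admin.balancer(true)                       // force=true, per balancer(boolean force)
             .thenAccept(ran -> System.out.println("balancer ran: " + ran));
        // closeRegion(byte[] regionname, String serverName), both shown in the summary
        admin.closeRegion(Bytes.toBytes("encoded-region-name"), "host,16020,1492654321000")
             .join();
      }
    }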

[32/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
index ac4a9b3..be839b7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
@@ -30,212 +30,212 @@
 022import java.io.IOException;
 023import java.util.ArrayList;
 024import java.util.Arrays;
-025import java.util.LinkedList;
-026import java.util.List;
-027import java.util.Optional;
-028import 
java.util.concurrent.CompletableFuture;
-029import java.util.concurrent.TimeUnit;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import java.util.function.BiConsumer;
-032import java.util.regex.Pattern;
-033
-034import 
com.google.common.annotations.VisibleForTesting;
-035import org.apache.commons.logging.Log;
-036import 
org.apache.commons.logging.LogFactory;
-037import 
org.apache.hadoop.hbase.HColumnDescriptor;
-038import 
org.apache.hadoop.hbase.HRegionInfo;
-039import 
org.apache.hadoop.hbase.HRegionLocation;
-040import 
org.apache.hadoop.hbase.HTableDescriptor;
-041import 
org.apache.hadoop.hbase.MetaTableAccessor;
-042import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-043import 
org.apache.hadoop.hbase.NotServingRegionException;
-044import 
org.apache.hadoop.hbase.RegionLocations;
-045import 
org.apache.hadoop.hbase.ServerName;
-046import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-047import 
org.apache.hadoop.hbase.HConstants;
-048import 
org.apache.hadoop.hbase.TableName;
-049import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-050import 
org.apache.hadoop.hbase.TableNotFoundException;
-051import 
org.apache.hadoop.hbase.UnknownRegionException;
-052import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-053import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-054import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-055import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-057import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-058import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-059import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-060import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-061import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-085import 

[44/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html 
b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
index 38b2b41..0948fd9 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":9,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":9,"i21":9,"i22":10,"i23":9,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":9,"i31":10,"i32":9,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":9,"i52":10,"i53":9,"i54":9,"i55":9,"i56":10,"i57":9,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":9,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":9,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":9,"i21":9,"i22":10,"i23":9,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":9,"i31":10,"i32":9,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":9,"i52":10,"i53":9,"i54":9,"i55":9,"i56":10,"i57":9,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":9,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public final class BackupSystemTable
+public final class BackupSystemTable
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true;
 title="class or interface in java.io">Closeable
 This class provides API to access backup system table
@@ -752,35 +752,39 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
 
 
 private void
-waitForSystemTable(Adminadmin)
+verifyNamespaceExists(Adminadmin)
 
 
+private void
+waitForSystemTable(Adminadmin)
+
+
 void
 writeBackupStartCode(http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">LongstartCode,
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringbackupRoot)
 Write the start code (timestamp) to backup system 
table.
 
 
-
+
 void
 writeBulkLoadedFiles(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableNamesTableList,
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.fs.Path[]maps,
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringbackupId)
 
-
+
 void
 writeFilesForBulkLoadPreCommit(TableNametabName,
   byte[]region,
   byte[]family,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListPairorg.apache.hadoop.fs.Path,org.apache.hadoop.fs.Pathpairs)
 
-
+
 void
 writePathsPostBulkLoad(TableNametabName,
   byte[]region,
   http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.fs.PathfinalPaths)
 
-
+
 void
 writeRegionServerLastLogRollResult(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or 

[19/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
index ac4a9b3..be839b7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer.html
@@ -30,212 +30,212 @@
 022import java.io.IOException;
 023import java.util.ArrayList;
 024import java.util.Arrays;
-025import java.util.LinkedList;
-026import java.util.List;
-027import java.util.Optional;
-028import 
java.util.concurrent.CompletableFuture;
-029import java.util.concurrent.TimeUnit;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import java.util.function.BiConsumer;
-032import java.util.regex.Pattern;
-033
-034import 
com.google.common.annotations.VisibleForTesting;
-035import org.apache.commons.logging.Log;
-036import 
org.apache.commons.logging.LogFactory;
-037import 
org.apache.hadoop.hbase.HColumnDescriptor;
-038import 
org.apache.hadoop.hbase.HRegionInfo;
-039import 
org.apache.hadoop.hbase.HRegionLocation;
-040import 
org.apache.hadoop.hbase.HTableDescriptor;
-041import 
org.apache.hadoop.hbase.MetaTableAccessor;
-042import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-043import 
org.apache.hadoop.hbase.NotServingRegionException;
-044import 
org.apache.hadoop.hbase.RegionLocations;
-045import 
org.apache.hadoop.hbase.ServerName;
-046import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-047import 
org.apache.hadoop.hbase.HConstants;
-048import 
org.apache.hadoop.hbase.TableName;
-049import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-050import 
org.apache.hadoop.hbase.TableNotFoundException;
-051import 
org.apache.hadoop.hbase.UnknownRegionException;
-052import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-053import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-054import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-055import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-057import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-058import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-059import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-060import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-061import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-085import 

[34/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
index 1b25783..2ccefa4 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
@@ -50,1725 +50,1744 @@
 042import 
org.apache.hadoop.hbase.HBaseConfiguration;
 043import 
org.apache.hadoop.hbase.HColumnDescriptor;
 044import 
org.apache.hadoop.hbase.HTableDescriptor;
-045import 
org.apache.hadoop.hbase.ServerName;
-046import 
org.apache.hadoop.hbase.TableName;
-047import 
org.apache.hadoop.hbase.backup.BackupInfo;
-048import 
org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
-049import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants;
-050import 
org.apache.hadoop.hbase.backup.BackupType;
-051import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
-052import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-053import 
org.apache.hadoop.hbase.client.Admin;
-054import 
org.apache.hadoop.hbase.client.Connection;
-055import 
org.apache.hadoop.hbase.client.Delete;
-056import 
org.apache.hadoop.hbase.client.Get;
-057import 
org.apache.hadoop.hbase.client.Put;
-058import 
org.apache.hadoop.hbase.client.Result;
-059import 
org.apache.hadoop.hbase.client.ResultScanner;
-060import 
org.apache.hadoop.hbase.client.Scan;
-061import 
org.apache.hadoop.hbase.client.Table;
-062import 
org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-064import 
org.apache.hadoop.hbase.util.Bytes;
-065import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-066import 
org.apache.hadoop.hbase.util.Pair;
-067
-068/**
-069 * This class provides API to access backup system table<br>
-070 *
-071 * Backup system table schema:<br>
-072 * <p><ul>
-073 * <li>1. Backup sessions rowkey= "session:"+backupId; value =serialized BackupInfo</li>
-074 * <li>2. Backup start code rowkey = "startcode:"+backupRoot; value = startcode</li>
-075 * <li>3. Incremental backup set rowkey="incrbackupset:"+backupRoot; value=[list of tables]</li>
-076 * <li>4. Table-RS-timestamp map rowkey="trslm:"+backupRoot+table_name;
-077 * value = map[RS-> last WAL timestamp]</li>
-078 * <li>5. RS - WAL ts map rowkey="rslogts:"+backupRoot +server; value = last WAL timestamp</li>
-079 * <li>6. WALs recorded rowkey="wals:"+WAL unique file name;
-080 * value = backupId and full WAL file name</li>
-081 * </ul></p>
-083@InterfaceAudience.Private
-084public final class BackupSystemTable 
implements Closeable {
-085  private static final Log LOG = 
LogFactory.getLog(BackupSystemTable.class);
-086
-087  static class WALItem {
-088String backupId;
-089String walFile;
-090String backupRoot;
-091
-092WALItem(String backupId, String 
walFile, String backupRoot) {
-093  this.backupId = backupId;
-094  this.walFile = walFile;
-095  this.backupRoot = backupRoot;
-096}
-097
-098public String getBackupId() {
-099  return backupId;
-100}
-101
-102public String getWalFile() {
-103  return walFile;
-104}
-105
-106public String getBackupRoot() {
-107  return backupRoot;
-108}
-109
-110@Override
-111public String toString() {
-112  return Path.SEPARATOR + backupRoot 
+ Path.SEPARATOR + backupId + Path.SEPARATOR + walFile;
-113}
-114
-115  }
-116
-117  private TableName tableName;
-118  /**
-119   *  Stores backup sessions (contexts)
-120   */
-121  final static byte[] SESSIONS_FAMILY = 
"session".getBytes();
-122  /**
-123   * Stores other meta
-124   */
-125  final static byte[] META_FAMILY = 
"meta".getBytes();
-126  final static byte[] BULK_LOAD_FAMILY = 
"bulk".getBytes();
-127  /**
-128   *  Connection to HBase cluster, shared 
among all instances
-129   */
-130  private final Connection connection;
-131
+045import 
org.apache.hadoop.hbase.NamespaceDescriptor;
+046import 
org.apache.hadoop.hbase.ServerName;
+047import 
org.apache.hadoop.hbase.TableName;
+048import 
org.apache.hadoop.hbase.backup.BackupInfo;
+049import 
org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
+050import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants;
+051import 
org.apache.hadoop.hbase.backup.BackupType;
+052import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
+053import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+054import 
org.apache.hadoop.hbase.client.Admin;
+055import 
org.apache.hadoop.hbase.client.Connection;
+056import 
org.apache.hadoop.hbase.client.Delete;
+057import 
org.apache.hadoop.hbase.client.Get;
+058import 
org.apache.hadoop.hbase.client.Put;
+059import 
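
The class comment in the old hunk above spells out the backup system table's rowkey layout. Purely as an illustration of that layout (the string prefixes are quoted from the comment; the helper and variable names are invented for the sketch):

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.util.Bytes;

    public class BackupRowKeySketch {
      // Mirrors the schema comment: "session:"+backupId, "startcode:"+backupRoot,
      // "trslm:"+backupRoot+table_name, "rslogts:"+backupRoot+server, "wals:"+walName.
      static byte[] sessionRow(String backupId)     { return Bytes.toBytes("session:" + backupId); }
      static byte[] startCodeRow(String backupRoot) { return Bytes.toBytes("startcode:" + backupRoot); }
      static byte[] tableRsTimestampRow(String backupRoot, TableName table) {
        return Bytes.toBytes("trslm:" + backupRoot + table.getNameAsString());
      }
      static byte[] walRow(String walFileName)      { return Bytes.toBytes("wals:" + walFileName); }
    }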

[26/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
index ac4a9b3..be839b7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
@@ -30,212 +30,212 @@
 022import java.io.IOException;
 023import java.util.ArrayList;
 024import java.util.Arrays;
-025import java.util.LinkedList;
-026import java.util.List;
-027import java.util.Optional;
-028import 
java.util.concurrent.CompletableFuture;
-029import java.util.concurrent.TimeUnit;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import java.util.function.BiConsumer;
-032import java.util.regex.Pattern;
-033
-034import 
com.google.common.annotations.VisibleForTesting;
-035import org.apache.commons.logging.Log;
-036import 
org.apache.commons.logging.LogFactory;
-037import 
org.apache.hadoop.hbase.HColumnDescriptor;
-038import 
org.apache.hadoop.hbase.HRegionInfo;
-039import 
org.apache.hadoop.hbase.HRegionLocation;
-040import 
org.apache.hadoop.hbase.HTableDescriptor;
-041import 
org.apache.hadoop.hbase.MetaTableAccessor;
-042import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-043import 
org.apache.hadoop.hbase.NotServingRegionException;
-044import 
org.apache.hadoop.hbase.RegionLocations;
-045import 
org.apache.hadoop.hbase.ServerName;
-046import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-047import 
org.apache.hadoop.hbase.HConstants;
-048import 
org.apache.hadoop.hbase.TableName;
-049import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-050import 
org.apache.hadoop.hbase.TableNotFoundException;
-051import 
org.apache.hadoop.hbase.UnknownRegionException;
-052import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-053import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-054import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-055import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-057import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-058import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-059import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-060import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-061import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-085import 

[41/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
index aa81f42..2ad5484 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private class AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer
+private class AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer
 extends AsyncHBaseAdmin.TableProcedureBiConsumer
 
 
@@ -240,7 +240,7 @@ extends 
 
 AddColumnFamilyProcedureBiConsumer
-AddColumnFamilyProcedureBiConsumer(AsyncAdminadmin,
+AddColumnFamilyProcedureBiConsumer(AsyncAdminadmin,
TableNametableName)
 
 
@@ -258,7 +258,7 @@ extends 
 
 getOperationType
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetOperationType()
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetOperationType()
 
 Specified by:
 getOperationTypein
 classAsyncHBaseAdmin.TableProcedureBiConsumer

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html
index 0f7725a..1e4501d 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 http://docs.oracle.com/javase/8/docs/api/java/lang/FunctionalInterface.html?is-external=true;
 title="class or interface in java.lang">@FunctionalInterface
-private static interface AsyncHBaseAdmin.AdminRpcCallRESP,REQ
+private static interface AsyncHBaseAdmin.AdminRpcCallRESP,REQ
 
 
 
@@ -159,7 +159,7 @@ private static interface 
 
 call
-voidcall(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.Interfacestub,
+voidcall(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.Interfacestub,
   HBaseRpcControllercontroller,
   REQreq,
   org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallbackRESPdone)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.Converter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.Converter.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.Converter.html
index 49c05db..9c001e7 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.Converter.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.Converter.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 http://docs.oracle.com/javase/8/docs/api/java/lang/FunctionalInterface.html?is-external=true;
 title="class or interface in java.lang">@FunctionalInterface
-private static interface AsyncHBaseAdmin.ConverterD,S
+private static interface AsyncHBaseAdmin.ConverterD,S
 
 
 
@@ -156,7 +156,7 @@ private static interface 
 
 convert
-Dconvert(Ssrc)
+Dconvert(Ssrc)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
index de862cd..6909daa 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private class AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer
+private class AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer
 extends 

[39/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ReplicationState.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ReplicationState.html 
b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ReplicationState.html
index ec37af9..6da13e0 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ReplicationState.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ReplicationState.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static enum HBaseAdmin.ReplicationState
+private static enum HBaseAdmin.ReplicationState
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumHBaseAdmin.ReplicationState
 This enum indicates the current state of the replication 
for a given table.
 
@@ -214,7 +214,7 @@ the order they are declared.
 
 
 ENABLED
-public static finalHBaseAdmin.ReplicationState 
ENABLED
+public static finalHBaseAdmin.ReplicationState 
ENABLED
 
 
 
@@ -223,7 +223,7 @@ the order they are declared.
 
 
 MIXED
-public static finalHBaseAdmin.ReplicationState 
MIXED
+public static finalHBaseAdmin.ReplicationState 
MIXED
 
 
 
@@ -232,7 +232,7 @@ the order they are declared.
 
 
 DISABLED
-public static finalHBaseAdmin.ReplicationState 
DISABLED
+public static finalHBaseAdmin.ReplicationState 
DISABLED
 
 
 
@@ -249,7 +249,7 @@ the order they are declared.
 
 
 values
-public staticHBaseAdmin.ReplicationState[]values()
+public staticHBaseAdmin.ReplicationState[]values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -269,7 +269,7 @@ for (HBaseAdmin.ReplicationState c : 
HBaseAdmin.ReplicationState.values())
 
 
 valueOf
-public staticHBaseAdmin.ReplicationStatevalueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+public staticHBaseAdmin.ReplicationStatevalueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
index 2d30231..b20e6ef 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
@@ -6365,7 +6365,7 @@ publicbyte[][]
 
 removeReplicationPeerTableCFs
-public void removeReplicationPeerTableCFs(String id,
+public void removeReplicationPeerTableCFs(String id,
                                           Map<TableName, ? extends Collection<String>> tableCfs)
                                    throws ReplicationException,
                                           IOException
@@ -6389,7 +6389,7 @@ publicbyte[][]
 
 listReplicationPeers
-public List<ReplicationPeerDescription> listReplicationPeers()
+public List<ReplicationPeerDescription> listReplicationPeers()
                                                        throws IOException
 Description copied from 
interface:Admin
 Return a list of replication peers.
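
Taken together, the removeReplicationPeerTableCFs and listReplicationPeers signatures above suggest the following blocking-API sketch (the Admin instance, the peer id and the accessor used on ReplicationPeerDescription are assumptions for illustration, not part of the diff):

    import java.util.Arrays;
    import java.util.Collections;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;

    public class ReplicationAdminSketch {
      static void trimPeer(Admin admin) throws Exception {
        // Drop cf1 of table t1 from the peer's replicable table-cf config.
        admin.removeReplicationPeerTableCFs("peer1",
            Collections.singletonMap(TableName.valueOf("t1"), Arrays.asList("cf1")));
        for (ReplicationPeerDescription peer : admin.listReplicationPeers()) {
          System.out.println(peer.getPeerId());   // getPeerId() is an assumed accessor
        }
      }
    }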
@@ -6409,7 +6409,7 @@ publicbyte[][]
 
 listReplicationPeers
-publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 

[27/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
index ac4a9b3..be839b7 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer.html
@@ -30,212 +30,212 @@
[Source-view HTML diff: the rendered AsyncHBaseAdmin.java listing embedded in this page is regenerated.
Only the removed side of the hunk appears in this excerpt, i.e. the old rendering of the import section
(java.util collections, java.util.concurrent, Guava's VisibleForTesting, commons-logging,
org.apache.hadoop.hbase.* and the shaded Admin/Master protobuf request/response classes, source lines
25-85 and beyond); the replacement lines are cut off where the archived message is truncated.]

[18/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.NamespaceProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
index ac4a9b3..be839b7 100644
@@ -30,212 +30,212 @@
[Same regenerated AsyncHBaseAdmin source-view diff as in part 27/51 above, truncated at the same point;
the duplicated import-block excerpt is omitted here.]

[36/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html b/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
index 71224a6..f56b0c1 100644
--- a/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
@@ -238,40 +238,40 @@
[Javadoc field-detail diff: the generated anchors for the private final String fields bcn, bcv, filter
and format of RSStatusTmplImpl are emitted in a different order; the field declarations themselves are
unchanged.]
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index 62098ec..ba68958 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -519,14 +519,14 @@
[Package-tree diff: the enum classes listed under java.lang.Enum (PrettyPrinter.Unit, Order,
IdReadWriteLock.ReferenceType, Bytes.LexicographicalComparerHolder.PureJavaComparer and .UnsafeComparer,
PoolMap.PoolType, HBaseFsck.ErrorReporter.ERROR_CODE, ChecksumType) are re-listed in a different order;
the set of classes is unchanged.]
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index cf9fd6c..b6ac79d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -16,11 +16,11 @@
 008  @InterfaceAudience.Private
 009  public class Version {
 010    public static final String version = "2.0.0-SNAPSHOT";
-011    public static final String revision = "3acd8e4644c111560502ecc06e10d04dd204a06a";
+011    public static final String revision = "40cc666ac984e846a8c7105b771ce6bec90c4ad3";
 012    public static final String user = "jenkins";
-013    public static final String date = "Wed Apr 19 14:41:52 UTC 2017";
+013    public static final String date = "Thu Apr 20 14:41:34 UTC 2017";
 014    public static final String url = "git://asf920.gq1.ygridcore.net/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";
-015    public static final String srcChecksum = "f9e331dc39b490ff4b8f95f772f9ad5b";
+015  
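
Only the build metadata changes in the regenerated Version class. As a hedged aside, those constants are
plain public static final String fields, so they can be read directly; a minimal sketch, assuming an HBase
jar containing org.apache.hadoop.hbase.Version is on the classpath (the class is @InterfaceAudience.Private,
so this is purely illustrative and the PrintBuildInfo class name is made up):

// Illustrative only: reads the generated build constants shown in the diff above.
public class PrintBuildInfo {
  public static void main(String[] args) {
    System.out.println("HBase version:  " + org.apache.hadoop.hbase.Version.version);
    System.out.println("Git revision:   " + org.apache.hadoop.hbase.Version.revision);
    System.out.println("Build date:     " + org.apache.hadoop.hbase.Version.date);
  }
}
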

[29/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
index ac4a9b3..be839b7 100644
@@ -30,212 +30,212 @@
[Same regenerated AsyncHBaseAdmin source-view diff as in part 27/51 above, truncated at the same point;
the duplicated import-block excerpt is omitted.]

[28/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateTableProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
index ac4a9b3..be839b7 100644
@@ -30,212 +30,212 @@
[Same regenerated AsyncHBaseAdmin source-view diff as in part 27/51 above, truncated at the same point;
the duplicated import-block excerpt is omitted.]

[38/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 8d717f9..4df4527 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -175,14 +175,14 @@
[Package-tree diff: the filter-package enums under java.lang.Enum (FilterList.Operator,
FilterWrapper.FilterRowRetCode, FuzzyRowFilter.SatisfiesCode, BitComparator.BitwiseOp,
CompareFilter.CompareOp, Filter.ReturnCode, RegexStringComparator.EngineType, FuzzyRowFilter.Order)
are re-listed in a different order; the set of classes is unchanged.]
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index a9d63de..865e6f2 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -273,12 +273,12 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.io.hfile.HFileBlock.Writer.State
-org.apache.hadoop.hbase.io.hfile.BlockType
-org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType
+org.apache.hadoop.hbase.io.hfile.CacheConfig.ExternalBlockCaches
 org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory
 org.apache.hadoop.hbase.io.hfile.BlockPriority
-org.apache.hadoop.hbase.io.hfile.CacheConfig.ExternalBlockCaches
+org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType
+org.apache.hadoop.hbase.io.hfile.BlockType
+org.apache.hadoop.hbase.io.hfile.HFileBlock.Writer.State
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
index 67bfe99..56dc824 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
@@ -326,9 +326,9 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.ipc.MetricsHBaseServerSourceFactoryImpl.SourceStorage
-org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler.BufferCallAction
 org.apache.hadoop.hbase.ipc.CallEvent.Type
+org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler.BufferCallAction
+org.apache.hadoop.hbase.ipc.MetricsHBaseServerSourceFactoryImpl.SourceStorage
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
index 10c9864..2a07d75 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
+++ 

[31/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html
index ac4a9b3..be839b7 100644
@@ -30,212 +30,212 @@
[Same regenerated AsyncHBaseAdmin source-view diff as in part 27/51 above (this copy runs a couple of
import lines further before the message is truncated); the duplicated excerpt is omitted.]
-086import 

[12/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
index b798d4b..8c56a67 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
@@ -3877,425 +3877,371 @@
[Source-view HTML diff of the rendered HBaseAdmin.java. The removed side shown in this excerpt covers
source lines roughly 3872-3960: the table-CFs merge loop at the end of appendReplicationPeerTableCFs
(append the requested column families per table, treating a null or empty CF list as "replicate all
column families"), the whole of removeReplicationPeerTableCFs(String id,
Map<TableName, ? extends Collection<String>> tableCfs) with its validation branches (a
ReplicationException when a table or CF to remove is not in the peer's table-cfs config), and the
listReplicationPeers() / listReplicationPeers(String regex) / listReplicationPeers(Pattern) overloads
that delegate to a MasterCallable. The hunk shrinks from 425 to 371 rendered lines; the replacement side
is cut off where the archived message is truncated, leaving the stray fragments below.]
master.listReplicationPeers(

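The removed rendering above is the only place in this excerpt where the table-CFs append rule is spelled
out, so here is a minimal, self-contained sketch of that logic. It is illustrative, not HBase API: String
keys stand in for TableName, the TableCfsMergeSketch class and appendTableCfs method names are made up for
the example, and the real method operates on a ReplicationPeerConfig and finishes by calling
updateReplicationPeerConfig(id, peerConfig).

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

// Hedged sketch of the "append table-CFs" rule visible in the removed HBaseAdmin lines.
public final class TableCfsMergeSketch {

  static void appendTableCfs(Map<String, List<String>> preTableCfs,
      Map<String, ? extends Collection<String>> toAppend) {
    for (Map.Entry<String, ? extends Collection<String>> entry : toAppend.entrySet()) {
      String table = entry.getKey();
      Collection<String> appendCfs = entry.getValue();
      if (preTableCfs.containsKey(table)) {
        List<String> cfs = preTableCfs.get(table);
        // A null or empty CF list on either side means "replicate every column family".
        if (cfs == null || appendCfs == null || appendCfs.isEmpty()) {
          preTableCfs.put(table, null);
        } else {
          Set<String> cfSet = new HashSet<>(cfs);  // de-duplicate while merging
          cfSet.addAll(appendCfs);
          preTableCfs.put(table, new ArrayList<>(cfSet));
        }
      } else {
        if (appendCfs == null || appendCfs.isEmpty()) {
          preTableCfs.put(table, null);
        } else {
          preTableCfs.put(table, new ArrayList<>(appendCfs));
        }
      }
    }
  }

  // Usage: append CF "c" to a table that already replicates "a" and "b".
  public static void main(String[] args) {
    Map<String, List<String>> existing = new HashMap<>();
    existing.put("t1", new ArrayList<>(Arrays.asList("a", "b")));
    appendTableCfs(existing, Collections.singletonMap("t1", Arrays.asList("c")));
    System.out.println(existing);  // {t1=[a, b, c]} (order of the merged set may vary)
  }
}
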
[04/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyColumnFamilyFuture.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyColumnFamilyFuture.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyColumnFamilyFuture.html
index b798d4b..8c56a67 100644
@@ -3877,425 +3877,371 @@
[Identical regenerated HBaseAdmin source-view diff to the one summarized in part 12/51 above; the
duplicated excerpt is omitted.]
-3959
ListReplicationProtos.ReplicationPeerDescription peersList = 

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html
index b798d4b..8c56a67 100644
@@ -3877,425 +3877,371 @@
[Identical regenerated HBaseAdmin source-view diff to the one summarized in part 12/51 above; the
duplicated excerpt is omitted.]
master.listReplicationPeers(
-3960  

[22/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.MasterRpcCall.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.MasterRpcCall.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.MasterRpcCall.html
index ac4a9b3..be839b7 100644
@@ -30,212 +30,212 @@
[Same regenerated AsyncHBaseAdmin source-view diff as in part 27/51 above (this copy runs a couple of
import lines further before the message is truncated); the duplicated excerpt is omitted.]
-086import 

[46/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 54284ba..07fd427 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
[Regenerated Checkstyle aggregate report. The recoverable changes in this excerpt:
 Overall: 2146 files, total errors 14215 -> 14236 (+21).
 Per-file counts: backup/impl/BackupSystemTable.java 24 -> 38, client/AsyncAdmin.java 13 -> 15,
 client/AsyncHBaseAdmin.java 28 -> 31, client/HBaseAdmin.java 80 -> 81,
 client/replication/ReplicationSerDeHelper.java 9 -> 10.
 Per-rule counts: the import-order check 903 -> 905, UnusedImports 74 -> 76, the indentation check
 (basicOffset: 2, caseIndent: 2, lineWrappingIndentation: 2) 5068 -> 5081, the 100-column line-length
 check 691 -> 694, ParenPad 109 -> 110.
 Detail section (@@ -13332,145 +13332,229 @@): one existing indentation finding moves from line 153 to 154;
 fourteen new findings are added, namely thirteen Indentation errors for over-indented 'method def', 'for',
 'if' and closing-brace constructs at source lines 181-193 and one ParenPad error ("'(' is followed by
 whitespace.") at line 185; later findings shift down by 19 lines (290 -> 309, 390 -> 409, 422 -> 441, ...,
 1062 -> 1081). These evidently all belong to BackupSystemTable.java, whose per-file count rises by the
 same 14 and whose readBulkloadRows entry appears among the shifted findings. The excerpt is truncated
 mid-table.]

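As an aside, the two checks behind the new findings are easy to picture. A tiny illustrative snippet (not
taken from the HBase sources; the class and method names are invented) that would trip both, next to a
clean version:

import java.util.List;

public class CheckstyleIllustration {

  // Trips ParenPad ("'(' is followed by whitespace") and Indentation
  // (body line indented deeper than the configured two-space offsets allow).
  static boolean badStyle(List<String> rows) {
    if ( rows.isEmpty()) {
          return true;
    }
    return false;
  }

  // Clean under the same configuration: no space after '(', two-space indent per level.
  static boolean goodStyle(List<String> rows) {
    if (rows.isEmpty()) {
      return true;
    }
    return false;
  }
}
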
[25/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
index ac4a9b3..be839b7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DeleteTableProcedureBiConsumer.html
@@ -30,212 +30,212 @@
 022import java.io.IOException;
 023import java.util.ArrayList;
 024import java.util.Arrays;
-025import java.util.LinkedList;
-026import java.util.List;
-027import java.util.Optional;
-028import 
java.util.concurrent.CompletableFuture;
-029import java.util.concurrent.TimeUnit;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import java.util.function.BiConsumer;
-032import java.util.regex.Pattern;
-033
-034import 
com.google.common.annotations.VisibleForTesting;
-035import org.apache.commons.logging.Log;
-036import 
org.apache.commons.logging.LogFactory;
-037import 
org.apache.hadoop.hbase.HColumnDescriptor;
-038import 
org.apache.hadoop.hbase.HRegionInfo;
-039import 
org.apache.hadoop.hbase.HRegionLocation;
-040import 
org.apache.hadoop.hbase.HTableDescriptor;
-041import 
org.apache.hadoop.hbase.MetaTableAccessor;
-042import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-043import 
org.apache.hadoop.hbase.NotServingRegionException;
-044import 
org.apache.hadoop.hbase.RegionLocations;
-045import 
org.apache.hadoop.hbase.ServerName;
-046import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-047import 
org.apache.hadoop.hbase.HConstants;
-048import 
org.apache.hadoop.hbase.TableName;
-049import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-050import 
org.apache.hadoop.hbase.TableNotFoundException;
-051import 
org.apache.hadoop.hbase.UnknownRegionException;
-052import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-053import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-054import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-055import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-057import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-058import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-059import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-060import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-061import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-085import 

[14/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
index ac4a9b3..be839b7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
@@ -30,212 +30,212 @@
 022import java.io.IOException;
 023import java.util.ArrayList;
 024import java.util.Arrays;
-025import java.util.LinkedList;
-026import java.util.List;
-027import java.util.Optional;
-028import 
java.util.concurrent.CompletableFuture;
-029import java.util.concurrent.TimeUnit;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import java.util.function.BiConsumer;
-032import java.util.regex.Pattern;
-033
-034import 
com.google.common.annotations.VisibleForTesting;
-035import org.apache.commons.logging.Log;
-036import 
org.apache.commons.logging.LogFactory;
-037import 
org.apache.hadoop.hbase.HColumnDescriptor;
-038import 
org.apache.hadoop.hbase.HRegionInfo;
-039import 
org.apache.hadoop.hbase.HRegionLocation;
-040import 
org.apache.hadoop.hbase.HTableDescriptor;
-041import 
org.apache.hadoop.hbase.MetaTableAccessor;
-042import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-043import 
org.apache.hadoop.hbase.NotServingRegionException;
-044import 
org.apache.hadoop.hbase.RegionLocations;
-045import 
org.apache.hadoop.hbase.ServerName;
-046import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-047import 
org.apache.hadoop.hbase.HConstants;
-048import 
org.apache.hadoop.hbase.TableName;
-049import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-050import 
org.apache.hadoop.hbase.TableNotFoundException;
-051import 
org.apache.hadoop.hbase.UnknownRegionException;
-052import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-053import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-054import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-055import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-057import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-058import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-059import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-060import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-061import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-085import 

[47/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/bulk-loads.html
--
diff --git a/bulk-loads.html b/bulk-loads.html
index 0e23717..f259f69 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase   
   Bulk Loads in Apache HBase (TM)
@@ -311,7 +311,7 @@ under the License. -->
 The Apache Software Foundation (https://www.apache.org/).
 All rights reserved.  
 
-  Last Published: 2017-04-19
+  Last Published: 2017-04-20
 
 
 



[49/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/apidocs/org/apache/hadoop/hbase/client/replication/class-use/TableCFs.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/client/replication/class-use/TableCFs.html 
b/apidocs/org/apache/hadoop/hbase/client/replication/class-use/TableCFs.html
index 6e5fb95..8dd04cb 100644
--- a/apidocs/org/apache/hadoop/hbase/client/replication/class-use/TableCFs.html
+++ b/apidocs/org/apache/hadoop/hbase/client/replication/class-use/TableCFs.html
@@ -105,6 +105,12 @@
 
 
 
+CompletableFuture<List<TableCFs>>
+AsyncAdmin.listReplicatedTableCFs()
+Find all table and column families that are replicated from this cluster
+
+
+
 List<TableCFs>
 Admin.listReplicatedTableCFs()
 Find all table and column families that are replicated from this cluster
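
The row added above is the asynchronous counterpart of Admin.listReplicatedTableCFs(): same query, but it returns a CompletableFuture instead of blocking. A minimal usage sketch against the AsyncAdmin interface (how the AsyncAdmin handle is obtained is outside this sketch; the helper name is made up):

import java.util.List;
import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.hbase.client.AsyncAdmin;
import org.apache.hadoop.hbase.client.replication.TableCFs;

public class ListReplicatedTableCFsSketch {
  // Prints every replicated table/column-family entry once the future completes.
  static CompletableFuture<Void> dumpReplicatedTableCFs(AsyncAdmin admin) {
    CompletableFuture<List<TableCFs>> future = admin.listReplicatedTableCFs();
    return future.thenAccept(entries -> entries.forEach(e -> System.out.println(e)));
  }
}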

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
 
b/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
index 5400453..fbfe74b 100644
--- 
a/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
+++ 
b/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
@@ -123,6 +123,21 @@
 
 
 
+Methods in org.apache.hadoop.hbase.client that return types with arguments of type ReplicationPeerConfig
+
+Modifier and Type
+Method and Description
+
+
+
+CompletableFuture<ReplicationPeerConfig>
+AsyncAdmin.getReplicationPeerConfig(String peerId)
+Returns the configured ReplicationPeerConfig for the specified peer
+
+
+
+
+
 Methods in org.apache.hadoop.hbase.client with parameters of type ReplicationPeerConfig
 
 Modifier and Type
@@ -130,12 +145,26 @@
 
 
 
+CompletableFuture<Void>
+AsyncAdmin.addReplicationPeer(String peerId,
+  ReplicationPeerConfig peerConfig)
+Add a new replication peer for replicating data to slave cluster
+
+
+
 default void
 Admin.addReplicationPeer(String peerId,
   ReplicationPeerConfig peerConfig)
 Add a new replication peer for replicating data to slave cluster
 
 
+
+CompletableFuture<Void>
+AsyncAdmin.updateReplicationPeerConfig(String peerId,
+   ReplicationPeerConfig peerConfig)
+Update the peerConfig for the specified peer
+
+
 
 default void
 Admin.updateReplicationPeerConfig(String peerId,
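
Taken together, the rows added above give AsyncAdmin the same replication-peer operations as the blocking Admin interface, each returning a CompletableFuture. A minimal add/read-back/update sketch, assuming an AsyncAdmin handle is already available (the peer id, cluster key and extra config key are illustrative, not taken from the patch):

import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.hbase.client.AsyncAdmin;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

public class ReplicationPeerSketch {
  static CompletableFuture<Void> addAndUpdatePeer(AsyncAdmin admin) {
    ReplicationPeerConfig peerConfig = new ReplicationPeerConfig();
    peerConfig.setClusterKey("zk1,zk2,zk3:2181:/hbase");  // illustrative slave cluster key
    // Add the peer, read its stored config back, then push an updated config -- all non-blocking.
    return admin.addReplicationPeer("1", peerConfig)
        .thenCompose(v -> admin.getReplicationPeerConfig("1"))
        .thenCompose(stored -> {
          stored.getConfiguration().put("custom.key", "value");  // placeholder entry, illustrative only
          return admin.updateReplicationPeerConfig("1", stored);
        });
  }
}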

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerDescription.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerDescription.html
 
b/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerDescription.html
index 9618f08..f5d4b42 100644
--- 
a/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerDescription.html
+++ 
b/apidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerDescription.html
@@ -105,11 +105,23 @@
 
 
 

[17/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ProcedureBiConsumer.html
index ac4a9b3..be839b7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ProcedureBiConsumer.html
@@ -30,212 +30,212 @@
 022import java.io.IOException;
 023import java.util.ArrayList;
 024import java.util.Arrays;
-025import java.util.LinkedList;
-026import java.util.List;
-027import java.util.Optional;
-028import 
java.util.concurrent.CompletableFuture;
-029import java.util.concurrent.TimeUnit;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import java.util.function.BiConsumer;
-032import java.util.regex.Pattern;
-033
-034import 
com.google.common.annotations.VisibleForTesting;
-035import org.apache.commons.logging.Log;
-036import 
org.apache.commons.logging.LogFactory;
-037import 
org.apache.hadoop.hbase.HColumnDescriptor;
-038import 
org.apache.hadoop.hbase.HRegionInfo;
-039import 
org.apache.hadoop.hbase.HRegionLocation;
-040import 
org.apache.hadoop.hbase.HTableDescriptor;
-041import 
org.apache.hadoop.hbase.MetaTableAccessor;
-042import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-043import 
org.apache.hadoop.hbase.NotServingRegionException;
-044import 
org.apache.hadoop.hbase.RegionLocations;
-045import 
org.apache.hadoop.hbase.ServerName;
-046import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-047import 
org.apache.hadoop.hbase.HConstants;
-048import 
org.apache.hadoop.hbase.TableName;
-049import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-050import 
org.apache.hadoop.hbase.TableNotFoundException;
-051import 
org.apache.hadoop.hbase.UnknownRegionException;
-052import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-053import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-054import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-055import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-057import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-058import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-059import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-060import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-061import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-085import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;

[40/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
index 6a84cc0..5b5663a 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -115,7 +115,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Private
  @InterfaceStability.Evolving
-public class AsyncHBaseAdmin
+public class AsyncHBaseAdmin
 extends Object
 implements AsyncAdmin
 The implementation of AsyncAdmin.
@@ -304,6 +304,13 @@ implements 
+CompletableFuture<Void>
+addReplicationPeer(String peerId,
+  ReplicationPeerConfig peerConfig)
+Add a new replication peer for replicating data to slave cluster
+
+
+
 private PREQ,PRESP,RESPhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in 
java.util.concurrent">CompletableFutureRESP
 adminCall(HBaseRpcControllercontroller,
  
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.Interfacestub,
@@ -311,29 +318,36 @@ implements AsyncHBaseAdmin.AdminRpcCallPRESP,PREQrpcCall,
  AsyncHBaseAdmin.ConverterRESP,PRESPrespConverter)
 
-
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
+appendReplicationPeerTableCFs(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringid,
+ http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapTableName,? extends http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">Collectionhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringtableCfs)
+Append the replicable table-cf config of the specified 
peer
+
+
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
 assign(byte[]regionName)
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TableProcedureBiConsumer.html
index ac4a9b3..be839b7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TableProcedureBiConsumer.html
@@ -30,212 +30,212 @@
 022import java.io.IOException;
 023import java.util.ArrayList;
 024import java.util.Arrays;
-025import java.util.LinkedList;
-026import java.util.List;
-027import java.util.Optional;
-028import 
java.util.concurrent.CompletableFuture;
-029import java.util.concurrent.TimeUnit;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import java.util.function.BiConsumer;
-032import java.util.regex.Pattern;
-033
-034import 
com.google.common.annotations.VisibleForTesting;
-035import org.apache.commons.logging.Log;
-036import 
org.apache.commons.logging.LogFactory;
-037import 
org.apache.hadoop.hbase.HColumnDescriptor;
-038import 
org.apache.hadoop.hbase.HRegionInfo;
-039import 
org.apache.hadoop.hbase.HRegionLocation;
-040import 
org.apache.hadoop.hbase.HTableDescriptor;
-041import 
org.apache.hadoop.hbase.MetaTableAccessor;
-042import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-043import 
org.apache.hadoop.hbase.NotServingRegionException;
-044import 
org.apache.hadoop.hbase.RegionLocations;
-045import 
org.apache.hadoop.hbase.ServerName;
-046import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-047import 
org.apache.hadoop.hbase.HConstants;
-048import 
org.apache.hadoop.hbase.TableName;
-049import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-050import 
org.apache.hadoop.hbase.TableNotFoundException;
-051import 
org.apache.hadoop.hbase.UnknownRegionException;
-052import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-053import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-054import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-055import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-057import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-058import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-059import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-060import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-061import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-085import 

[23/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
index ac4a9b3..be839b7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.EnableTableProcedureBiConsumer.html
@@ -30,212 +30,212 @@
 022import java.io.IOException;
 023import java.util.ArrayList;
 024import java.util.Arrays;
-025import java.util.LinkedList;
-026import java.util.List;
-027import java.util.Optional;
-028import 
java.util.concurrent.CompletableFuture;
-029import java.util.concurrent.TimeUnit;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import java.util.function.BiConsumer;
-032import java.util.regex.Pattern;
-033
-034import 
com.google.common.annotations.VisibleForTesting;
-035import org.apache.commons.logging.Log;
-036import 
org.apache.commons.logging.LogFactory;
-037import 
org.apache.hadoop.hbase.HColumnDescriptor;
-038import 
org.apache.hadoop.hbase.HRegionInfo;
-039import 
org.apache.hadoop.hbase.HRegionLocation;
-040import 
org.apache.hadoop.hbase.HTableDescriptor;
-041import 
org.apache.hadoop.hbase.MetaTableAccessor;
-042import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-043import 
org.apache.hadoop.hbase.NotServingRegionException;
-044import 
org.apache.hadoop.hbase.RegionLocations;
-045import 
org.apache.hadoop.hbase.ServerName;
-046import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-047import 
org.apache.hadoop.hbase.HConstants;
-048import 
org.apache.hadoop.hbase.TableName;
-049import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-050import 
org.apache.hadoop.hbase.TableNotFoundException;
-051import 
org.apache.hadoop.hbase.UnknownRegionException;
-052import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-053import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-054import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-055import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-057import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-058import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-059import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-060import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-061import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-085import 

[30/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.Converter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.Converter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.Converter.html
index ac4a9b3..be839b7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.Converter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.Converter.html
@@ -30,212 +30,212 @@
 022import java.io.IOException;
 023import java.util.ArrayList;
 024import java.util.Arrays;
-025import java.util.LinkedList;
-026import java.util.List;
-027import java.util.Optional;
-028import 
java.util.concurrent.CompletableFuture;
-029import java.util.concurrent.TimeUnit;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import java.util.function.BiConsumer;
-032import java.util.regex.Pattern;
-033
-034import 
com.google.common.annotations.VisibleForTesting;
-035import org.apache.commons.logging.Log;
-036import 
org.apache.commons.logging.LogFactory;
-037import 
org.apache.hadoop.hbase.HColumnDescriptor;
-038import 
org.apache.hadoop.hbase.HRegionInfo;
-039import 
org.apache.hadoop.hbase.HRegionLocation;
-040import 
org.apache.hadoop.hbase.HTableDescriptor;
-041import 
org.apache.hadoop.hbase.MetaTableAccessor;
-042import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-043import 
org.apache.hadoop.hbase.NotServingRegionException;
-044import 
org.apache.hadoop.hbase.RegionLocations;
-045import 
org.apache.hadoop.hbase.ServerName;
-046import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-047import 
org.apache.hadoop.hbase.HConstants;
-048import 
org.apache.hadoop.hbase.TableName;
-049import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-050import 
org.apache.hadoop.hbase.TableNotFoundException;
-051import 
org.apache.hadoop.hbase.UnknownRegionException;
-052import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-053import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-054import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-055import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-057import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-058import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-059import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-060import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-061import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-085import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-086import 

[21/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
index ac4a9b3..be839b7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer.html
@@ -30,212 +30,212 @@
 022import java.io.IOException;
 023import java.util.ArrayList;
 024import java.util.Arrays;
-025import java.util.LinkedList;
-026import java.util.List;
-027import java.util.Optional;
-028import 
java.util.concurrent.CompletableFuture;
-029import java.util.concurrent.TimeUnit;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import java.util.function.BiConsumer;
-032import java.util.regex.Pattern;
-033
-034import 
com.google.common.annotations.VisibleForTesting;
-035import org.apache.commons.logging.Log;
-036import 
org.apache.commons.logging.LogFactory;
-037import 
org.apache.hadoop.hbase.HColumnDescriptor;
-038import 
org.apache.hadoop.hbase.HRegionInfo;
-039import 
org.apache.hadoop.hbase.HRegionLocation;
-040import 
org.apache.hadoop.hbase.HTableDescriptor;
-041import 
org.apache.hadoop.hbase.MetaTableAccessor;
-042import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-043import 
org.apache.hadoop.hbase.NotServingRegionException;
-044import 
org.apache.hadoop.hbase.RegionLocations;
-045import 
org.apache.hadoop.hbase.ServerName;
-046import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-047import 
org.apache.hadoop.hbase.HConstants;
-048import 
org.apache.hadoop.hbase.TableName;
-049import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-050import 
org.apache.hadoop.hbase.TableNotFoundException;
-051import 
org.apache.hadoop.hbase.UnknownRegionException;
-052import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-053import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-054import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-055import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-057import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-058import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-059import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-060import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-061import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-085import 

[01/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 932a1c360 -> 662ea7dcb


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.WaitForStateCallable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.WaitForStateCallable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.WaitForStateCallable.html
index b798d4b..8c56a67 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.WaitForStateCallable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.WaitForStateCallable.html
@@ -3877,425 +3877,371 @@
 3869      throw new ReplicationException("tableCfs is null");
 3870    }
 3871    ReplicationPeerConfig peerConfig = getReplicationPeerConfig(id);
-3872    Map<TableName, List<String>> preTableCfs = peerConfig.getTableCFsMap();
-3873    if (preTableCfs == null) {
-3874      peerConfig.setTableCFsMap(tableCfs);
-3875    } else {
-3876      for (Map.Entry<TableName, ? extends Collection<String>> entry : tableCfs.entrySet()) {
-3877        TableName table = entry.getKey();
-3878        Collection<String> appendCfs = entry.getValue();
-3879        if (preTableCfs.containsKey(table)) {
-3880          List<String> cfs = preTableCfs.get(table);
-3881          if (cfs == null || appendCfs == null || appendCfs.isEmpty()) {
-3882            preTableCfs.put(table, null);
-3883          } else {
-3884            Set<String> cfSet = new HashSet<String>(cfs);
-3885            cfSet.addAll(appendCfs);
-3886            preTableCfs.put(table, Lists.newArrayList(cfSet));
-3887          }
-3888        } else {
-3889          if (appendCfs == null || appendCfs.isEmpty()) {
-3890            preTableCfs.put(table, null);
-3891          } else {
-3892            preTableCfs.put(table, Lists.newArrayList(appendCfs));
-3893          }
-3894        }
-3895      }
-3896    }
-3897    updateReplicationPeerConfig(id, peerConfig);
-3898  }
-3899
-3900  @Override
-3901  public void removeReplicationPeerTableCFs(String id,
-3902      Map<TableName, ? extends Collection<String>> tableCfs) throws ReplicationException,
-3903      IOException {
-3904    if (tableCfs == null) {
-3905      throw new ReplicationException("tableCfs is null");
-3906    }
-3907    ReplicationPeerConfig peerConfig = getReplicationPeerConfig(id);
-3908    Map<TableName, List<String>> preTableCfs = peerConfig.getTableCFsMap();
-3909    if (preTableCfs == null) {
-3910      throw new ReplicationException("Table-Cfs for peer" + id + " is null");
-3911    }
-3912    for (Map.Entry<TableName, ? extends Collection<String>> entry : tableCfs.entrySet()) {
-3913
-3914      TableName table = entry.getKey();
-3915      Collection<String> removeCfs = entry.getValue();
-3916      if (preTableCfs.containsKey(table)) {
-3917        List<String> cfs = preTableCfs.get(table);
-3918        if (cfs == null && (removeCfs == null || removeCfs.isEmpty())) {
-3919          preTableCfs.remove(table);
-3920        } else if (cfs != null && (removeCfs != null && !removeCfs.isEmpty())) {
-3921          Set<String> cfSet = new HashSet<String>(cfs);
-3922          cfSet.removeAll(removeCfs);
-3923          if (cfSet.isEmpty()) {
-3924            preTableCfs.remove(table);
-3925          } else {
-3926            preTableCfs.put(table, Lists.newArrayList(cfSet));
-3927          }
-3928        } else if (cfs == null && (removeCfs != null && !removeCfs.isEmpty())) {
-3929          throw new ReplicationException("Cannot remove cf of table: " + table
-3930              + " which doesn't specify cfs from table-cfs config in peer: " + id);
-3931        } else if (cfs != null && (removeCfs == null || removeCfs.isEmpty())) {
-3932          throw new ReplicationException("Cannot remove table: " + table
-3933              + " which has specified cfs from table-cfs config in peer: " + id);
-3934        }
-3935      } else {
-3936        throw new ReplicationException("No table: " + table + " in table-cfs config of peer: " + id);
-3937      }
-3938    }
-3939    updateReplicationPeerConfig(id, peerConfig);
-3940  }
-3941
-3942  @Override
-3943  public List<ReplicationPeerDescription> listReplicationPeers() throws IOException {
-3944    return listReplicationPeers((Pattern)null);
-3945  }
-3946
-3947  @Override
-3948  public List<ReplicationPeerDescription> listReplicationPeers(String regex) throws IOException {
-3949    return listReplicationPeers(Pattern.compile(regex));
-3950  }
-3951
-3952  @Override
-3953  public List<ReplicationPeerDescription> listReplicationPeers(Pattern pattern)
-3954      throws IOException {
-3955    return executeCallable(new MasterCallable<List<ReplicationPeerDescription>>(getConnection(),
-3956        getRpcControllerFactory()) {
-3957      @Override
-3958
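
The lines removed above are the old HBaseAdmin-side logic for appendReplicationPeerTableCFs and removeReplicationPeerTableCFs: load the peer's current table-cf map, merge or subtract the caller's entries (a null column-family list standing for the whole table), then write the result back through updateReplicationPeerConfig. From the caller's side the effect looks like this minimal sketch (table, column-family and peer names are made up; exceptions are simply propagated):

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;

public class PeerTableCFsSketch {
  static void adjustPeer(Admin admin) throws Exception {
    Map<TableName, List<String>> toAppend = new HashMap<>();
    toAppend.put(TableName.valueOf("t1"), Arrays.asList("cf1", "cf2")); // replicate t1:cf1,cf2 to peer "1"
    toAppend.put(TableName.valueOf("t2"), null);                        // null list: all column families of t2
    admin.appendReplicationPeerTableCFs("1", toAppend);

    Map<TableName, List<String>> toRemove = new HashMap<>();
    toRemove.put(TableName.valueOf("t1"), Arrays.asList("cf2"));        // stop replicating t1:cf2
    admin.removeReplicationPeerTableCFs("1", toRemove);
  }
}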

[24/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
index ac4a9b3..be839b7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
@@ -30,212 +30,212 @@
 022import java.io.IOException;
 023import java.util.ArrayList;
 024import java.util.Arrays;
-025import java.util.LinkedList;
-026import java.util.List;
-027import java.util.Optional;
-028import 
java.util.concurrent.CompletableFuture;
-029import java.util.concurrent.TimeUnit;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import java.util.function.BiConsumer;
-032import java.util.regex.Pattern;
-033
-034import 
com.google.common.annotations.VisibleForTesting;
-035import org.apache.commons.logging.Log;
-036import 
org.apache.commons.logging.LogFactory;
-037import 
org.apache.hadoop.hbase.HColumnDescriptor;
-038import 
org.apache.hadoop.hbase.HRegionInfo;
-039import 
org.apache.hadoop.hbase.HRegionLocation;
-040import 
org.apache.hadoop.hbase.HTableDescriptor;
-041import 
org.apache.hadoop.hbase.MetaTableAccessor;
-042import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-043import 
org.apache.hadoop.hbase.NotServingRegionException;
-044import 
org.apache.hadoop.hbase.RegionLocations;
-045import 
org.apache.hadoop.hbase.ServerName;
-046import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-047import 
org.apache.hadoop.hbase.HConstants;
-048import 
org.apache.hadoop.hbase.TableName;
-049import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-050import 
org.apache.hadoop.hbase.TableNotFoundException;
-051import 
org.apache.hadoop.hbase.UnknownRegionException;
-052import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-053import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-054import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-055import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-057import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-058import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-059import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-060import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-061import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-085import 

[51/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/662ea7dc
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/662ea7dc
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/662ea7dc

Branch: refs/heads/asf-site
Commit: 662ea7dcb7235412ba4e66e2b2f1d249ead5d4bb
Parents: 932a1c3
Author: jenkins 
Authored: Thu Apr 20 15:01:09 2017 +
Committer: jenkins 
Committed: Thu Apr 20 15:01:09 2017 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 4 +-
 apache_hbase_reference_guide.pdfmarks   | 4 +-
 apidocs/index-all.html  |48 +
 .../hadoop/hbase/class-use/TableName.html   |16 +-
 .../apache/hadoop/hbase/client/AsyncAdmin.html  |   480 +-
 .../client/replication/class-use/TableCFs.html  | 6 +
 .../class-use/ReplicationPeerConfig.html|29 +
 .../class-use/ReplicationPeerDescription.html   |18 +
 .../apache/hadoop/hbase/client/AsyncAdmin.html  |  1010 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 28026 +
 checkstyle.rss  |12 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html |34 +-
 dependency-info.html| 4 +-
 dependency-management.html  |18 +-
 devapidocs/constant-values.html | 8 +-
 devapidocs/index-all.html   |80 +
 .../backup/impl/BackupSystemTable.WALItem.html  |18 +-
 .../hbase/backup/impl/BackupSystemTable.html|   254 +-
 .../hadoop/hbase/backup/package-tree.html   | 2 +-
 .../hadoop/hbase/class-use/TableName.html   |45 +-
 .../hbase/classification/package-tree.html  | 6 +-
 .../apache/hadoop/hbase/client/AsyncAdmin.html  |   480 +-
 ...dmin.AddColumnFamilyProcedureBiConsumer.html | 6 +-
 .../client/AsyncHBaseAdmin.AdminRpcCall.html| 4 +-
 .../hbase/client/AsyncHBaseAdmin.Converter.html | 4 +-
 ...dmin.CreateNamespaceProcedureBiConsumer.html | 6 +-
 ...aseAdmin.CreateTableProcedureBiConsumer.html | 6 +-
 ...n.DeleteColumnFamilyProcedureBiConsumer.html | 6 +-
 ...dmin.DeleteNamespaceProcedureBiConsumer.html | 6 +-
 ...aseAdmin.DeleteTableProcedureBiConsumer.html | 8 +-
 ...seAdmin.DisableTableProcedureBiConsumer.html | 6 +-
 ...aseAdmin.EnableTableProcedureBiConsumer.html | 6 +-
 .../client/AsyncHBaseAdmin.MasterRpcCall.html   | 4 +-
 ...min.MergeTableRegionProcedureBiConsumer.html | 6 +-
 ...n.ModifyColumnFamilyProcedureBiConsumer.html | 6 +-
 ...dmin.ModifyNamespaceProcedureBiConsumer.html | 6 +-
 ...HBaseAdmin.NamespaceProcedureBiConsumer.html |14 +-
 .../AsyncHBaseAdmin.ProcedureBiConsumer.html|12 +-
 .../client/AsyncHBaseAdmin.TableOperator.html   | 4 +-
 ...syncHBaseAdmin.TableProcedureBiConsumer.html |14 +-
 ...eAdmin.TruncateTableProcedureBiConsumer.html | 6 +-
 .../hadoop/hbase/client/AsyncHBaseAdmin.html|   615 +-
 .../client/HBaseAdmin.ReplicationState.html |12 +-
 .../apache/hadoop/hbase/client/HBaseAdmin.html  |34 +-
 .../hadoop/hbase/client/class-use/Admin.html| 4 +
 .../hadoop/hbase/client/package-tree.html   |24 +-
 .../replication/ReplicationSerDeHelper.html |   105 +-
 .../client/replication/class-use/TableCFs.html  |12 +-
 .../hadoop/hbase/filter/package-tree.html   | 8 +-
 .../hadoop/hbase/io/hfile/package-tree.html | 8 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   | 4 +-
 .../hadoop/hbase/mapreduce/package-tree.html| 2 +-
 .../hadoop/hbase/master/package-tree.html   | 4 +-
 .../org/apache/hadoop/hbase/package-tree.html   |10 +-
 .../hadoop/hbase/quotas/package-tree.html   | 4 +-
 .../hadoop/hbase/regionserver/package-tree.html |18 +-
 .../regionserver/querymatcher/package-tree.html | 4 +-
 .../class-use/ReplicationException.html | 6 +
 .../class-use/ReplicationPeerConfig.html|58 +-
 .../class-use/ReplicationPeerDescription.html   |36 +-
 .../hadoop/hbase/rest/model/package-tree.html   | 2 +-
 .../hbase/security/access/package-tree.html | 2 +-
 .../hadoop/hbase/security/package-tree.html | 2 +-
 .../tmpl/master/MasterStatusTmpl.ImplData.html  |   270 +-
 .../hbase/tmpl/master/MasterStatusTmpl.html |   108 +-
 .../hbase/tmpl/master/MasterStatusTmplImpl.html |54 +-
 .../regionserver/RSStatusTmpl.ImplData.html |   120 +-
 

[05/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
index b798d4b..8c56a67 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
@@ -3877,425 +3877,371 @@
 3869      throw new ReplicationException("tableCfs is null");
 3870    }
 3871    ReplicationPeerConfig peerConfig = getReplicationPeerConfig(id);
-3872    Map<TableName, List<String>> preTableCfs = peerConfig.getTableCFsMap();
-3873    if (preTableCfs == null) {
-3874      peerConfig.setTableCFsMap(tableCfs);
-3875    } else {
-3876      for (Map.Entry<TableName, ? extends Collection<String>> entry : tableCfs.entrySet()) {
-3877        TableName table = entry.getKey();
-3878        Collection<String> appendCfs = entry.getValue();
-3879        if (preTableCfs.containsKey(table)) {
-3880          List<String> cfs = preTableCfs.get(table);
-3881          if (cfs == null || appendCfs == null || appendCfs.isEmpty()) {
-3882            preTableCfs.put(table, null);
-3883          } else {
-3884            Set<String> cfSet = new HashSet<String>(cfs);
-3885            cfSet.addAll(appendCfs);
-3886            preTableCfs.put(table, Lists.newArrayList(cfSet));
-3887          }
-3888        } else {
-3889          if (appendCfs == null || appendCfs.isEmpty()) {
-3890            preTableCfs.put(table, null);
-3891          } else {
-3892            preTableCfs.put(table, Lists.newArrayList(appendCfs));
-3893          }
-3894        }
-3895      }
-3896    }
-3897    updateReplicationPeerConfig(id, peerConfig);
-3898  }
-3899
-3900  @Override
-3901  public void removeReplicationPeerTableCFs(String id,
-3902      Map<TableName, ? extends Collection<String>> tableCfs) throws ReplicationException,
-3903      IOException {
-3904    if (tableCfs == null) {
-3905      throw new ReplicationException("tableCfs is null");
-3906    }
-3907    ReplicationPeerConfig peerConfig = getReplicationPeerConfig(id);
-3908    Map<TableName, List<String>> preTableCfs = peerConfig.getTableCFsMap();
-3909    if (preTableCfs == null) {
-3910      throw new ReplicationException("Table-Cfs for peer" + id + " is null");
-3911    }
-3912    for (Map.Entry<TableName, ? extends Collection<String>> entry : tableCfs.entrySet()) {
-3913
-3914      TableName table = entry.getKey();
-3915      Collection<String> removeCfs = entry.getValue();
-3916      if (preTableCfs.containsKey(table)) {
-3917        List<String> cfs = preTableCfs.get(table);
-3918        if (cfs == null && (removeCfs == null || removeCfs.isEmpty())) {
-3919          preTableCfs.remove(table);
-3920        } else if (cfs != null && (removeCfs != null && !removeCfs.isEmpty())) {
-3921          Set<String> cfSet = new HashSet<String>(cfs);
-3922          cfSet.removeAll(removeCfs);
-3923          if (cfSet.isEmpty()) {
-3924            preTableCfs.remove(table);
-3925          } else {
-3926            preTableCfs.put(table, Lists.newArrayList(cfSet));
-3927          }
-3928        } else if (cfs == null && (removeCfs != null && !removeCfs.isEmpty())) {
-3929          throw new ReplicationException("Cannot remove cf of table: " + table
-3930              + " which doesn't specify cfs from table-cfs config in peer: " + id);
-3931        } else if (cfs != null && (removeCfs == null || removeCfs.isEmpty())) {
-3932          throw new ReplicationException("Cannot remove table: " + table
-3933              + " which has specified cfs from table-cfs config in peer: " + id);
-3934        }
-3935      } else {
-3936        throw new ReplicationException("No table: " + table + " in table-cfs config of peer: " + id);
-3937      }
-3938    }
-3939    updateReplicationPeerConfig(id, peerConfig);
-3940  }
-3941
-3942  @Override
-3943  public List<ReplicationPeerDescription> listReplicationPeers() throws IOException {
-3944    return listReplicationPeers((Pattern)null);
-3945  }
-3946
-3947  @Override
-3948  public List<ReplicationPeerDescription> listReplicationPeers(String regex) throws IOException {
-3949    return listReplicationPeers(Pattern.compile(regex));
-3950  }
-3951
-3952  @Override
-3953  public List<ReplicationPeerDescription> listReplicationPeers(Pattern pattern)
-3954      throws IOException {
-3955    return executeCallable(new MasterCallable<List<ReplicationPeerDescription>>(getConnection(),
-3956        getRpcControllerFactory()) {
-3957      @Override
-3958      protected List<ReplicationPeerDescription> rpcCall() throws Exception {
-3959        List<ReplicationProtos.ReplicationPeerDescription> peersList =
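The removed HBaseAdmin code above (the same hunk appears in each regenerated Javadoc source page of this site publish) merges a caller-supplied per-table column-family map into the peer's existing table-CFs map, where a null CF list means "replicate all column families". A standalone sketch of that append rule, using plain Java collections and illustrative names rather than the HBase types:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class TableCfsAppendSketch {
  // Mirrors the append branch of the removed code: null stands for "all column families".
  static void append(Map<String, List<String>> existing,
      Map<String, ? extends Collection<String>> toAppend) {
    for (Map.Entry<String, ? extends Collection<String>> entry : toAppend.entrySet()) {
      String table = entry.getKey();
      Collection<String> appendCfs = entry.getValue();
      if (existing.containsKey(table)) {
        List<String> cfs = existing.get(table);
        if (cfs == null || appendCfs == null || appendCfs.isEmpty()) {
          existing.put(table, null);                    // widen to "all CFs"
        } else {
          Set<String> cfSet = new HashSet<String>(cfs); // union of old and appended CFs
          cfSet.addAll(appendCfs);
          existing.put(table, new ArrayList<String>(cfSet));
        }
      } else if (appendCfs == null || appendCfs.isEmpty()) {
        existing.put(table, null);
      } else {
        existing.put(table, new ArrayList<String>(appendCfs));
      }
    }
  }

  public static void main(String[] args) {
    Map<String, List<String>> peerCfs = new HashMap<String, List<String>>();
    peerCfs.put("t1", new ArrayList<String>(Arrays.asList("cf1")));

    Map<String, List<String>> toAppend = new HashMap<String, List<String>>();
    toAppend.put("t1", Arrays.asList("cf2"));           // t1 -> union {cf1, cf2}
    toAppend.put("t2", null);                           // t2 -> all column families

    append(peerCfs, toAppend);
    System.out.println(peerCfs);                        // e.g. {t1=[cf1, cf2], t2=null}
  }
}

The real methods operate on Map<TableName, List<String>> from ReplicationPeerConfig and push the merged map back via updateReplicationPeerConfig(id, peerConfig); the sketch only isolates the merge rule.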

[10/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
index b798d4b..8c56a67 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html

[13/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
index ac4a9b3..be839b7 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
@@ -30,212 +30,212 @@
 022import java.io.IOException;
 023import java.util.ArrayList;
 024import java.util.Arrays;
-025import java.util.LinkedList;
-026import java.util.List;
-027import java.util.Optional;
-028import java.util.concurrent.CompletableFuture;
-029import java.util.concurrent.TimeUnit;
-030import java.util.concurrent.atomic.AtomicReference;
-031import java.util.function.BiConsumer;
-032import java.util.regex.Pattern;
-033
-034import com.google.common.annotations.VisibleForTesting;
-035import org.apache.commons.logging.Log;
-036import org.apache.commons.logging.LogFactory;
-037import org.apache.hadoop.hbase.HColumnDescriptor;
-038import org.apache.hadoop.hbase.HRegionInfo;
-039import org.apache.hadoop.hbase.HRegionLocation;
-040import org.apache.hadoop.hbase.HTableDescriptor;
-041import org.apache.hadoop.hbase.MetaTableAccessor;
-042import org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-043import org.apache.hadoop.hbase.NotServingRegionException;
-044import org.apache.hadoop.hbase.RegionLocations;
-045import org.apache.hadoop.hbase.ServerName;
-046import org.apache.hadoop.hbase.NamespaceDescriptor;
-047import org.apache.hadoop.hbase.HConstants;
-048import org.apache.hadoop.hbase.TableName;
-049import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-050import org.apache.hadoop.hbase.TableNotFoundException;
-051import org.apache.hadoop.hbase.UnknownRegionException;
-052import org.apache.hadoop.hbase.classification.InterfaceAudience;
-053import org.apache.hadoop.hbase.classification.InterfaceStability;
-054import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-055import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-056import org.apache.hadoop.hbase.client.Scan.ReadType;
-057import org.apache.hadoop.hbase.exceptions.DeserializationException;
-058import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-059import org.apache.hadoop.hbase.quotas.QuotaFilter;
-060import org.apache.hadoop.hbase.quotas.QuotaSettings;
-061import org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-062import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-063import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-064import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-065import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-066import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-067import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-068import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-069import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-070import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-071import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-072import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-073import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-074import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-075import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-076import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-077import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-078import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-079import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-080import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-081import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-082import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-083import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-084import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-085import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-086import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-087import 

[03/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html
index b798d4b..8c56a67 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html

[11/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AddColumnFamilyFuture.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AddColumnFamilyFuture.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AddColumnFamilyFuture.html
index b798d4b..8c56a67 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AddColumnFamilyFuture.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AddColumnFamilyFuture.html

[08/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html
index b798d4b..8c56a67 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html

[07/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
index b798d4b..8c56a67 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html

[02/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.NamespaceFuture.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.NamespaceFuture.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.NamespaceFuture.html
index b798d4b..8c56a67 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.NamespaceFuture.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.NamespaceFuture.html

[09/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteColumnFamilyFuture.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteColumnFamilyFuture.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteColumnFamilyFuture.html
index b798d4b..8c56a67 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteColumnFamilyFuture.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteColumnFamilyFuture.html

hbase git commit: HBASE-17915 Implement async replication admin methods

2017-04-20 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/master b1ef8dd43 -> 40cc666ac


HBASE-17915 Implement async replication admin methods


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/40cc666a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/40cc666a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/40cc666a

Branch: refs/heads/master
Commit: 40cc666ac984e846a8c7105b771ce6bec90c4ad3
Parents: b1ef8dd
Author: Guanghao Zhang 
Authored: Thu Apr 20 18:13:03 2017 +0800
Committer: Guanghao Zhang 
Committed: Thu Apr 20 18:13:03 2017 +0800

--
 .../apache/hadoop/hbase/client/AsyncAdmin.java  |  92 
 .../hadoop/hbase/client/AsyncHBaseAdmin.java| 249 +--
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |  58 +--
 .../replication/ReplicationSerDeHelper.java |  67 +++
 .../hadoop/hbase/client/TestAsyncAdminBase.java |   2 +-
 .../client/TestAsyncReplicationAdminApi.java| 416 +++
 6 files changed, 802 insertions(+), 82 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/40cc666a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
index 270f28f..5d2955f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
@@ -18,6 +18,8 @@
 package org.apache.hadoop.hbase.client;
 
 import java.util.List;
+import java.util.Collection;
+import java.util.Map;
 import java.util.concurrent.CompletableFuture;
 import java.util.regex.Pattern;
 
@@ -30,6 +32,9 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.quotas.QuotaFilter;
 import org.apache.hadoop.hbase.quotas.QuotaSettings;
+import org.apache.hadoop.hbase.client.replication.TableCFs;
+import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
+import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
 import org.apache.hadoop.hbase.util.Pair;
 
 /**
@@ -481,4 +486,91 @@ public interface AsyncAdmin {
    * @return the QuotaSetting list, which wrapped by a CompletableFuture.
    */
   CompletableFuture<List<QuotaSettings>> getQuota(QuotaFilter filter);
+
+  /**
+   * Add a new replication peer for replicating data to slave cluster
+   * @param peerId a short name that identifies the peer
+   * @param peerConfig configuration for the replication slave cluster
+   */
+  CompletableFuture<Void> addReplicationPeer(final String peerId,
+      final ReplicationPeerConfig peerConfig);
+
+  /**
+   * Remove a peer and stop the replication
+   * @param peerId a short name that identifies the peer
+   */
+  CompletableFuture<Void> removeReplicationPeer(final String peerId);
+
+  /**
+   * Restart the replication stream to the specified peer
+   * @param peerId a short name that identifies the peer
+   */
+  CompletableFuture<Void> enableReplicationPeer(final String peerId);
+
+  /**
+   * Stop the replication stream to the specified peer
+   * @param peerId a short name that identifies the peer
+   */
+  CompletableFuture<Void> disableReplicationPeer(final String peerId);
+
+  /**
+   * Returns the configured ReplicationPeerConfig for the specified peer
+   * @param peerId a short name that identifies the peer
+   * @return ReplicationPeerConfig for the peer wrapped by a {@link CompletableFuture}.
+   */
+  CompletableFuture<ReplicationPeerConfig> getReplicationPeerConfig(final String peerId);
+
+  /**
+   * Update the peerConfig for the specified peer
+   * @param peerId a short name that identifies the peer
+   * @param peerConfig new config for the peer
+   */
+  CompletableFuture<Void> updateReplicationPeerConfig(final String peerId,
+      final ReplicationPeerConfig peerConfig);
+
+  /**
+   * Append the replicable table-cf config of the specified peer
+   * @param id a short that identifies the cluster
+   * @param tableCfs A map from tableName to column family names
+   */
+  CompletableFuture<Void> appendReplicationPeerTableCFs(String id,
+      Map<TableName, ? extends Collection<String>> tableCfs);
+
+  /**
+   * Remove some table-cfs from config of the specified peer
+   * @param id a short name that identifies the cluster
+   * @param tableCfs A map from tableName to column family names
+   */
+  CompletableFuture<Void> removeReplicationPeerTableCFs(String id,
+      Map<TableName, ? extends Collection<String>> tableCfs);
+
+  /**
+   * Return a list of replication peers.
+   * @return a list of replication peers description. The return value will be wrapped by a
+   * {@link 
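A minimal usage sketch of the interface methods added above, assuming an AsyncAdmin instance has already been obtained elsewhere (for example from an async connection); the peer id "1" and the ZooKeeper cluster key are placeholders, not defaults:

import java.util.concurrent.CompletableFuture;

import org.apache.hadoop.hbase.client.AsyncAdmin;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

public class AsyncReplicationAdminSketch {
  // Add a peer, enable it, then read back its configuration, all without blocking.
  static CompletableFuture<ReplicationPeerConfig> addAndEnablePeer(AsyncAdmin admin) {
    ReplicationPeerConfig peerConfig = new ReplicationPeerConfig();
    peerConfig.setClusterKey("slave-zk1,slave-zk2,slave-zk3:2181:/hbase"); // placeholder cluster key
    return admin.addReplicationPeer("1", peerConfig)
        .thenCompose(ignored -> admin.enableReplicationPeer("1"))
        .thenCompose(ignored -> admin.getReplicationPeerConfig("1"));
  }
}

The chain relies only on the signatures shown in the hunk above; error handling (e.g. whenComplete or exceptionally on the returned future) is left out for brevity.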

hbase git commit: HBASE-17542 Move backup system table into separate namespace

2017-04-20 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 0953c1447 -> b1ef8dd43


HBASE-17542 Move backup system table into separate namespace


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b1ef8dd4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b1ef8dd4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b1ef8dd4

Branch: refs/heads/master
Commit: b1ef8dd43aa0f0102f296ea9b3eb76b5623052f5
Parents: 0953c14
Author: tedyu 
Authored: Thu Apr 20 02:57:24 2017 -0700
Committer: tedyu 
Committed: Thu Apr 20 02:57:24 2017 -0700

--
 .../hadoop/hbase/backup/BackupHFileCleaner.java  |  8 
 .../hbase/backup/BackupRestoreConstants.java |  2 +-
 .../hbase/backup/impl/BackupSystemTable.java | 19 +++
 3 files changed, 24 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b1ef8dd4/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupHFileCleaner.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupHFileCleaner.java
index b6b4c0a..8e6e843 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupHFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupHFileCleaner.java
@@ -54,9 +54,9 @@ public class BackupHFileCleaner extends BaseHFileCleanerDelegate implements Abor
   private boolean aborted;
   private Configuration conf;
   private Connection connection;
-  private long prevReadFromBackupTbl = 0, // timestamp of most recent read from hbase:backup table
-  secondPrevReadFromBackupTbl = 0; // timestamp of 2nd most recent read from hbase:backup table
-  //used by unit test to skip reading hbase:backup
+  private long prevReadFromBackupTbl = 0, // timestamp of most recent read from backup:system table
+  secondPrevReadFromBackupTbl = 0; // timestamp of 2nd most recent read from backup:system table
+  //used by unit test to skip reading backup:system
   private boolean checkForFullyBackedUpTables = true;
   private List<TableName> fullyBackedUpTables = null;
 
@@ -117,7 +117,7 @@ public class BackupHFileCleaner extends BaseHFileCleanerDelegate implements Abor
     Iterable<FileStatus> deletables = Iterables.filter(files, new Predicate<FileStatus>() {
       @Override
       public boolean apply(FileStatus file) {
-        // If the file is recent, be conservative and wait for one more scan of hbase:backup table
+        // If the file is recent, be conservative and wait for one more scan of backup:system table
         if (file.getModificationTime() > secondPrevReadFromBackupTbl) {
           return false;
         }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1ef8dd4/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreConstants.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreConstants.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreConstants.java
index 770ccce..e46904b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreConstants.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreConstants.java
@@ -30,7 +30,7 @@ public interface BackupRestoreConstants {
    * Backup/Restore constants
    */
   public final static String BACKUP_SYSTEM_TABLE_NAME_KEY = "hbase.backup.system.table.name";
-  public final static String BACKUP_SYSTEM_TABLE_NAME_DEFAULT = "hbase:backup";
+  public final static String BACKUP_SYSTEM_TABLE_NAME_DEFAULT = "backup:system";
 
   public final static String BACKUP_SYSTEM_TTL_KEY = "hbase.backup.system.ttl";
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1ef8dd4/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
index 1ba8087..217e750 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.NamespaceDescriptor;
 import 
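The default moves from hbase:backup to the namespace-qualified backup:system, but the name stays overridable through hbase.backup.system.table.name. A small sketch of resolving the effective backup table name from a Configuration (the key and default are taken from the diff above; everything else is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;

public class BackupTableNameSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Falls back to the new default when the key is not set in hbase-site.xml.
    String name = conf.get("hbase.backup.system.table.name", "backup:system");
    TableName backupTable = TableName.valueOf(name);
    System.out.println(backupTable.getNamespaceAsString() + ":" + backupTable.getQualifierAsString());
  }
}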

hbase git commit: HBASE-17940 HMaster can not start due to Jasper related classes conflict

2017-04-20 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 3acd8e464 -> 0953c1447


HBASE-17940 HMaster can not start due to Jasper related classes conflict


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0953c144
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0953c144
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0953c144

Branch: refs/heads/master
Commit: 0953c144700c18b16f0d34de5ccec90e7c9cef3d
Parents: 3acd8e4
Author: zhangduo 
Authored: Wed Apr 19 21:22:19 2017 +0800
Committer: zhangduo 
Committed: Thu Apr 20 16:06:50 2017 +0800

--
 hbase-server/pom.xml |  9 -
 pom.xml  | 39 ---
 2 files changed, 48 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0953c144/hbase-server/pom.xml
--
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index 10093cb..977a4c3 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -511,15 +511,6 @@
       <artifactId>zookeeper</artifactId>
     </dependency>
     <dependency>
-      <groupId>tomcat</groupId>
-      <artifactId>jasper-compiler</artifactId>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>tomcat</groupId>
-      <artifactId>jasper-runtime</artifactId>
-    </dependency>
-    <dependency>
       <groupId>org.jamon</groupId>
       <artifactId>jamon-runtime</artifactId>
     </dependency>

http://git-wip-us.apache.org/repos/asf/hbase/blob/0953c144/pom.xml
--
diff --git a/pom.xml b/pom.xml
index aff01d6..c66c93c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1254,7 +1254,6 @@
 3.1.2
 12.0.1
 2.23.2
-    <jasper.version>5.5.23</jasper.version>
 2.2.2
 9.3.8.v20160314
 9.2.19.v20160908
@@ -1715,44 +1714,6 @@
 ${jackson1.version}
   
   
-
-tomcat
-jasper-compiler
-${jasper.version}
-runtime
-
-  
-javax.servlet
-jsp-api
-  
-  
-javax.servlet
-servlet-api
-  
-  
-ant
-ant
-  
-
-  
-  
-tomcat
-jasper-runtime
-${jasper.version}
-runtime
-
-  
-javax.servlet
-servlet-api
-  
-
-  
-  
 org.jamon
 jamon-runtime
 ${jamon-runtime.version}