hbase git commit: HBASE-20008 [backport] NullPointerException when restoring a snapshot after splitting a region

2018-02-20 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 451f2fec0 -> 491adbc5f


HBASE-20008 [backport] NullPointerException when restoring a snapshot after 
splitting a region

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/491adbc5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/491adbc5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/491adbc5

Branch: refs/heads/branch-1.4
Commit: 491adbc5fa2d0327b88ced2b2f58f06922bfe8a7
Parents: 451f2fe
Author: Toshihiro Suzuki 
Authored: Thu Feb 15 18:03:02 2018 +0900
Committer: tedyu 
Committed: Tue Feb 20 20:29:56 2018 -0800

--
 .../master/snapshot/RestoreSnapshotHandler.java |  5 +-
 .../hbase/snapshot/RestoreSnapshotHelper.java   | 50 ++
 .../client/TestRestoreSnapshotFromClient.java   | 54 
 3 files changed, 85 insertions(+), 24 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/491adbc5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
index 0ed75a3..f9aea13 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
@@ -183,8 +183,9 @@ public class RestoreSnapshotHandler extends 
TableEventHandler implements Snapsho
   String msg = "restore snapshot=" + 
ClientSnapshotDescriptionUtils.toString(snapshot)
   + " failed. Try re-running the restore command.";
   LOG.error(msg, e);
-  monitor.receive(new 
ForeignException(masterServices.getServerName().toString(), e));
-  throw new RestoreSnapshotException(msg, e);
+  IOException rse = new RestoreSnapshotException(msg, e);
+  monitor.receive(new 
ForeignException(masterServices.getServerName().toString(), rse));
+  throw rse;
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/491adbc5/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
index 75dac43..fb535de 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
@@ -209,38 +209,39 @@ public class RestoreSnapshotHelper {
   metaChanges.addRegionToRemove(regionInfo);
 }
   }
-
-  // Restore regions using the snapshot data
-  monitor.rethrowException();
-  status.setStatus("Restoring table regions...");
-  restoreHdfsRegions(exec, regionManifests, 
metaChanges.getRegionsToRestore());
-  status.setStatus("Finished restoring all table regions.");
-
-  // Remove regions from the current table
-  monitor.rethrowException();
-  status.setStatus("Starting to delete excess regions from table");
-  removeHdfsRegions(exec, metaChanges.getRegionsToRemove());
-  status.setStatus("Finished deleting excess regions from table.");
 }
 
 // Regions to Add: present in the snapshot but not in the current table
+List regionsToAdd = new 
ArrayList(regionNames.size());
 if (regionNames.size() > 0) {
-  List regionsToAdd = new 
ArrayList(regionNames.size());
-
   monitor.rethrowException();
   for (String regionName: regionNames) {
 LOG.info("region to add: " + regionName);
 
regionsToAdd.add(HRegionInfo.convert(regionManifests.get(regionName).getRegionInfo()));
   }
-
-  // Create new regions cloning from the snapshot
-  monitor.rethrowException();
-  status.setStatus("Cloning regions...");
-  HRegionInfo[] clonedRegions = cloneHdfsRegions(exec, regionManifests, 
regionsToAdd);
-  metaChanges.setNewRegions(clonedRegions);
-  status.setStatus("Finished cloning regions.");
 }
 
+// Create new regions cloning from the snapshot
+// HBASE-20008: We need to call cloneHdfsRegions() before 
restoreHdfsRegions() because
+// regionsMap is constructed in cloneHdfsRegions() and it can be used in 
restoreHdfsRegions().
+monitor.rethrowException();
+status.setStatus("Cloning regions...");
+HRegionInfo[] clonedRegions = cloneHdfsRegions(exec, regionManifests, 
regionsToAdd);
+metaChanges.setNewRegio

hbase git commit: HBASE-20008 [backport] NullPointerException when restoring a snapshot after splitting a region

2018-02-20 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 21f6830fe -> c0bca137f


HBASE-20008 [backport] NullPointerException when restoring a snapshot after 
splitting a region

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c0bca137
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c0bca137
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c0bca137

Branch: refs/heads/branch-1
Commit: c0bca137ff1018a36f0bad9440cff1c0fe89a4ff
Parents: 21f6830
Author: Toshihiro Suzuki 
Authored: Thu Feb 15 18:03:02 2018 +0900
Committer: tedyu 
Committed: Tue Feb 20 20:29:08 2018 -0800

--
 .../master/snapshot/RestoreSnapshotHandler.java |  5 +-
 .../hbase/snapshot/RestoreSnapshotHelper.java   | 50 ++
 .../client/TestRestoreSnapshotFromClient.java   | 54 
 3 files changed, 85 insertions(+), 24 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c0bca137/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
index 0ed75a3..f9aea13 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
@@ -183,8 +183,9 @@ public class RestoreSnapshotHandler extends 
TableEventHandler implements Snapsho
   String msg = "restore snapshot=" + 
ClientSnapshotDescriptionUtils.toString(snapshot)
   + " failed. Try re-running the restore command.";
   LOG.error(msg, e);
-  monitor.receive(new 
ForeignException(masterServices.getServerName().toString(), e));
-  throw new RestoreSnapshotException(msg, e);
+  IOException rse = new RestoreSnapshotException(msg, e);
+  monitor.receive(new 
ForeignException(masterServices.getServerName().toString(), rse));
+  throw rse;
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0bca137/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
index 75dac43..fb535de 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
@@ -209,38 +209,39 @@ public class RestoreSnapshotHelper {
   metaChanges.addRegionToRemove(regionInfo);
 }
   }
-
-  // Restore regions using the snapshot data
-  monitor.rethrowException();
-  status.setStatus("Restoring table regions...");
-  restoreHdfsRegions(exec, regionManifests, 
metaChanges.getRegionsToRestore());
-  status.setStatus("Finished restoring all table regions.");
-
-  // Remove regions from the current table
-  monitor.rethrowException();
-  status.setStatus("Starting to delete excess regions from table");
-  removeHdfsRegions(exec, metaChanges.getRegionsToRemove());
-  status.setStatus("Finished deleting excess regions from table.");
 }
 
 // Regions to Add: present in the snapshot but not in the current table
+List regionsToAdd = new 
ArrayList(regionNames.size());
 if (regionNames.size() > 0) {
-  List regionsToAdd = new 
ArrayList(regionNames.size());
-
   monitor.rethrowException();
   for (String regionName: regionNames) {
 LOG.info("region to add: " + regionName);
 
regionsToAdd.add(HRegionInfo.convert(regionManifests.get(regionName).getRegionInfo()));
   }
-
-  // Create new regions cloning from the snapshot
-  monitor.rethrowException();
-  status.setStatus("Cloning regions...");
-  HRegionInfo[] clonedRegions = cloneHdfsRegions(exec, regionManifests, 
regionsToAdd);
-  metaChanges.setNewRegions(clonedRegions);
-  status.setStatus("Finished cloning regions.");
 }
 
+// Create new regions cloning from the snapshot
+// HBASE-20008: We need to call cloneHdfsRegions() before 
restoreHdfsRegions() because
+// regionsMap is constructed in cloneHdfsRegions() and it can be used in 
restoreHdfsRegions().
+monitor.rethrowException();
+status.setStatus("Cloning regions...");
+HRegionInfo[] clonedRegions = cloneHdfsRegions(exec, regionManifests, 
regionsToAdd);
+metaChanges.setNewRegions(c

hbase git commit: Fix compilation problem with TestZKPermissionsWatcher after f2631371fc

2018-02-20 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 7eaf67a33 -> 451f2fec0


Fix compilation problem with TestZKPermissionsWatcher after f2631371fc


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/451f2fec
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/451f2fec
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/451f2fec

Branch: refs/heads/branch-1.4
Commit: 451f2fec06617372430573ec64463b39b20833d0
Parents: 7eaf67a
Author: Andrew Purtell 
Authored: Tue Feb 20 17:10:38 2018 -0800
Committer: Andrew Purtell 
Committed: Tue Feb 20 17:13:06 2018 -0800

--
 .../hadoop/hbase/security/access/TestZKPermissionsWatcher.java   | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/451f2fec/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionsWatcher.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionsWatcher.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionsWatcher.java
index 5ae0f59..c99cbaa 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionsWatcher.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionsWatcher.java
@@ -44,8 +44,8 @@ import org.junit.experimental.categories.Category;
  * Test the reading and writing of access permissions to and from zookeeper.
  */
 @Category(LargeTests.class)
-public class TestZKPermissionWatcher {
-  private static final Log LOG = 
LogFactory.getLog(TestZKPermissionWatcher.class);
+public class TestZKPermissionsWatcher {
+  private static final Log LOG = 
LogFactory.getLog(TestZKPermissionsWatcher.class);
   private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
   private static TableAuthManager AUTH_A;
   private static TableAuthManager AUTH_B;



[2/4] hbase git commit: HBASE-20017 BufferedMutatorImpl submit the same mutation repeatedly

2018-02-20 Thread apurtell
HBASE-20017 BufferedMutatorImpl submit the same mutation repeatedly

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7eaf67a3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7eaf67a3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7eaf67a3

Branch: refs/heads/branch-1.4
Commit: 7eaf67a33e36516d8be7a67ce8dadd0d533dc5e4
Parents: f263137
Author: Chia-Ping Tsai 
Authored: Sun Feb 18 17:46:08 2018 +0800
Committer: Andrew Purtell 
Committed: Tue Feb 20 16:59:47 2018 -0800

--
 .../hbase/client/BufferedMutatorImpl.java   | 69 +++-
 .../hadoop/hbase/client/TestAsyncProcess.java   | 45 +
 .../hadoop/hbase/regionserver/StoreScanner.java |  2 +-
 3 files changed, 83 insertions(+), 33 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7eaf67a3/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
index 1974be3..d207a82 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
@@ -16,6 +16,7 @@
 package org.apache.hadoop.hbase.client;
 
 import com.google.common.annotations.VisibleForTesting;
+import java.io.Closeable;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -59,7 +60,7 @@ import java.util.concurrent.atomic.AtomicLong;
 public class BufferedMutatorImpl implements BufferedMutator {
 
   private static final Log LOG = LogFactory.getLog(BufferedMutatorImpl.class);
-  
+
   private final ExceptionListener listener;
 
   protected ClusterConnection connection; // non-final so can be overridden in 
test
@@ -228,26 +229,22 @@ public class BufferedMutatorImpl implements 
BufferedMutator {
 }
 
 if (!synchronous) {
-  QueueRowAccess taker = new QueueRowAccess();
-  try {
+  try (QueueRowAccess taker = createQueueRowAccess()){
 ap.submit(tableName, taker, true, null, false);
 if (ap.hasError()) {
   LOG.debug(tableName + ": One or more of the operations have failed -"
   + " waiting for all operation in progress to finish 
(successfully or not)");
 }
-  } finally {
-taker.restoreRemainder();
   }
 }
 if (synchronous || ap.hasError()) {
-  QueueRowAccess taker = new QueueRowAccess();
-  try {
-while (!taker.isEmpty()) {
+  while (true) {
+try (QueueRowAccess taker = createQueueRowAccess()){
+  if (taker.isEmpty()) {
+break;
+  }
   ap.submit(tableName, taker, true, null, false);
-  taker.reset();
 }
-  } finally {
-taker.restoreRemainder();
   }
 
   RetriesExhaustedWithDetailsException error =
@@ -304,36 +301,35 @@ public class BufferedMutatorImpl implements 
BufferedMutator {
 return Arrays.asList(writeAsyncBuffer.toArray(new Row[0]));
   }
 
-  private class QueueRowAccess implements RowAccess {
-private int remainder = undealtMutationCount.getAndSet(0);
+  @VisibleForTesting
+  QueueRowAccess createQueueRowAccess() {
+return new QueueRowAccess();
+  }
 
-void reset() {
-  restoreRemainder();
-  remainder = undealtMutationCount.getAndSet(0);
-}
+  @VisibleForTesting
+  class QueueRowAccess implements RowAccess, Closeable {
+private int remainder = undealtMutationCount.getAndSet(0);
+private Mutation last = null;
 
 @Override
 public Iterator iterator() {
   return new Iterator() {
-private final Iterator iter = writeAsyncBuffer.iterator();
 private int countDown = remainder;
-private Mutation last = null;
 @Override
 public boolean hasNext() {
-  if (countDown <= 0) {
-return false;
-  }
-  return iter.hasNext();
+  return countDown > 0;
 }
 @Override
 public Row next() {
+  restoreLastMutation();
   if (!hasNext()) {
 throw new NoSuchElementException();
   }
-  last = iter.next();
+  last = writeAsyncBuffer.poll();
   if (last == null) {
 throw new NoSuchElementException();
   }
+  currentWriteBufferSize.addAndGet(-last.heapSize());
   --countDown;
   return last;
 }
@@ -342,28 +338,37 @@ public class BufferedMutatorImpl implements 
BufferedMutator {
   if (last == null) {

[1/4] hbase git commit: HBASE-20017 BufferedMutatorImpl submit the same mutation repeatedly

2018-02-20 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 fd8189d31 -> 21f6830fe
  refs/heads/branch-1.4 f2631371f -> 7eaf67a33
  refs/heads/branch-2 c5ca3c2fe -> bc1ac49de
  refs/heads/master 0068b95c8 -> 79d9403a7


HBASE-20017 BufferedMutatorImpl submit the same mutation repeatedly

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/21f6830f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/21f6830f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/21f6830f

Branch: refs/heads/branch-1
Commit: 21f6830fe182069582d1cf3d4e82345efd24411e
Parents: fd8189d
Author: Chia-Ping Tsai 
Authored: Sun Feb 18 20:48:48 2018 +0800
Committer: Andrew Purtell 
Committed: Tue Feb 20 16:59:44 2018 -0800

--
 .../hbase/client/BufferedMutatorImpl.java   | 69 +++-
 .../hadoop/hbase/client/TestAsyncProcess.java   | 49 +-
 .../hadoop/hbase/regionserver/StoreScanner.java |  2 +-
 3 files changed, 85 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/21f6830f/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
index 638955e..e33bd7c 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
@@ -17,6 +17,7 @@ package org.apache.hadoop.hbase.client;
 
 import static org.apache.hadoop.hbase.client.BufferedMutatorParams.UNSET;
 import com.google.common.annotations.VisibleForTesting;
+import java.io.Closeable;
 import java.io.IOException;
 import java.io.InterruptedIOException;
 import java.util.Arrays;
@@ -61,7 +62,7 @@ import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 public class BufferedMutatorImpl implements BufferedMutator {
 
   private static final Log LOG = LogFactory.getLog(BufferedMutatorImpl.class);
-  
+
   private final ExceptionListener listener;
 
   protected ClusterConnection connection; // non-final so can be overridden in 
test
@@ -288,26 +289,22 @@ public class BufferedMutatorImpl implements 
BufferedMutator {
 }
 
 if (!synchronous) {
-  QueueRowAccess taker = new QueueRowAccess();
-  try {
+  try (QueueRowAccess taker = createQueueRowAccess()){
 ap.submit(tableName, taker, true, null, false);
 if (ap.hasError()) {
   LOG.debug(tableName + ": One or more of the operations have failed -"
   + " waiting for all operation in progress to finish 
(successfully or not)");
 }
-  } finally {
-taker.restoreRemainder();
   }
 }
 if (synchronous || ap.hasError()) {
-  QueueRowAccess taker = new QueueRowAccess();
-  try {
-while (!taker.isEmpty()) {
+  while (true) {
+try (QueueRowAccess taker = createQueueRowAccess()){
+  if (taker.isEmpty()) {
+break;
+  }
   ap.submit(tableName, taker, true, null, false);
-  taker.reset();
 }
-  } finally {
-taker.restoreRemainder();
   }
 
   RetriesExhaustedWithDetailsException error =
@@ -444,36 +441,35 @@ public class BufferedMutatorImpl implements 
BufferedMutator {
 return Arrays.asList(writeAsyncBuffer.toArray(new Row[0]));
   }
 
-  private class QueueRowAccess implements RowAccess {
-private int remainder = undealtMutationCount.getAndSet(0);
+  @VisibleForTesting
+  QueueRowAccess createQueueRowAccess() {
+return new QueueRowAccess();
+  }
 
-void reset() {
-  restoreRemainder();
-  remainder = undealtMutationCount.getAndSet(0);
-}
+  @VisibleForTesting
+  class QueueRowAccess implements RowAccess, Closeable {
+private int remainder = undealtMutationCount.getAndSet(0);
+private Mutation last = null;
 
 @Override
 public Iterator iterator() {
   return new Iterator() {
-private final Iterator iter = writeAsyncBuffer.iterator();
 private int countDown = remainder;
-private Mutation last = null;
 @Override
 public boolean hasNext() {
-  if (countDown <= 0) {
-return false;
-  }
-  return iter.hasNext();
+  return countDown > 0;
 }
 @Override
 public Row next() {
+  restoreLastMutation();
   if (!hasNext()) {
 throw new NoSuchElementException();
   }
-  last = iter.next();
+  last = writeAsyncBuffer.poll();
   if (last == null) {
 throw new NoSuchElement

[3/4] hbase git commit: HBASE-20017 BufferedMutatorImpl submit the same mutation repeatedly

2018-02-20 Thread apurtell
HBASE-20017 BufferedMutatorImpl submit the same mutation repeatedly

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/79d9403a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/79d9403a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/79d9403a

Branch: refs/heads/master
Commit: 79d9403a79cca60e614834659e3d9005d5482cac
Parents: 0068b95
Author: Chia-Ping Tsai 
Authored: Sun Feb 18 21:45:04 2018 +0800
Committer: Andrew Purtell 
Committed: Tue Feb 20 16:59:48 2018 -0800

--
 .../hbase/client/BufferedMutatorImpl.java   | 47 +++-
 .../hadoop/hbase/client/TestAsyncProcess.java   | 44 ++
 .../hbase/regionserver/wal/ReaderBase.java  |  2 +-
 3 files changed, 81 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/79d9403a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
index 9d24b4d..d4bc811 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
@@ -295,7 +295,7 @@ public class BufferedMutatorImpl implements BufferedMutator 
{
 break;
   }
   AsyncRequestFuture asf;
-  try (QueueRowAccess access = new QueueRowAccess()) {
+  try (QueueRowAccess access = createQueueRowAccess()) {
 if (access.isEmpty()) {
   // It means someone has gotten the ticker to run the flush.
   break;
@@ -406,16 +406,46 @@ public class BufferedMutatorImpl implements 
BufferedMutator {
 return currentWriteBufferSize.get();
   }
 
+  /**
+   * Count the mutations which haven't been processed.
+   * @return count of undealt mutation
+   */
   @VisibleForTesting
   int size() {
 return undealtMutationCount.get();
   }
 
-  private class QueueRowAccess implements RowAccess, Closeable {
+  /**
+   * Count the mutations which haven't been flushed
+   * @return count of unflushed mutation
+   */
+  @VisibleForTesting
+  int getUnflushedSize() {
+return writeAsyncBuffer.size();
+  }
+
+  @VisibleForTesting
+  QueueRowAccess createQueueRowAccess() {
+return new QueueRowAccess();
+  }
+
+  @VisibleForTesting
+  class QueueRowAccess implements RowAccess, Closeable {
 private int remainder = undealtMutationCount.getAndSet(0);
+private Mutation last = null;
+
+private void restoreLastMutation() {
+  // restore the last mutation since it isn't submitted
+  if (last != null) {
+writeAsyncBuffer.add(last);
+currentWriteBufferSize.addAndGet(last.heapSize());
+last = null;
+  }
+}
 
 @Override
 public void close() {
+  restoreLastMutation();
   if (remainder > 0) {
 undealtMutationCount.addAndGet(remainder);
 remainder = 0;
@@ -425,25 +455,22 @@ public class BufferedMutatorImpl implements 
BufferedMutator {
 @Override
 public Iterator iterator() {
   return new Iterator() {
-private final Iterator iter = writeAsyncBuffer.iterator();
 private int countDown = remainder;
-private Mutation last = null;
 @Override
 public boolean hasNext() {
-  if (countDown <= 0) {
-return false;
-  }
-  return iter.hasNext();
+  return countDown > 0;
 }
 @Override
 public Row next() {
+  restoreLastMutation();
   if (!hasNext()) {
 throw new NoSuchElementException();
   }
-  last = iter.next();
+  last = writeAsyncBuffer.poll();
   if (last == null) {
 throw new NoSuchElementException();
   }
+  currentWriteBufferSize.addAndGet(-last.heapSize());
   --countDown;
   return last;
 }
@@ -452,8 +479,6 @@ public class BufferedMutatorImpl implements BufferedMutator 
{
   if (last == null) {
 throw new IllegalStateException();
   }
-  iter.remove();
-  currentWriteBufferSize.addAndGet(-last.heapSize());
   --remainder;
   last = null;
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/79d9403a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
--
diff --git 
a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.j

[4/4] hbase git commit: HBASE-20017 BufferedMutatorImpl submit the same mutation repeatedly

2018-02-20 Thread apurtell
HBASE-20017 BufferedMutatorImpl submit the same mutation repeatedly

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bc1ac49d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bc1ac49d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bc1ac49d

Branch: refs/heads/branch-2
Commit: bc1ac49de2c2f9a54f00dc28a5aeb0dc7b113022
Parents: c5ca3c2
Author: Chia-Ping Tsai 
Authored: Sun Feb 18 21:45:04 2018 +0800
Committer: Andrew Purtell 
Committed: Tue Feb 20 16:59:48 2018 -0800

--
 .../hbase/client/BufferedMutatorImpl.java   | 47 +++-
 .../hadoop/hbase/client/TestAsyncProcess.java   | 44 ++
 .../hbase/regionserver/wal/ReaderBase.java  |  2 +-
 3 files changed, 81 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bc1ac49d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
index 9d24b4d..d4bc811 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
@@ -295,7 +295,7 @@ public class BufferedMutatorImpl implements BufferedMutator 
{
 break;
   }
   AsyncRequestFuture asf;
-  try (QueueRowAccess access = new QueueRowAccess()) {
+  try (QueueRowAccess access = createQueueRowAccess()) {
 if (access.isEmpty()) {
   // It means someone has gotten the ticker to run the flush.
   break;
@@ -406,16 +406,46 @@ public class BufferedMutatorImpl implements 
BufferedMutator {
 return currentWriteBufferSize.get();
   }
 
+  /**
+   * Count the mutations which haven't been processed.
+   * @return count of undealt mutation
+   */
   @VisibleForTesting
   int size() {
 return undealtMutationCount.get();
   }
 
-  private class QueueRowAccess implements RowAccess, Closeable {
+  /**
+   * Count the mutations which haven't been flushed
+   * @return count of unflushed mutation
+   */
+  @VisibleForTesting
+  int getUnflushedSize() {
+return writeAsyncBuffer.size();
+  }
+
+  @VisibleForTesting
+  QueueRowAccess createQueueRowAccess() {
+return new QueueRowAccess();
+  }
+
+  @VisibleForTesting
+  class QueueRowAccess implements RowAccess, Closeable {
 private int remainder = undealtMutationCount.getAndSet(0);
+private Mutation last = null;
+
+private void restoreLastMutation() {
+  // restore the last mutation since it isn't submitted
+  if (last != null) {
+writeAsyncBuffer.add(last);
+currentWriteBufferSize.addAndGet(last.heapSize());
+last = null;
+  }
+}
 
 @Override
 public void close() {
+  restoreLastMutation();
   if (remainder > 0) {
 undealtMutationCount.addAndGet(remainder);
 remainder = 0;
@@ -425,25 +455,22 @@ public class BufferedMutatorImpl implements 
BufferedMutator {
 @Override
 public Iterator iterator() {
   return new Iterator() {
-private final Iterator iter = writeAsyncBuffer.iterator();
 private int countDown = remainder;
-private Mutation last = null;
 @Override
 public boolean hasNext() {
-  if (countDown <= 0) {
-return false;
-  }
-  return iter.hasNext();
+  return countDown > 0;
 }
 @Override
 public Row next() {
+  restoreLastMutation();
   if (!hasNext()) {
 throw new NoSuchElementException();
   }
-  last = iter.next();
+  last = writeAsyncBuffer.poll();
   if (last == null) {
 throw new NoSuchElementException();
   }
+  currentWriteBufferSize.addAndGet(-last.heapSize());
   --countDown;
   return last;
 }
@@ -452,8 +479,6 @@ public class BufferedMutatorImpl implements BufferedMutator 
{
   if (last == null) {
 throw new IllegalStateException();
   }
-  iter.remove();
-  currentWriteBufferSize.addAndGet(-last.heapSize());
   --remainder;
   last = null;
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/bc1ac49d/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
--
diff --git 
a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess

hbase git commit: HBASE-20032 Receiving multiple warnings for missing reporting.plugins.plugin.version

2018-02-20 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 cbd138d41 -> c5ca3c2fe


HBASE-20032 Receiving multiple warnings for missing 
reporting.plugins.plugin.version


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c5ca3c2f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c5ca3c2f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c5ca3c2f

Branch: refs/heads/branch-2
Commit: c5ca3c2fe6bdf25ef54f68e48d2d3629c06f37f7
Parents: cbd138d
Author: Artem Ervits 
Authored: Tue Feb 20 15:15:34 2018 -0800
Committer: Michael Stack 
Committed: Tue Feb 20 15:17:00 2018 -0800

--
 pom.xml | 4 
 1 file changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c5ca3c2f/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 715f2eb..d8dcc2a 100755
--- a/pom.xml
+++ b/pom.xml
@@ -3401,6 +3401,7 @@
 
   
 maven-project-info-reports-plugin
+2.9
 
   
 
@@ -3430,6 +3431,7 @@
   
 org.apache.maven.plugins
 maven-javadoc-plugin
+3.0.0
 
   
   
@@ -3635,6 +3637,7 @@
   
 org.apache.maven.plugins
 maven-checkstyle-plugin
+${maven.checkstyle.version}
 
   target/**
 
@@ -3643,6 +3646,7 @@
   
 org.scala-tools
 maven-scala-plugin
+2.15.2
   
 
   



hbase git commit: HBASE-20032 Receiving multiple warnings for missing reporting.plugins.plugin.version

2018-02-20 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master b75a8f404 -> 0068b95c8


HBASE-20032 Receiving multiple warnings for missing 
reporting.plugins.plugin.version


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0068b95c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0068b95c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0068b95c

Branch: refs/heads/master
Commit: 0068b95c854d13e359b619aaa9879b5cbb993729
Parents: b75a8f4
Author: Artem Ervits 
Authored: Tue Feb 20 15:15:34 2018 -0800
Committer: Michael Stack 
Committed: Tue Feb 20 15:16:22 2018 -0800

--
 pom.xml | 4 
 1 file changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0068b95c/pom.xml
--
diff --git a/pom.xml b/pom.xml
index d33478a..951747e 100755
--- a/pom.xml
+++ b/pom.xml
@@ -3491,6 +3491,7 @@
 
   
 maven-project-info-reports-plugin
+2.9
 
   
 
@@ -3520,6 +3521,7 @@
   
 org.apache.maven.plugins
 maven-javadoc-plugin
+3.0.0
 
   
   
@@ -3721,6 +3723,7 @@
   
 org.apache.maven.plugins
 maven-checkstyle-plugin
+${maven.checkstyle.version}
 
   target/**
 
@@ -3729,6 +3732,7 @@
   
 org.scala-tools
 maven-scala-plugin
+2.15.2
   
 
   



hbase git commit: HBASE-19954 Separate TestBlockReorder into individual tests to avoid ShutdownHook suppression error against hadoop3

2018-02-20 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 7e2978787 -> cbd138d41


HBASE-19954 Separate TestBlockReorder into individual tests to avoid 
ShutdownHook suppression error against hadoop3


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cbd138d4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cbd138d4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cbd138d4

Branch: refs/heads/branch-2
Commit: cbd138d41a5afdf343591a3287a7163212ff4b76
Parents: 7e29787
Author: Ted Yu 
Authored: Tue Feb 20 14:56:20 2018 -0800
Committer: Michael Stack 
Committed: Tue Feb 20 14:56:46 2018 -0800

--
 .../hadoop/hbase/fs/TestBlockReorder.java   | 258 +
 .../hbase/fs/TestBlockReorderBlockLocation.java | 159 +++
 .../hbase/fs/TestBlockReorderMultiBlocks.java   | 279 +++
 3 files changed, 443 insertions(+), 253 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/cbd138d4/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
index 59d2229..a75c0d0 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
@@ -17,15 +17,10 @@
  */
 package org.apache.hadoop.hbase.fs;
 
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.lang.reflect.Field;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.net.BindException;
 import java.net.ServerSocket;
-import java.util.List;
-import java.util.concurrent.CountDownLatch;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -35,32 +30,14 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.MiniHBaseCluster;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.master.LoadBalancer;
-import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.regionserver.HRegionServer;
-import org.apache.hadoop.hbase.regionserver.Region;
-import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
-import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-import org.apache.hadoop.hdfs.protocol.DirectoryListing;
-import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.hadoop.ipc.RemoteException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -74,6 +51,11 @@ import org.slf4j.LoggerFactory;
 
 /**
  * Tests for the hdfs fix from HBASE-6435.
+ *
+ * Please don't add new subtest which involves starting / stopping 
MiniDFSCluster in this class.
+ * When stopping MiniDFSCluster, shutdown hooks would be cleared in hadoop's 
ShutdownHookManager
+ *   in hadoop 3.
+ * This leads to 'Failed suppression of fs shutdown hook' error in region 
server.
  */
 @Category({MiscTests.class, LargeTests.class})
 public class TestBlockReorder {
@@ -249,234 +231,4 @@ public class TestBlockReorder {
 }
   }
 
-  /**
-   * Test that the hook works within HBase, including when there are multiple 
blocks.
-   */
-  @Test()
-  public void testHBaseCluster() throws Exception {
-byte[] sb = Bytes.toBytes("sb");
-htu.startMiniZKCluster();
-
-MiniHBaseCluster hbm = htu.startMiniHBaseCluster(1, 1);
-hbm.waitForActiveAndReadyMaster();
-HRegionServer targetRs = LoadBalancer.isTablesOnMaster(hbm.getConf())? 
hbm.getMaster():
-  hbm.getRegionServer(0);
-
-// We want to have a datanode with the same name as the region server, so
-//  we're going to get the regionservername, and start 

hbase git commit: HBASE-19954 Separate TestBlockReorder into individual tests to avoid ShutdownHook suppression error against hadoop3

2018-02-20 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 6b8439366 -> b75a8f404


HBASE-19954 Separate TestBlockReorder into individual tests to avoid 
ShutdownHook suppression error against hadoop3


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b75a8f40
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b75a8f40
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b75a8f40

Branch: refs/heads/master
Commit: b75a8f4047e2a33031b121dfe091a992809ebae1
Parents: 6b84393
Author: Ted Yu 
Authored: Tue Feb 20 14:56:20 2018 -0800
Committer: Michael Stack 
Committed: Tue Feb 20 14:56:20 2018 -0800

--
 .../hadoop/hbase/fs/TestBlockReorder.java   | 258 +
 .../hbase/fs/TestBlockReorderBlockLocation.java | 159 +++
 .../hbase/fs/TestBlockReorderMultiBlocks.java   | 279 +++
 3 files changed, 443 insertions(+), 253 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b75a8f40/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
index 59d2229..a75c0d0 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
@@ -17,15 +17,10 @@
  */
 package org.apache.hadoop.hbase.fs;
 
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.lang.reflect.Field;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.net.BindException;
 import java.net.ServerSocket;
-import java.util.List;
-import java.util.concurrent.CountDownLatch;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -35,32 +30,14 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.MiniHBaseCluster;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.master.LoadBalancer;
-import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.regionserver.HRegionServer;
-import org.apache.hadoop.hbase.regionserver.Region;
-import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
-import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-import org.apache.hadoop.hdfs.protocol.DirectoryListing;
-import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.hadoop.ipc.RemoteException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -74,6 +51,11 @@ import org.slf4j.LoggerFactory;
 
 /**
  * Tests for the hdfs fix from HBASE-6435.
+ *
+ * Please don't add new subtest which involves starting / stopping 
MiniDFSCluster in this class.
+ * When stopping MiniDFSCluster, shutdown hooks would be cleared in hadoop's 
ShutdownHookManager
+ *   in hadoop 3.
+ * This leads to 'Failed suppression of fs shutdown hook' error in region 
server.
  */
 @Category({MiscTests.class, LargeTests.class})
 public class TestBlockReorder {
@@ -249,234 +231,4 @@ public class TestBlockReorder {
 }
   }
 
-  /**
-   * Test that the hook works within HBase, including when there are multiple 
blocks.
-   */
-  @Test()
-  public void testHBaseCluster() throws Exception {
-byte[] sb = Bytes.toBytes("sb");
-htu.startMiniZKCluster();
-
-MiniHBaseCluster hbm = htu.startMiniHBaseCluster(1, 1);
-hbm.waitForActiveAndReadyMaster();
-HRegionServer targetRs = LoadBalancer.isTablesOnMaster(hbm.getConf())? 
hbm.getMaster():
-  hbm.getRegionServer(0);
-
-// We want to have a datanode with the same name as the region server, so
-//  we're going to get the regionservername, and start a ne

hbase git commit: HBASE-19400 Add missing security checks in MasterRpcServices

2018-02-20 Thread appy
Repository: hbase
Updated Branches:
  refs/heads/master 148356a7b -> 6b8439366


HBASE-19400 Add missing security checks in MasterRpcServices

- Added ADMIN permission check for following rpc calls:
  normalize, setNormalizerRunning, runCatalogScan, enableCatalogJanitor, 
runCleanerChore,
  setCleanerChoreRunning, execMasterService, execProcedure, execProcedureWithRet
- Moved authorizationEnabled check to start of AccessChecker's functions. 
Currently, and IDK why,
  we call authManager.authorize() first and then discard its result if 
authorizationEnabled is false. Weird.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6b843936
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6b843936
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6b843936

Branch: refs/heads/master
Commit: 6b8439366134b6749895b6019bc068d169865b48
Parents: 148356a
Author: Apekshit Sharma 
Authored: Tue Feb 13 12:33:43 2018 -0800
Committer: Apekshit Sharma 
Committed: Tue Feb 20 13:37:20 2018 -0700

--
 .../hbase/rsgroup/RSGroupAdminEndpoint.java |   3 +-
 .../hadoop/hbase/master/MasterRpcServices.java  | 120 -
 .../hbase/regionserver/HRegionServer.java   |   2 +-
 .../hbase/regionserver/RSRpcServices.java   |  25 +-
 .../hbase/security/access/AccessChecker.java| 102 
 .../hbase/security/access/AccessController.java |   5 +-
 .../security/access/TestAccessController.java   |  13 +-
 .../access/TestAdminOnlyOperations.java | 244 +++
 8 files changed, 389 insertions(+), 125 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6b843936/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
index 435d138..7fec32d 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
@@ -79,7 +79,6 @@ import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.security.access.AccessChecker;
 import org.apache.hadoop.hbase.security.access.Permission.Action;
-import org.apache.hadoop.hbase.security.access.TableAuthManager;
 import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
@@ -127,7 +126,7 @@ public class RSGroupAdminEndpoint implements 
MasterCoprocessor, MasterObserver {
 
   @Override
   public void stop(CoprocessorEnvironment env) {
-TableAuthManager.release(accessChecker.getAuthManager());
+accessChecker.stop();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/6b843936/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index 377a9c6..b4f0faf 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -86,6 +86,7 @@ import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.access.AccessChecker;
 import org.apache.hadoop.hbase.security.access.AccessController;
+import org.apache.hadoop.hbase.security.access.Permission;
 import org.apache.hadoop.hbase.security.visibility.VisibilityController;
 import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
@@ -349,6 +350,24 @@ public class MasterRpcServices extends RSRpcServices
 return new MasterAnnotationReadingPriorityFunction(this);
   }
 
+  /**
+   * Checks for the following pre-checks in order:
+   * 
+   *   Master is initialized
+   *   Rpc caller has admin permissions
+   * 
+   * @param requestName name of rpc request. Used in reporting failures to 
provide context.
+   * @throws ServiceException If any of the above listed pre-check fails.
+   */
+  private void rpcPreCheck(String requestName) throws ServiceException {
+try {
+  master.checkInitialized();
+  requirePermission(requestName, Permission.Action.ADMIN);
+} catch (IOException ioe) {
+  throw new ServiceException(ioe);
+}
+  }
+
   enum BalanceSwit

hbase git commit: HBASE-20029 @Ignore TestQuotaThrottle and TestReplicasClient#testCancelOfMultiGet

2018-02-20 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 51cea3e2c -> 148356a7b


HBASE-20029 @Ignore TestQuotaThrottle and 
TestReplicasClient#testCancelOfMultiGet


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/148356a7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/148356a7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/148356a7

Branch: refs/heads/master
Commit: 148356a7bfeacce17aa85029348e5548579e78d6
Parents: 51cea3e
Author: Michael Stack 
Authored: Tue Feb 20 12:08:35 2018 -0800
Committer: Michael Stack 
Committed: Tue Feb 20 12:09:47 2018 -0800

--
 .../java/org/apache/hadoop/hbase/client/TestReplicasClient.java| 2 ++
 .../java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java | 2 ++
 2 files changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/148356a7/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
index c55c6ca..311f651 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
@@ -61,6 +61,7 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
@@ -616,6 +617,7 @@ public class TestReplicasClient {
 }
   }
 
+  @Ignore // Disabled because it is flakey. Fails 17% on constrained GCE. %3 
on Apache.
   @Test
   public void testCancelOfMultiGet() throws Exception {
 openRegion(hriSecondary);

http://git-wip-us.apache.org/repos/asf/hbase/blob/148356a7/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
index a7b8d9d..3b06d92 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
@@ -41,11 +41,13 @@ import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+@Ignore // Disabled because flakey. Fails ~30% on a resource constrained GCE 
though not on Apache.
 @Category({RegionServerTests.class, MediumTests.class})
 public class TestQuotaThrottle {
 



hbase git commit: HBASE-20029 @Ignore TestQuotaThrottle and TestReplicasClient#testCancelOfMultiGet

2018-02-20 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 9be0360c5 -> 7e2978787


HBASE-20029 @Ignore TestQuotaThrottle and 
TestReplicasClient#testCancelOfMultiGet


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7e297878
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7e297878
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7e297878

Branch: refs/heads/branch-2
Commit: 7e2978787d99b7a6b82f2601be1cda6a9af9c7ca
Parents: 9be0360
Author: Michael Stack 
Authored: Tue Feb 20 12:08:35 2018 -0800
Committer: Michael Stack 
Committed: Tue Feb 20 12:08:35 2018 -0800

--
 .../java/org/apache/hadoop/hbase/client/TestReplicasClient.java| 2 ++
 .../java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java | 2 ++
 2 files changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7e297878/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
index c55c6ca..311f651 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
@@ -61,6 +61,7 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
@@ -616,6 +617,7 @@ public class TestReplicasClient {
 }
   }
 
+  @Ignore // Disabled because it is flakey. Fails 17% on constrained GCE. %3 
on Apache.
   @Test
   public void testCancelOfMultiGet() throws Exception {
 openRegion(hriSecondary);

http://git-wip-us.apache.org/repos/asf/hbase/blob/7e297878/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
index a7b8d9d..3b06d92 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
@@ -41,11 +41,13 @@ import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+@Ignore // Disabled because flakey. Fails ~30% on a resource constrained GCE 
though not on Apache.
 @Category({RegionServerTests.class, MediumTests.class})
 public class TestQuotaThrottle {
 



hbase git commit: Revert "HBASE-19970 Remove unused functions from TableAuthManager."

2018-02-20 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 af5cd4985 -> f2631371f


Revert "HBASE-19970 Remove unused functions from TableAuthManager."

This reverts commit e6ce789b6fa44592c3a566703c78de0687154a84.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f2631371
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f2631371
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f2631371

Branch: refs/heads/branch-1.4
Commit: f2631371fc3d8656247cd1355b34bfd7539a6bf6
Parents: af5cd49
Author: Michael Stack 
Authored: Tue Feb 20 11:12:36 2018 -0800
Committer: Michael Stack 
Committed: Tue Feb 20 11:13:55 2018 -0800

--
 .../security/access/AccessControlLists.java |   9 +-
 .../hbase/security/access/AccessController.java |   4 +-
 .../hbase/security/access/TableAuthManager.java |  75 
 .../security/access/TestTablePermissions.java   |   2 +-
 .../access/TestZKPermissionWatcher.java | 179 ---
 .../access/TestZKPermissionsWatcher.java| 178 ++
 6 files changed, 259 insertions(+), 188 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f2631371/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
index f508110..57c0f7b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
@@ -73,10 +73,6 @@ import org.apache.hadoop.io.Text;
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ListMultimap;
 import com.google.common.collect.Lists;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableFactories;
-import org.apache.hadoop.io.WritableUtils;
-import org.apache.jasper.tagplugins.jstl.core.Remove;
 
 /**
  * Maintains lists of permission grants to users and groups to allow for
@@ -671,7 +667,8 @@ public class AccessControlLists {
*
* Writes a set of permission [user: table permission]
*/
-  public static byte[] writePermissionsAsBytes(ListMultimap perms) {
+  public static byte[] writePermissionsAsBytes(ListMultimap perms,
+  Configuration conf) {
 return 
ProtobufUtil.prependPBMagic(ProtobufUtil.toUserTablePermissions(perms).toByteArray());
   }
 
@@ -758,7 +755,7 @@ public class AccessControlLists {
  // Deserialize the table permissions from the KV
  // TODO: This can be improved. Don't build UsersAndPermissions just 
to unpack it again,
  // use the builder
- AccessControlProtos.UsersAndPermissions.Builder builder =
+ AccessControlProtos.UsersAndPermissions.Builder builder = 
AccessControlProtos.UsersAndPermissions.newBuilder();
  ProtobufUtil.mergeFrom(builder, tag.getBuffer(), tag.getTagOffset(), 
tag.getTagLength());
  ListMultimap kvPerms =

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2631371/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index c889a3e..fd0a704 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -243,7 +243,7 @@ public class AccessController extends 
BaseMasterAndRegionObserver
   tables.entrySet()) {
   byte[] entry = t.getKey();
   ListMultimap perms = t.getValue();
-  byte[] serialized = AccessControlLists.writePermissionsAsBytes(perms);
+  byte[] serialized = AccessControlLists.writePermissionsAsBytes(perms, 
conf);
   getAuthManager().getZKPermissionWatcher().writeToZookeeper(entry, 
serialized);
 }
 initialized = true;
@@ -275,7 +275,7 @@ public class AccessController extends 
BaseMasterAndRegionObserver
 try (Table t = regionEnv.getTable(AccessControlLists.ACL_TABLE_NAME)) {
   ListMultimap perms =
   AccessControlLists.getPermissions(conf, entry, t);
-  byte[] serialized = 
AccessControlLists.writePermissionsAsBytes(perms);
+  byte[] serialized = 
AccessControlLists.writePermissionsAsBytes(perms, conf);
   zkw.writeToZookeeper(entry, serialized);
 }
   } c

[1/2] hbase git commit: Revert "HBASE-19970 (addendum for 1.x only) Remove unused functions from TableAuthManager."

2018-02-20 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1 7990546ef -> fd8189d31


Revert "HBASE-19970 (addendum for 1.x only) Remove unused functions from 
TableAuthManager."

This reverts commit 0f79c497c52fbe78a1f344675579b6eb26d23b70.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/071281cf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/071281cf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/071281cf

Branch: refs/heads/branch-1
Commit: 071281cf52307cf3def599d7850ef99e3f7c464f
Parents: 7990546
Author: Michael Stack 
Authored: Tue Feb 20 11:12:33 2018 -0800
Committer: Michael Stack 
Committed: Tue Feb 20 11:12:33 2018 -0800

--
 .../hadoop/hbase/security/access/TestZKPermissionWatcher.java  | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/071281cf/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java
index 0961cab..a80f184 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java
@@ -47,8 +47,8 @@ import org.junit.experimental.categories.Category;
  * Test the reading and writing of access permissions to and from zookeeper.
  */
 @Category(LargeTests.class)
-public class TestZKPermissionWatcher {
-  private static final Log LOG = 
LogFactory.getLog(TestZKPermissionWatcher.class);
+public class TestZKPermissionsWatcher {
+  private static final Log LOG = 
LogFactory.getLog(TestZKPermissionsWatcher.class);
   private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
   private static TableAuthManager AUTH_A;
   private static TableAuthManager AUTH_B;
@@ -90,7 +90,7 @@ public class TestZKPermissionWatcher {
   }
 
   private void setTableACL(
-  User user, TableAuthManager srcAuthManager, final TableAuthManager 
destAuthManager,
+  User user, TableAuthManager srcAuthManager, TableAuthManager 
destAuthManager,
   TablePermission.Action... actions) throws Exception{
 // update ACL: george RW
 ListMultimap perms = ArrayListMultimap.create();



[2/2] hbase git commit: Revert "HBASE-19970 Remove unused functions from TableAuthManager."

2018-02-20 Thread stack
Revert "HBASE-19970 Remove unused functions from TableAuthManager."

This reverts commit e6ce789b6fa44592c3a566703c78de0687154a84.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fd8189d3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fd8189d3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fd8189d3

Branch: refs/heads/branch-1
Commit: fd8189d31d900896a93db2d9c08303f3dedc1dd8
Parents: 071281c
Author: Michael Stack 
Authored: Tue Feb 20 11:12:36 2018 -0800
Committer: Michael Stack 
Committed: Tue Feb 20 11:12:36 2018 -0800

--
 .../security/access/AccessControlLists.java |   9 +-
 .../hbase/security/access/AccessController.java |   4 +-
 .../hbase/security/access/TableAuthManager.java |  75 
 .../security/access/TestTablePermissions.java   |   2 +-
 .../access/TestZKPermissionWatcher.java | 179 ---
 .../access/TestZKPermissionsWatcher.java| 178 ++
 6 files changed, 259 insertions(+), 188 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/fd8189d3/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
index f508110..57c0f7b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
@@ -73,10 +73,6 @@ import org.apache.hadoop.io.Text;
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ListMultimap;
 import com.google.common.collect.Lists;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableFactories;
-import org.apache.hadoop.io.WritableUtils;
-import org.apache.jasper.tagplugins.jstl.core.Remove;
 
 /**
  * Maintains lists of permission grants to users and groups to allow for
@@ -671,7 +667,8 @@ public class AccessControlLists {
*
* Writes a set of permission [user: table permission]
*/
-  public static byte[] writePermissionsAsBytes(ListMultimap perms) {
+  public static byte[] writePermissionsAsBytes(ListMultimap perms,
+  Configuration conf) {
 return 
ProtobufUtil.prependPBMagic(ProtobufUtil.toUserTablePermissions(perms).toByteArray());
   }
 
@@ -758,7 +755,7 @@ public class AccessControlLists {
  // Deserialize the table permissions from the KV
  // TODO: This can be improved. Don't build UsersAndPermissions just 
to unpack it again,
  // use the builder
- AccessControlProtos.UsersAndPermissions.Builder builder =
+ AccessControlProtos.UsersAndPermissions.Builder builder = 
AccessControlProtos.UsersAndPermissions.newBuilder();
  ProtobufUtil.mergeFrom(builder, tag.getBuffer(), tag.getTagOffset(), 
tag.getTagLength());
  ListMultimap kvPerms =

http://git-wip-us.apache.org/repos/asf/hbase/blob/fd8189d3/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index c889a3e..fd0a704 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -243,7 +243,7 @@ public class AccessController extends 
BaseMasterAndRegionObserver
   tables.entrySet()) {
   byte[] entry = t.getKey();
   ListMultimap perms = t.getValue();
-  byte[] serialized = AccessControlLists.writePermissionsAsBytes(perms);
+  byte[] serialized = AccessControlLists.writePermissionsAsBytes(perms, 
conf);
   getAuthManager().getZKPermissionWatcher().writeToZookeeper(entry, 
serialized);
 }
 initialized = true;
@@ -275,7 +275,7 @@ public class AccessController extends 
BaseMasterAndRegionObserver
 try (Table t = regionEnv.getTable(AccessControlLists.ACL_TABLE_NAME)) {
   ListMultimap perms =
   AccessControlLists.getPermissions(conf, entry, t);
-  byte[] serialized = 
AccessControlLists.writePermissionsAsBytes(perms);
+  byte[] serialized = 
AccessControlLists.writePermissionsAsBytes(perms, conf);
   zkw.writeToZookeeper(entry, serialized);
 }
   } catch (IOException ex) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/fd8189d3/hba

hbase git commit: HBASE-20024 TestMergeTableRegionsProcedure is STILL flakey

2018-02-20 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 2b1969898 -> 51cea3e2c


HBASE-20024 TestMergeTableRegionsProcedure is STILL flakey


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/51cea3e2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/51cea3e2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/51cea3e2

Branch: refs/heads/master
Commit: 51cea3e2c3c75b82148f14ca7903a88daa3738d9
Parents: 2b19698
Author: Michael Stack 
Authored: Tue Feb 20 07:21:52 2018 -0800
Committer: Michael Stack 
Committed: Tue Feb 20 11:08:27 2018 -0800

--
 .../hadoop/hbase/procedure2/ProcedureExecutor.java |  8 
 .../hadoop/hbase/procedure2/StateMachineProcedure.java |  9 ++---
 .../master/assignment/MergeTableRegionsProcedure.java  |  8 
 .../master/assignment/SplitTableRegionProcedure.java   | 13 ++---
 .../hbase/master/procedure/DeleteTableProcedure.java   |  4 ++--
 .../master/procedure/MasterProcedureScheduler.java |  2 +-
 .../assignment/TestMergeTableRegionsProcedure.java |  2 +-
 .../procedure/MasterProcedureTestingUtility.java   |  2 +-
 8 files changed, 29 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/51cea3e2/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index af0a61b..665d223 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -931,18 +931,18 @@ public class ProcedureExecutor {
* Send an abort notification the specified procedure.
* Depending on the procedure implementation the abort can be considered or 
ignored.
* @param procId the procedure to abort
-   * @return true if the procedure exist and has received the abort, otherwise 
false.
+   * @return true if the procedure exists and has received the abort, 
otherwise false.
*/
   public boolean abort(final long procId) {
 return abort(procId, true);
   }
 
   /**
-   * Send an abort notification the specified procedure.
-   * Depending on the procedure implementation the abort can be considered or 
ignored.
+   * Send an abort notification to the specified procedure.
+   * Depending on the procedure implementation, the abort can be considered or 
ignored.
* @param procId the procedure to abort
* @param mayInterruptIfRunning if the proc completed at least one step, 
should it be aborted?
-   * @return true if the procedure exist and has received the abort, otherwise 
false.
+   * @return true if the procedure exists and has received the abort, 
otherwise false.
*/
   public boolean abort(final long procId, final boolean mayInterruptIfRunning) 
{
 final Procedure proc = procedures.get(procId);

http://git-wip-us.apache.org/repos/asf/hbase/blob/51cea3e2/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
index 20bba58..c530386 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
@@ -209,18 +209,13 @@ public abstract class StateMachineProcedure
 
   @Override
   protected boolean abort(final TEnvironment env) {
-final boolean isDebugEnabled = LOG.isDebugEnabled();
 final TState state = getCurrentState();
-if (isDebugEnabled) {
-  LOG.debug("abort requested for " + this + " state=" + state);
-}
-
+LOG.debug("Abort requested for {}", this);
 if (hasMoreState()) {
   aborted.set(true);
   return true;
-} else if (isDebugEnabled) {
-  LOG.debug("ignoring abort request on state=" + state + " for " + this);
 }
+LOG.debug("Ignoring abort request on {}", this);
 return false;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/51cea3e2/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mas

hbase git commit: HBASE-20024 TestMergeTableRegionsProcedure is STILL flakey

2018-02-20 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 a5443c18d -> 9be0360c5


HBASE-20024 TestMergeTableRegionsProcedure is STILL flakey


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9be0360c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9be0360c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9be0360c

Branch: refs/heads/branch-2
Commit: 9be0360c5d2481506cc70fc624166524ee57a39b
Parents: a5443c1
Author: Michael Stack 
Authored: Tue Feb 20 07:21:52 2018 -0800
Committer: Michael Stack 
Committed: Tue Feb 20 11:07:36 2018 -0800

--
 .../hadoop/hbase/procedure2/ProcedureExecutor.java |  8 
 .../hadoop/hbase/procedure2/StateMachineProcedure.java |  9 ++---
 .../master/assignment/MergeTableRegionsProcedure.java  |  8 
 .../master/assignment/SplitTableRegionProcedure.java   | 13 ++---
 .../hbase/master/procedure/DeleteTableProcedure.java   |  4 ++--
 .../master/procedure/MasterProcedureScheduler.java |  2 +-
 .../assignment/TestMergeTableRegionsProcedure.java |  2 +-
 .../procedure/MasterProcedureTestingUtility.java   |  2 +-
 8 files changed, 29 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9be0360c/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index af0a61b..665d223 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -931,18 +931,18 @@ public class ProcedureExecutor {
* Send an abort notification the specified procedure.
* Depending on the procedure implementation the abort can be considered or 
ignored.
* @param procId the procedure to abort
-   * @return true if the procedure exist and has received the abort, otherwise 
false.
+   * @return true if the procedure exists and has received the abort, 
otherwise false.
*/
   public boolean abort(final long procId) {
 return abort(procId, true);
   }
 
   /**
-   * Send an abort notification the specified procedure.
-   * Depending on the procedure implementation the abort can be considered or 
ignored.
+   * Send an abort notification to the specified procedure.
+   * Depending on the procedure implementation, the abort can be considered or 
ignored.
* @param procId the procedure to abort
* @param mayInterruptIfRunning if the proc completed at least one step, 
should it be aborted?
-   * @return true if the procedure exist and has received the abort, otherwise 
false.
+   * @return true if the procedure exists and has received the abort, 
otherwise false.
*/
   public boolean abort(final long procId, final boolean mayInterruptIfRunning) 
{
 final Procedure proc = procedures.get(procId);

http://git-wip-us.apache.org/repos/asf/hbase/blob/9be0360c/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
index 20bba58..c530386 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
@@ -209,18 +209,13 @@ public abstract class StateMachineProcedure
 
   @Override
   protected boolean abort(final TEnvironment env) {
-final boolean isDebugEnabled = LOG.isDebugEnabled();
 final TState state = getCurrentState();
-if (isDebugEnabled) {
-  LOG.debug("abort requested for " + this + " state=" + state);
-}
-
+LOG.debug("Abort requested for {}", this);
 if (hasMoreState()) {
   aborted.set(true);
   return true;
-} else if (isDebugEnabled) {
-  LOG.debug("ignoring abort request on state=" + state + " for " + this);
 }
+LOG.debug("Ignoring abort request on {}", this);
 return false;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/9be0360c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase

hbase git commit: HBASE-20023 CompactionTool command line examples are incorrect

2018-02-20 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-2 0152d5e2a -> a5443c18d


HBASE-20023 CompactionTool command line examples are incorrect

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a5443c18
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a5443c18
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a5443c18

Branch: refs/heads/branch-2
Commit: a5443c18d2c8f92cb1205f494d613aeb101aeeb1
Parents: 0152d5e
Author: Artem Ervits 
Authored: Tue Feb 20 10:15:27 2018 -0500
Committer: tedyu 
Committed: Tue Feb 20 08:13:59 2018 -0800

--
 .../org/apache/hadoop/hbase/regionserver/CompactionTool.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a5443c18/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
--
diff --git 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
index 00ad54b..78db6fc 100644
--- 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
+++ 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
@@ -460,10 +460,10 @@ public class CompactionTool extends Configured implements 
Tool {
 System.err.println();
 System.err.println("Examples:");
 System.err.println(" To compact the full 'TestTable' using MapReduce:");
-System.err.println(" $ hbase " + this.getClass().getName() + " -mapred 
hdfs:///hbase/data/default/TestTable");
+System.err.println(" $ hbase " + this.getClass().getName() + " -mapred 
hdfs://hbase/data/default/TestTable");
 System.err.println();
 System.err.println(" To compact column family 'x' of the table 'TestTable' 
region 'abc':");
-System.err.println(" $ hbase " + this.getClass().getName() + " 
hdfs:///hbase/data/default/TestTable/abc/x");
+System.err.println(" $ hbase " + this.getClass().getName() + " 
hdfs://hbase/data/default/TestTable/abc/x");
   }
 
   public static void main(String[] args) throws Exception {



hbase git commit: HBASE-20023 CompactionTool command line examples are incorrect

2018-02-20 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 69d2becc7 -> 2b1969898


HBASE-20023 CompactionTool command line examples are incorrect

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2b196989
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2b196989
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2b196989

Branch: refs/heads/master
Commit: 2b1969898f76e227ba3c89c7e5e8127f11236496
Parents: 69d2bec
Author: Artem Ervits 
Authored: Tue Feb 20 10:15:27 2018 -0500
Committer: tedyu 
Committed: Tue Feb 20 08:13:12 2018 -0800

--
 .../org/apache/hadoop/hbase/regionserver/CompactionTool.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2b196989/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
--
diff --git 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
index 00ad54b..78db6fc 100644
--- 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
+++ 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
@@ -460,10 +460,10 @@ public class CompactionTool extends Configured implements 
Tool {
 System.err.println();
 System.err.println("Examples:");
 System.err.println(" To compact the full 'TestTable' using MapReduce:");
-System.err.println(" $ hbase " + this.getClass().getName() + " -mapred 
hdfs:///hbase/data/default/TestTable");
+System.err.println(" $ hbase " + this.getClass().getName() + " -mapred 
hdfs://hbase/data/default/TestTable");
 System.err.println();
 System.err.println(" To compact column family 'x' of the table 'TestTable' 
region 'abc':");
-System.err.println(" $ hbase " + this.getClass().getName() + " 
hdfs:///hbase/data/default/TestTable/abc/x");
+System.err.println(" $ hbase " + this.getClass().getName() + " 
hdfs://hbase/data/default/TestTable/abc/x");
   }
 
   public static void main(String[] args) throws Exception {



[1/4] hbase git commit: HBASE-14897 TestTableLockManager.testReapAllTableLocks is flakey

2018-02-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1 99306bba7 -> 7990546ef
  refs/heads/branch-1.2 ef847f841 -> dacaaea0f
  refs/heads/branch-1.3 1bd38fde4 -> 149ebf3f6
  refs/heads/branch-1.4 83bbad8bd -> af5cd4985


HBASE-14897 TestTableLockManager.testReapAllTableLocks is flakey

(cherry picked from commit 47082579a85fae1b3f7156963609819369b353d8)
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7990546e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7990546e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7990546e

Branch: refs/heads/branch-1
Commit: 7990546efef3aaadcd1ebaa8b8ac01f37df0a119
Parents: 99306bb
Author: chenheng 
Authored: Tue Dec 1 10:31:00 2015 +0800
Committer: Sean Busbey 
Committed: Tue Feb 20 08:50:22 2018 -0600

--
 .../hbase/master/TestTableLockManager.java  | 32 +---
 1 file changed, 28 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7990546e/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
index 67806c5..16a6450 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
@@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.InterProcessLock;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.ScheduledChore;
 import org.apache.hadoop.hbase.ServerName;
@@ -55,6 +55,7 @@ import 
org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.exceptions.LockTimeoutException;
 import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.LoadTestTool;
 import org.apache.hadoop.hbase.util.StoppableImplementation;
@@ -130,7 +131,7 @@ public class TestTableLockManager {
 
 HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
 
master.getMasterCoprocessorHost().load(TestAlterAndDisableMasterObserver.class,
-0, TEST_UTIL.getConfiguration());
+0, TEST_UTIL.getConfiguration());
 
 ExecutorService executor = Executors.newFixedThreadPool(2);
 Future alterTableFuture = executor.submit(new Callable() {
@@ -234,6 +235,23 @@ public class TestTableLockManager {
 
   }
 
+  public class TableLockCounter implements InterProcessLock.MetadataHandler {
+
+private int lockCount = 0;
+
+@Override
+public void handleMetadata(byte[] metadata) {
+  lockCount++;
+}
+
+public void reset() {
+  lockCount = 0;
+}
+
+public int getLockCount() {
+  return lockCount;
+}
+  }
 
   @Test(timeout = 60)
   public void testReapAllTableLocks() throws Exception {
@@ -258,7 +276,7 @@ public class TestTableLockManager {
   public Void call() throws Exception {
 writeLocksAttempted.countDown();
 lockManager.writeLock(TableName.valueOf(table),
-"testReapAllTableLocks").acquire();
+"testReapAllTableLocks").acquire();
 writeLocksObtained.countDown();
 return null;
   }
@@ -269,9 +287,15 @@ public class TestTableLockManager {
 writeLocksObtained.await();
 writeLocksAttempted.await();
 
+TableLockCounter counter = new TableLockCounter();
+do {
+  counter.reset();
+  lockManager.visitAllLocks(counter);
+  Thread.sleep(10);
+} while (counter.getLockCount() != 10);
+
 //now reap all table locks
 lockManager.reapWriteLocks();
-
 
TEST_UTIL.getConfiguration().setInt(TableLockManager.TABLE_WRITE_LOCK_TIMEOUT_MS,
 0);
 TableLockManager zeroTimeoutLockManager = 
TableLockManager.createTableLockManager(
   TEST_UTIL.getConfiguration(), TEST_UTIL.getZooKeeperWatcher(), 
serverName);



[3/4] hbase git commit: HBASE-14897 TestTableLockManager.testReapAllTableLocks is flakey

2018-02-20 Thread busbey
HBASE-14897 TestTableLockManager.testReapAllTableLocks is flakey

(cherry picked from commit 47082579a85fae1b3f7156963609819369b353d8)
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/149ebf3f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/149ebf3f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/149ebf3f

Branch: refs/heads/branch-1.3
Commit: 149ebf3f6fcb7413f54369f62ff557b0ee6e71d5
Parents: 1bd38fd
Author: chenheng 
Authored: Tue Dec 1 10:31:00 2015 +0800
Committer: Sean Busbey 
Committed: Tue Feb 20 09:17:12 2018 -0600

--
 .../hbase/master/TestTableLockManager.java  | 32 +---
 1 file changed, 28 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/149ebf3f/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
index 67806c5..16a6450 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
@@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.InterProcessLock;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.ScheduledChore;
 import org.apache.hadoop.hbase.ServerName;
@@ -55,6 +55,7 @@ import 
org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.exceptions.LockTimeoutException;
 import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.LoadTestTool;
 import org.apache.hadoop.hbase.util.StoppableImplementation;
@@ -130,7 +131,7 @@ public class TestTableLockManager {
 
 HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
 
master.getMasterCoprocessorHost().load(TestAlterAndDisableMasterObserver.class,
-0, TEST_UTIL.getConfiguration());
+0, TEST_UTIL.getConfiguration());
 
 ExecutorService executor = Executors.newFixedThreadPool(2);
 Future alterTableFuture = executor.submit(new Callable() {
@@ -234,6 +235,23 @@ public class TestTableLockManager {
 
   }
 
+  public class TableLockCounter implements InterProcessLock.MetadataHandler {
+
+private int lockCount = 0;
+
+@Override
+public void handleMetadata(byte[] metadata) {
+  lockCount++;
+}
+
+public void reset() {
+  lockCount = 0;
+}
+
+public int getLockCount() {
+  return lockCount;
+}
+  }
 
   @Test(timeout = 60)
   public void testReapAllTableLocks() throws Exception {
@@ -258,7 +276,7 @@ public class TestTableLockManager {
   public Void call() throws Exception {
 writeLocksAttempted.countDown();
 lockManager.writeLock(TableName.valueOf(table),
-"testReapAllTableLocks").acquire();
+"testReapAllTableLocks").acquire();
 writeLocksObtained.countDown();
 return null;
   }
@@ -269,9 +287,15 @@ public class TestTableLockManager {
 writeLocksObtained.await();
 writeLocksAttempted.await();
 
+TableLockCounter counter = new TableLockCounter();
+do {
+  counter.reset();
+  lockManager.visitAllLocks(counter);
+  Thread.sleep(10);
+} while (counter.getLockCount() != 10);
+
 //now reap all table locks
 lockManager.reapWriteLocks();
-
 
TEST_UTIL.getConfiguration().setInt(TableLockManager.TABLE_WRITE_LOCK_TIMEOUT_MS,
 0);
 TableLockManager zeroTimeoutLockManager = 
TableLockManager.createTableLockManager(
   TEST_UTIL.getConfiguration(), TEST_UTIL.getZooKeeperWatcher(), 
serverName);



[2/4] hbase git commit: HBASE-14897 TestTableLockManager.testReapAllTableLocks is flakey

2018-02-20 Thread busbey
HBASE-14897 TestTableLockManager.testReapAllTableLocks is flakey

(cherry picked from commit 47082579a85fae1b3f7156963609819369b353d8)
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/af5cd498
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/af5cd498
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/af5cd498

Branch: refs/heads/branch-1.4
Commit: af5cd4985df91a6fc8b4e762103d45602a59162f
Parents: 83bbad8
Author: chenheng 
Authored: Tue Dec 1 10:31:00 2015 +0800
Committer: Sean Busbey 
Committed: Tue Feb 20 09:11:43 2018 -0600

--
 .../hbase/master/TestTableLockManager.java  | 32 +---
 1 file changed, 28 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/af5cd498/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
index 67806c5..16a6450 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
@@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.InterProcessLock;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.ScheduledChore;
 import org.apache.hadoop.hbase.ServerName;
@@ -55,6 +55,7 @@ import 
org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.exceptions.LockTimeoutException;
 import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.LoadTestTool;
 import org.apache.hadoop.hbase.util.StoppableImplementation;
@@ -130,7 +131,7 @@ public class TestTableLockManager {
 
 HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
 
master.getMasterCoprocessorHost().load(TestAlterAndDisableMasterObserver.class,
-0, TEST_UTIL.getConfiguration());
+0, TEST_UTIL.getConfiguration());
 
 ExecutorService executor = Executors.newFixedThreadPool(2);
 Future alterTableFuture = executor.submit(new Callable() {
@@ -234,6 +235,23 @@ public class TestTableLockManager {
 
   }
 
+  public class TableLockCounter implements InterProcessLock.MetadataHandler {
+
+private int lockCount = 0;
+
+@Override
+public void handleMetadata(byte[] metadata) {
+  lockCount++;
+}
+
+public void reset() {
+  lockCount = 0;
+}
+
+public int getLockCount() {
+  return lockCount;
+}
+  }
 
   @Test(timeout = 60)
   public void testReapAllTableLocks() throws Exception {
@@ -258,7 +276,7 @@ public class TestTableLockManager {
   public Void call() throws Exception {
 writeLocksAttempted.countDown();
 lockManager.writeLock(TableName.valueOf(table),
-"testReapAllTableLocks").acquire();
+"testReapAllTableLocks").acquire();
 writeLocksObtained.countDown();
 return null;
   }
@@ -269,9 +287,15 @@ public class TestTableLockManager {
 writeLocksObtained.await();
 writeLocksAttempted.await();
 
+TableLockCounter counter = new TableLockCounter();
+do {
+  counter.reset();
+  lockManager.visitAllLocks(counter);
+  Thread.sleep(10);
+} while (counter.getLockCount() != 10);
+
 //now reap all table locks
 lockManager.reapWriteLocks();
-
 
TEST_UTIL.getConfiguration().setInt(TableLockManager.TABLE_WRITE_LOCK_TIMEOUT_MS,
 0);
 TableLockManager zeroTimeoutLockManager = 
TableLockManager.createTableLockManager(
   TEST_UTIL.getConfiguration(), TEST_UTIL.getZooKeeperWatcher(), 
serverName);



[4/4] hbase git commit: HBASE-14897 TestTableLockManager.testReapAllTableLocks is flakey

2018-02-20 Thread busbey
HBASE-14897 TestTableLockManager.testReapAllTableLocks is flakey

(cherry picked from commit 47082579a85fae1b3f7156963609819369b353d8)
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/dacaaea0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/dacaaea0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/dacaaea0

Branch: refs/heads/branch-1.2
Commit: dacaaea0f634e8330d01e2fcd267cf38255e2969
Parents: ef847f8
Author: chenheng 
Authored: Tue Dec 1 10:31:00 2015 +0800
Committer: Sean Busbey 
Committed: Tue Feb 20 09:22:00 2018 -0600

--
 .../hbase/master/TestTableLockManager.java  | 32 +---
 1 file changed, 28 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/dacaaea0/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
index 67806c5..16a6450 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
@@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.InterProcessLock;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.ScheduledChore;
 import org.apache.hadoop.hbase.ServerName;
@@ -55,6 +55,7 @@ import 
org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.exceptions.LockTimeoutException;
 import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.LoadTestTool;
 import org.apache.hadoop.hbase.util.StoppableImplementation;
@@ -130,7 +131,7 @@ public class TestTableLockManager {
 
 HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
 
master.getMasterCoprocessorHost().load(TestAlterAndDisableMasterObserver.class,
-0, TEST_UTIL.getConfiguration());
+0, TEST_UTIL.getConfiguration());
 
 ExecutorService executor = Executors.newFixedThreadPool(2);
 Future alterTableFuture = executor.submit(new Callable() {
@@ -234,6 +235,23 @@ public class TestTableLockManager {
 
   }
 
+  public class TableLockCounter implements InterProcessLock.MetadataHandler {
+
+private int lockCount = 0;
+
+@Override
+public void handleMetadata(byte[] metadata) {
+  lockCount++;
+}
+
+public void reset() {
+  lockCount = 0;
+}
+
+public int getLockCount() {
+  return lockCount;
+}
+  }
 
   @Test(timeout = 60)
   public void testReapAllTableLocks() throws Exception {
@@ -258,7 +276,7 @@ public class TestTableLockManager {
   public Void call() throws Exception {
 writeLocksAttempted.countDown();
 lockManager.writeLock(TableName.valueOf(table),
-"testReapAllTableLocks").acquire();
+"testReapAllTableLocks").acquire();
 writeLocksObtained.countDown();
 return null;
   }
@@ -269,9 +287,15 @@ public class TestTableLockManager {
 writeLocksObtained.await();
 writeLocksAttempted.await();
 
+TableLockCounter counter = new TableLockCounter();
+do {
+  counter.reset();
+  lockManager.visitAllLocks(counter);
+  Thread.sleep(10);
+} while (counter.getLockCount() != 10);
+
 //now reap all table locks
 lockManager.reapWriteLocks();
-
 
TEST_UTIL.getConfiguration().setInt(TableLockManager.TABLE_WRITE_LOCK_TIMEOUT_MS,
 0);
 TableLockManager zeroTimeoutLockManager = 
TableLockManager.createTableLockManager(
   TEST_UTIL.getConfiguration(), TEST_UTIL.getZooKeeperWatcher(), 
serverName);



[42/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/book.html
--
diff --git a/book.html b/book.html
index 656a743..0cd9468 100644
--- a/book.html
+++ b/book.html
@@ -37303,7 +37303,7 @@ The server will return cellblocks compressed using this 
same compressor as long
 
 
 Version 3.0.0-SNAPSHOT
-Last updated 2018-02-18 14:29:34 UTC
+Last updated 2018-02-20 14:29:35 UTC
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/bulk-loads.html
--
diff --git a/bulk-loads.html b/bulk-loads.html
index 7788c1f..608dae6 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase –  
   Bulk Loads in Apache HBase (TM)
@@ -311,7 +311,7 @@ under the License. -->
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-02-18
+  Last Published: 
2018-02-20
 
 
 



[08/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
index e5fdac5..ad7c82a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
@@ -33,299 +33,303 @@
 025import org.apache.hadoop.hbase.Cell;
 026import 
org.apache.hadoop.hbase.CompareOperator;
 027import 
org.apache.hadoop.hbase.PrivateCellUtil;
-028import 
org.apache.yetus.audience.InterfaceAudience;
-029import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-030import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-031import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-032import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
-033import 
org.apache.hadoop.hbase.util.Bytes;
-034
-035import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-036/**
-037 * This is a generic filter to be used to 
filter by comparison.  It takes an
-038 * operator (equal, greater, not equal, 
etc) and a byte [] comparator.
-039 * 

-040 * To filter by row key, use {@link RowFilter}. +028import org.apache.hadoop.hbase.util.Bytes; +029import org.apache.yetus.audience.InterfaceAudience; +030 +031import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; +032 +033import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; +034import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; +035import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; +036import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType; +037 +038/** +039 * This is a generic filter to be used to filter by comparison. It takes an +040 * operator (equal, greater, not equal, etc) and a byte [] comparator. 041 *

-042 * To filter by column qualifier, use {@link QualifierFilter}. +042 * To filter by row key, use {@link RowFilter}. 043 *

-044 * To filter by value, use {@link SingleColumnValueFilter}. +044 * To filter by column family, use {@link FamilyFilter}. 045 *

-046 * These filters can be wrapped with {@link SkipFilter} and {@link WhileMatchFilter} -047 * to add more control. -048 *

-049 * Multiple filters can be combined using {@link FilterList}. -050 */ -051@InterfaceAudience.Public -052public abstract class CompareFilter extends FilterBase { -053 /** -054 * Comparison operators. For filters only! -055 * Use {@link CompareOperator} otherwise. -056 * It (intentionally) has at least the below enums with same names. -057 * @deprecated since 2.0.0. Will be removed in 3.0.0. Use {@link CompareOperator} instead. -058 */ -059 @Deprecated -060 @InterfaceAudience.Public -061 public enum CompareOp { -062/** less than */ -063LESS, -064/** less than or equal to */ -065LESS_OR_EQUAL, -066/** equals */ -067EQUAL, -068/** not equal */ -069NOT_EQUAL, -070/** greater than or equal to */ -071GREATER_OR_EQUAL, -072/** greater than */ -073GREATER, -074/** no operation */ -075NO_OP, -076 } -077 -078 protected CompareOperator op; -079 protected ByteArrayComparable comparator; -080 -081 /** -082 * Constructor. -083 * @param compareOp the compare op for row matching -084 * @param comparator the comparator for row matching -085 * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use other constructor. -086 */ -087 @Deprecated -088 public CompareFilter(final CompareOp compareOp, -089 final ByteArrayComparable comparator) { -090 this(CompareOperator.valueOf(compareOp.name()), comparator); -091 } -092 -093 /** -094 * Constructor. -095 * @param op the compare op for row matching -096 * @param comparator the comparator for row matching -097 */ -098 public CompareFilter(final CompareOperator op, -099 final ByteArrayComparable comparator) { -100this.op = op; -101this.comparator = comparator; -102 } -103 -104 /** -105 * @return operator -106 * @deprecated since 2.0.0. Will be removed in 3.0.0. Use {@link #getCompareOperator()} instead. 
-107 */ -108 @Deprecated -109 public CompareOp getOperator() { -110return CompareOp.valueOf(op.name()); -111 } -112 -113 public CompareOperator getCompareOperator() { -114return op; +046 * To filter by column qualifier, use {@link QualifierFilter}. +047 *

+048 * To filter by value, use {@link ValueFilter}. +049 *

+050 * These filters can be wrapped with {@link SkipFilter} and {@link WhileMatchFilter} +051 * to add more control. +052 *

+053 * Multiple filters can be combined usin


[43/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/apidocs/src-html/org/apache/hadoop/hbase/filter/ParseFilter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/ParseFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/ParseFilter.html
index 49de9ff..d98b2a6 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/ParseFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/ParseFilter.html
@@ -101,684 +101,684 @@
 093  
"SingleColumnValueExcludeFilter");
 094
filterHashMap.put("DependentColumnFilter", ParseConstants.FILTER_PACKAGE + "." 
+
 095  
"DependentColumnFilter");
-096
-097// Creates the 
operatorPrecedenceHashMap
-098operatorPrecedenceHashMap = new 
HashMap<>();
-099
operatorPrecedenceHashMap.put(ParseConstants.SKIP_BUFFER, 1);
-100
operatorPrecedenceHashMap.put(ParseConstants.WHILE_BUFFER, 1);
-101
operatorPrecedenceHashMap.put(ParseConstants.AND_BUFFER, 2);
-102
operatorPrecedenceHashMap.put(ParseConstants.OR_BUFFER, 3);
-103  }
-104
-105  /**
-106   * Parses the filterString and 
constructs a filter using it
-107   * 

-108 * @param filterString filter string given by the user -109 * @return filter object we constructed -110 */ -111 public Filter parseFilterString (String filterString) -112throws CharacterCodingException { -113return parseFilterString(Bytes.toBytes(filterString)); -114 } -115 -116 /** -117 * Parses the filterString and constructs a filter using it -118 *

-119 * @param filterStringAsByteArray filter string given by the user -120 * @return filter object we constructed -121 */ -122 public Filter parseFilterString (byte [] filterStringAsByteArray) -123throws CharacterCodingException { -124// stack for the operators and parenthesis -125Stack operatorStack = new Stack<>(); -126// stack for the filter objects -127Stack filterStack = new Stack<>(); -128 -129Filter filter = null; -130for (int i=0; i


[39/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/deprecated-list.html
--
diff --git a/devapidocs/deprecated-list.html b/devapidocs/deprecated-list.html
index e1c4aa9..72631d0 100644
--- a/devapidocs/deprecated-list.html
+++ b/devapidocs/deprecated-list.html
@@ -850,61 +850,55 @@
 
 
 
-org.apache.hadoop.hbase.filter.ParseFilter.createCompareOperator(byte[])
-Since 2.0
- 
-
-
-
 org.apache.hadoop.hbase.HRegionInfo.createRegionName(TableName,
 byte[], byte[], boolean)
 As of release 2.0.0, this 
will be removed in HBase 3.0.0
  Use RegionInfo.createRegionName(TableName,
 byte[], byte[], boolean).
 
 
-
+
 org.apache.hadoop.hbase.HRegionInfo.createRegionName(TableName,
 byte[], byte[], int, boolean)
 As of release 2.0.0, this 
will be removed in HBase 3.0.0
  Use RegionInfo.createRegionName(TableName,
 byte[], byte[], int, boolean).
 
 
-
+
 org.apache.hadoop.hbase.HRegionInfo.createRegionName(TableName,
 byte[], long, boolean)
 As of release 2.0.0, this 
will be removed in HBase 3.0.0
  Use RegionInfo.createRegionName(TableName,
 byte[], long, boolean).
 
 
-
+
 org.apache.hadoop.hbase.HRegionInfo.createRegionName(TableName,
 byte[], long, int, boolean)
 As of release 2.0.0, this 
will be removed in HBase 3.0.0
  Use RegionInfo.createRegionName(TableName,
 byte[], long, int, boolean).
 
 
-
+
 org.apache.hadoop.hbase.HRegionInfo.createRegionName(TableName,
 byte[], String, boolean)
 As of release 2.0.0, this 
will be removed in HBase 3.0.0
  Use RegionInfo.createRegionName(TableName,
 byte[], String, boolean).
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.deleteColumn(TableName,
 byte[])
 As of release 2.0.0.
  This will be removed in HBase 3.0.0.
  Use Admin.deleteColumnFamily(TableName,
 byte[])}.
 
 
-
+
 org.apache.hadoop.hbase.client.HBaseAdmin.deleteColumn(TableName,
 byte[])
 Since 2.0. Will be removed 
in 3.0. Use
  HBaseAdmin.deleteColumnFamily(TableName,
 byte[]) instead.
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.deleteSnapshots(String)
 since 2.0 version and will 
be removed in 3.0 version.
  Use Admin.deleteSnapshots(Pattern)
 instead.
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.deleteTables(Pattern)
 since 2.0 version and will 
be removed in 3.0 version
  This is just a trivial helper method without any magic.
@@ -912,7 +906,7 @@
  and Admin.deleteTable(TableName)
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.deleteTables(String)
 since 2.0 version and will 
be removed in 3.0 version
  This is just a trivial helper method without any magic.
@@ -920,30 +914,30 @@
  and Admin.deleteTable(TableName)
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.deleteTableSnapshots(String,
 String)
 since 2.0 version and will 
be removed in 3.0 version.
  Use Admin.deleteTableSnapshots(Pattern,
 Pattern) instead.
 
 
-
+
 org.apache.hadoop.hbase.master.TableStateManager.deleteZooKeeper(TableName)
 Since 2.0.0. To be removed 
in hbase-3.0.0.
 
 
-
+
 org.apache.hadoop.hbase.client.replication.ReplicationAdmin.disablePeer(String)
 use Admin.disableReplicationPeer(String)
  instead
 
 
-
+
 org.apache.hadoop.hbase.client.replication.ReplicationAdmin.disableTableRep(TableName)
 use Admin.disableTableReplication(TableName)
  instead
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.disableTables(Pattern)
 since 2.0 version and will 
be removed in 3.0 version
  This is just a trivial helper method without any magic.
@@ -951,7 +945,7 @@
  and Admin.disableTable(org.apache.hadoop.hbase.TableName)
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.disableTables(String)
 since 2.0 version and will 
be removed in 3.0 version
  This is just a trivial helper method without any magic.
@@ -959,25 +953,25 @@
  and Admin.disableTable(org.apache.hadoop.hbase.TableName)
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.enableCatalogJanitor(boolean)
 Since 2.0.0. Will be 
removed in 3.0.0. Use Admin.catalogJanitorSwitch(boolean)}
  instead.
 
 
-
+
 org.apache.hadoop.hbase.client.replication.ReplicationAdmin.enablePeer(String)
 use Admin.enableReplicationPeer(String)
  instead
 
 
-
+
 org.apache.hadoop.hbase.client.replication.ReplicationAdmin.enableTableRep(TableName)
 use Admin.enableTableReplication(TableName)
  instead
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.enableTables(Pattern)
 since 2.0 version and will 
be removed in 3.0 version
  This is just a trivial helper method without any magic.
@@ -985,7 +979,7 @@
  and Admin.enableTable(org.apache.hadoop.hbase.TableName)
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.enableTables(String)
 since 2.0 version and will 
be removed in 3.0 version
  This is just a trivial helper method without any magic.
@@ -993

[30/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html 
b/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
index 1f9cc78..2a42011 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
  @InterfaceAudience.Public
-public static enum CompareFilter.CompareOp
+public static enum CompareFilter.CompareOp
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum
 Comparison operators. For filters only!
  Use CompareOperator 
otherwise.
@@ -256,7 +256,7 @@ the order they are declared.
 
 
 LESS
-public static final CompareFilter.CompareOp LESS
+public static final CompareFilter.CompareOp LESS
 Deprecated. 
 less than
 
@@ -267,7 +267,7 @@ the order they are declared.
 
 
 LESS_OR_EQUAL
-public static final CompareFilter.CompareOp LESS_OR_EQUAL
+public static final CompareFilter.CompareOp LESS_OR_EQUAL
 Deprecated. 
 less than or equal to
 
@@ -278,7 +278,7 @@ the order they are declared.
 
 
 EQUAL
-public static final CompareFilter.CompareOp EQUAL
+public static final CompareFilter.CompareOp EQUAL
 Deprecated. 
 equals
 
@@ -289,7 +289,7 @@ the order they are declared.
 
 
 NOT_EQUAL
-public static final CompareFilter.CompareOp NOT_EQUAL
+public static final CompareFilter.CompareOp NOT_EQUAL
 Deprecated. 
 not equal
 
@@ -300,7 +300,7 @@ the order they are declared.
 
 
 GREATER_OR_EQUAL
-public static final CompareFilter.CompareOp GREATER_OR_EQUAL
+public static final CompareFilter.CompareOp GREATER_OR_EQUAL
 Deprecated. 
 greater than or equal to
 
@@ -311,7 +311,7 @@ the order they are declared.
 
 
 GREATER
-public static final CompareFilter.CompareOp GREATER
+public static final CompareFilter.CompareOp GREATER
 Deprecated. 
 greater than
 
@@ -322,7 +322,7 @@ the order they are declared.
 
 
 NO_OP
-public static final CompareFilter.CompareOp NO_OP
+public static final CompareFilter.CompareOp NO_OP
 Deprecated. 
 no operation
 
@@ -341,7 +341,7 @@ the order they are declared.
 
 
 values
-public static CompareFilter.CompareOp[] values()
+public static CompareFilter.CompareOp[] values()
 Deprecated. 
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
@@ -362,7 +362,7 @@ for (CompareFilter.CompareOp c : 
CompareFilter.CompareOp.values())
 
 
 valueOf
-public static CompareFilter.CompareOp valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static CompareFilter.CompareOp valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Deprecated. 
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.html 
b/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.html
index fec4140..ef9bbfa 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.html
@@ -49,7 +49,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-Prev Class
+Prev Class
 Next Class
 
 
@@ -124,16 +124,18 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public abstract class CompareFilter
+public abstract class CompareFilter
 extends FilterBase
 This is a generic filter to be used to filter by 
comparison.  It takes an
  operator (equal, greater, not equal, etc) and a byte [] comparator.
  
  To filter by row key, use RowFilter.
  
+ To filter by column family, use FamilyFilter.
+ 
  To filter by column qualifier, use QualifierFilter.
  
- To filter by value, use SingleColumnValueFilter.
+ To filter by value, use ValueFilter.
  
  These filters can be wrapped with SkipFilter and 
WhileMatchFilter
  to add more control.
@@ -407,7 +409,7 @@ extends 
 
 op
-protected CompareOperator op
+protected CompareOperator op
 
 
 
@@ -416,7 +418,7 @@ extends 
 
 comparator
-protected ByteArrayComparable comparator
+protected ByteArrayComparable comparator
 
 
 
@@ -434,7 +436,7 @@ extends 
 CompareFilter
 http

[27/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html 
b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
index b419e6d..8e88e74 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
@@ -2585,7 +2585,7 @@ implements 
 
 stopProcedureExecutor
-private void stopProcedureExecutor()
+private void stopProcedureExecutor()
 
 
 
@@ -2594,7 +2594,7 @@ implements 
 
 stopChores
-private void stopChores()
+private void stopChores()
 
 
 
@@ -2603,7 +2603,7 @@ implements 
 
 getRemoteInetAddress
-http://docs.oracle.com/javase/8/docs/api/java/net/InetAddress.html?is-external=true";
 title="class or interface in java.net">InetAddress getRemoteInetAddress(int port,
+http://docs.oracle.com/javase/8/docs/api/java/net/InetAddress.html?is-external=true";
 title="class or interface in java.net">InetAddress getRemoteInetAddress(int port,
  long serverStartCode)
   throws http://docs.oracle.com/javase/8/docs/api/java/net/UnknownHostException.html?is-external=true";
 title="class or interface in java.net">UnknownHostException
 
@@ -2620,7 +2620,7 @@ implements 
 
 getMaxBalancingTime
-private int getMaxBalancingTime()
+private int getMaxBalancingTime()
 
 Returns:
 Maximum time we should run balancer for
@@ -2633,7 +2633,7 @@ implements 
 
 getMaxRegionsInTransition
-private int getMaxRegionsInTransition()
+private int getMaxRegionsInTransition()
 
 Returns:
 Maximum number of regions in transition
@@ -2646,7 +2646,7 @@ implements 
 
 balanceThrottling
-private void balanceThrottling(long nextBalanceStartTime,
+private void balanceThrottling(long nextBalanceStartTime,
int maxRegionsInTransition,
long cutoffTime)
 It first sleep to the next balance plan start time. 
Meanwhile, throttling by the max
@@ -2665,7 +2665,7 @@ implements 
 
 balance
-public boolean balance()
+public boolean balance()
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 
 Throws:
@@ -2679,7 +2679,7 @@ implements 
 
 balance
-public boolean balance(boolean force)
+public boolean balance(boolean force)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 
 Throws:
@@ -2693,7 +2693,7 @@ implements 
 
 getRegionNormalizer
-public RegionNormalizer getRegionNormalizer()
+public RegionNormalizer getRegionNormalizer()
 
 Specified by:
 getRegionNormalizer in
 interface MasterServices
@@ -2708,7 +2708,7 @@ implements 
 
 normalizeRegions
-public boolean normalizeRegions()
+public boolean normalizeRegions()
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Perform normalization of cluster (invoked by RegionNormalizerChore).
 
@@ -2727,7 +2727,7 @@ implements 
 
 getClientIdAuditPrefix
-public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getClientIdAuditPrefix()
+public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getClientIdAuditPrefix()
 
 Specified by:
 getClientIdAuditPrefix in
 interface MasterServices
@@ -2742,7 +2742,7 @@ implements 
 
 setCatalogJanitorEnabled
-public void setCatalogJanitorEnabled(boolean b)
+public void setCatalogJanitorEnabled(boolean b)
 Switch for the background CatalogJanitor thread.
  Used for testing.  The thread will continue to run.  It will just be a noop
  if disabled.
@@ -2758,7 +2758,7 @@ implements 
 
 mergeRegions
-public long mergeRegions(RegionInfo[] regionsToMerge,
+public long mergeRegions(RegionInfo[] regionsToMerge,
  boolean forcible,
  long nonceGroup,
  long nonce)
@@ -2786,7 +2786,7 @@ implements 
 
 splitRegion
-public long splitRegion(RegionInfo regionInfo,
+public long splitRegion(RegionInfo regionInfo,
 byte[] splitRow,
 long nonceGroup,
 long nonce)
@@ -2814,7 +2814,7 @@ implements 
 
 move
-public void move(byte[] encodedRegionName,
+public void move(byte[] encodedRegionName,
  byte[] destServerName)
   throws HBaseIOException
 
@@ -2829,7 +2829,7 @@ implements 
 
 createTable
-public long createTable(TableDescriptor tableDescriptor,
+public long createTable(TableDescriptor tableDescriptor,
 byte[][] splitKeys,

[25/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.DelayedProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.DelayedProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.DelayedProcedure.html
deleted file mode 100644
index 3131a76..000
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.DelayedProcedure.html
+++ /dev/null
@@ -1,282 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd";>
-
-
-
-
-
-ProcedureExecutor.DelayedProcedure (Apache HBase 3.0.0-SNAPSHOT 
API)
-
-
-
-
-
-
-
-JavaScript is disabled on your browser.
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-Prev Class
-Next Class
-
-
-Frames
-No Frames
-
-
-All Classes
-
-
-
-
-
-
-
-Summary: 
-Nested | 
-Field | 
-Constr | 
-Method
-
-
-Detail: 
-Field | 
-Constr | 
-Method
-
-
-
-
-
-
-
-
-org.apache.hadoop.hbase.procedure2
-Class 
ProcedureExecutor.DelayedProcedure
-
-
-
-http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">java.lang.Object
-
-
-org.apache.hadoop.hbase.procedure2.util.DelayedUtil.DelayedObject
-
-
-org.apache.hadoop.hbase.procedure2.util.DelayedUtil.DelayedContainer
-
-
-org.apache.hadoop.hbase.procedure2.util.DelayedUtil.DelayedContainerWithTimestamp
-
-
-org.apache.hadoop.hbase.procedure2.ProcedureExecutor.DelayedProcedure
-
-
-
-
-
-
-
-
-
-
-
-
-
-All Implemented Interfaces:
-http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">ComparableDelayed>, http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Delayed.html?is-external=true";
 title="class or interface in java.util.concurrent">Delayed, DelayedUtil.DelayedWithTimeout
-
-
-Enclosing class:
-ProcedureExecutor
-
-
-
-private static final class ProcedureExecutor.DelayedProcedure
-extends DelayedUtil.DelayedContainerWithTimestamp
-
-
-
-
-
-
-
-
-
-
-
-Constructor Summary
-
-Constructors 
-
-Constructor and Description
-
-
-DelayedProcedure(Procedure procedure) 
-
-
-
-
-
-
-
-
-
-Method Summary
-
-
-
-
-Methods inherited from 
class org.apache.hadoop.hbase.procedure2.util.DelayedUtil.DelayedContainerWithTimestamp
-getTimeout,
 setTimeout
-
-
-
-
-
-Methods inherited from 
class org.apache.hadoop.hbase.procedure2.util.DelayedUtil.DelayedContainer
-equals,
 getObject,
 hashCode,
 toString
-
-
-
-
-
-Methods inherited from 
class org.apache.hadoop.hbase.procedure2.util.DelayedUtil.DelayedObject
-compareTo,
 getDelay
-
-
-
-
-
-Methods inherited from class java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
-http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--";
 title="class or interface in java.lang">clone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--";
 title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--";
 title="class or interface in java.lang">getClass, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--";
 title="class or interface in java.lang">notify, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notifyAll--";
 title="class or interface in java.lang">notifyAll, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--";
 title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-ex
 ternal=true#wait-long-" title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-";
 title="class or interface in java.lang">wait
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-Constructor Detail
-
-
-
-
-
-DelayedProcedure
-public DelayedProcedure(Procedure procedure)
-
-
-
-
-
-
-
-
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-Prev Class
-Next Class
-
-
-Frames
-No Frames
-
-
-All Classes

[47/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/apidocs/org/apache/hadoop/hbase/filter/ParseFilter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/ParseFilter.html 
b/apidocs/org/apache/hadoop/hbase/filter/ParseFilter.html
index 9a42c62..70c2bc9 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/ParseFilter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/ParseFilter.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":41,"i9":41,"i10":9,"i11":10,"i12":9,"i13":9,"i14":9,"i15":10,"i16":10,"i17":9,"i18":9,"i19":10,"i20":10,"i21":10,"i22":9,"i23":10,"i24":9,"i25":9};
+var methods = 
{"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":41,"i9":9,"i10":9,"i11":10,"i12":9,"i13":9,"i14":9,"i15":10,"i16":10,"i17":9,"i18":9,"i19":10,"i20":10,"i21":10,"i22":9,"i23":10,"i24":9,"i25":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -218,10 +218,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 static CompareOperator
 createCompareOperator(byte[] compareOpAsByteArray)
-Deprecated. 
-Since 2.0
- 
-
+Takes a compareOperator symbol as a byte array and returns 
the corresponding CompareOperator
 
 
 
@@ -377,7 +374,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 parseFilterString
-public Filter parseFilterString(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String filterString)
+public Filter parseFilterString(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String filterString)
  throws http://docs.oracle.com/javase/8/docs/api/java/nio/charset/CharacterCodingException.html?is-external=true";
 title="class or interface in 
java.nio.charset">CharacterCodingException
 Parses the filterString and constructs a filter using it
  
@@ -397,7 +394,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 parseFilterString
-public Filter parseFilterString(byte[] filterStringAsByteArray)
+public Filter parseFilterString(byte[] filterStringAsByteArray)
  throws http://docs.oracle.com/javase/8/docs/api/java/nio/charset/CharacterCodingException.html?is-external=true";
 title="class or interface in 
java.nio.charset">CharacterCodingException
 Parses the filterString and constructs a filter using it
  
@@ -417,7 +414,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 extractFilterSimpleExpression
-public byte[] extractFilterSimpleExpression(byte[] filterStringAsByteArray,
+public byte[] extractFilterSimpleExpression(byte[] filterStringAsByteArray,
 
int filterExpressionStartOffset)
  throws http://docs.oracle.com/javase/8/docs/api/java/nio/charset/CharacterCodingException.html?is-external=true";
 title="class or interface in 
java.nio.charset">CharacterCodingException
 Extracts a simple filter expression from the filter string 
given by the user
@@ -446,7 +443,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 parseSimpleFilterExpression
-public Filter parseSimpleFilterExpression(byte[] filterStringAsByteArray)
+public Filter parseSimpleFilterExpression(byte[] filterStringAsByteArray)
throws http://docs.oracle.com/javase/8/docs/api/java/nio/charset/CharacterCodingException.html?is-external=true";
 title="class or interface in 
java.nio.charset">CharacterCodingException
 Constructs a filter object given a simple filter expression
  
@@ -466,7 +463,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getFilterName
-public static byte[] getFilterName(byte[] filterStringAsByteArray)
+public static byte[] getFilterName(byte[] filterStringAsByteArray)
 Returns the filter name given a simple filter expression
  
 
@@ -483,7 +480,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getFilterArguments
-public static http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in java.util">ArrayList getFilterArguments(byte[] filterStringAsByteArray)
+public static http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in java.util">ArrayList getFilterArguments(byte[] filterStringAsByteArray)
 Returns the arguments of the filter from the filter string
  
 
@@ -500,7 +497,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 reduce
-pu

[10/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.html
index 4d03740..2f29cd8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.html
@@ -170,1134 +170,1131 @@
 162  final RpcRetryingCallerFactory 
rpcCallerFactory,
 163  final RpcControllerFactory 
rpcControllerFactory,
 164  final ExecutorService pool) {
-165if (connection == null || 
connection.isClosed()) {
-166  throw new 
IllegalArgumentException("Connection is null or closed.");
-167}
-168this.connection = connection;
-169this.configuration = 
connection.getConfiguration();
-170this.connConfiguration = 
connection.getConnectionConfiguration();
-171if (pool == null) {
-172  this.pool = 
getDefaultExecutor(this.configuration);
-173  this.cleanupPoolOnClose = true;
-174} else {
-175  this.pool = pool;
-176  this.cleanupPoolOnClose = false;
-177}
-178if (rpcCallerFactory == null) {
-179  this.rpcCallerFactory = 
connection.getNewRpcRetryingCallerFactory(configuration);
-180} else {
-181  this.rpcCallerFactory = 
rpcCallerFactory;
-182}
-183
-184if (rpcControllerFactory == null) {
-185  this.rpcControllerFactory = 
RpcControllerFactory.instantiate(configuration);
-186} else {
-187  this.rpcControllerFactory = 
rpcControllerFactory;
-188}
-189
-190this.tableName = builder.tableName;
-191this.operationTimeoutMs = 
builder.operationTimeout;
-192this.rpcTimeoutMs = 
builder.rpcTimeout;
-193this.readRpcTimeoutMs = 
builder.readRpcTimeout;
-194this.writeRpcTimeoutMs = 
builder.writeRpcTimeout;
-195this.scannerCaching = 
connConfiguration.getScannerCaching();
-196this.scannerMaxResultSize = 
connConfiguration.getScannerMaxResultSize();
-197
-198// puts need to track errors globally 
due to how the APIs currently work.
-199multiAp = 
this.connection.getAsyncProcess();
-200this.locator = new 
HRegionLocator(tableName, connection);
-201  }
-202
-203  /**
-204   * @return maxKeyValueSize from 
configuration.
-205   */
-206  public static int 
getMaxKeyValueSize(Configuration conf) {
-207return 
conf.getInt(ConnectionConfiguration.MAX_KEYVALUE_SIZE_KEY, -1);
-208  }
-209
-210  @Override
-211  public Configuration getConfiguration() 
{
-212return configuration;
-213  }
-214
-215  @Override
-216  public TableName getName() {
-217return tableName;
-218  }
-219
-220  /**
-221   * INTERNAL Used 
by unit tests and tools to do low-level
-222   * manipulations.
-223   * @return A Connection instance.
-224   */
-225  @VisibleForTesting
-226  protected Connection getConnection() 
{
-227return this.connection;
-228  }
-229
-230  @Override
-231  @Deprecated
-232  public HTableDescriptor 
getTableDescriptor() throws IOException {
-233HTableDescriptor htd = 
HBaseAdmin.getHTableDescriptor(tableName, connection, rpcCallerFactory,
-234  rpcControllerFactory, 
operationTimeoutMs, readRpcTimeoutMs);
-235if (htd != null) {
-236  return new 
ImmutableHTableDescriptor(htd);
-237}
-238return null;
-239  }
-240
-241  @Override
-242  public TableDescriptor getDescriptor() 
throws IOException {
-243return 
HBaseAdmin.getTableDescriptor(tableName, connection, rpcCallerFactory,
-244  rpcControllerFactory, 
operationTimeoutMs, readRpcTimeoutMs);
-245  }
-246
-247  /**
-248   * Get the corresponding start keys and 
regions for an arbitrary range of
-249   * keys.
-250   * 

-251 * @param startKey Starting row in range, inclusive -252 * @param endKey Ending row in range -253 * @param includeEndKey true if endRow is inclusive, false if exclusive -254 * @return A pair of list of start keys and list of HRegionLocations that -255 * contain the specified range -256 * @throws IOException if a remote or network exception occurs -257 */ -258 private Pair, List> getKeysAndRegionsInRange( -259 final byte[] startKey, final byte[] endKey, final boolean includeEndKey) -260 throws IOException { -261return getKeysAndRegionsInRange(startKey, endKey, includeEndKey, false); -262 } -263 -264 /** -265 * Get the corresponding start keys and regions for an arbitrary range of -266 * keys. -267 *

-268 * @param startKey Starting row in range, inclusive -269 * @param endKey Ending row in range -270 * @param includeEndKey true if endRow is inclusive, false if exclusive -271 * @param reload true to reload information or false to use cached information -272 * @return A pair of list of start keys and list of HRegionLocations that -273 * contain the


[38/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 2e3c90e..8554335 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -,10 +,6 @@
 Append the given message to this buffer, automatically 
evicting
  older messages until the desired memory limit is achieved.
 
-add(ProcedureExecutor.InlineChore)
 - Method in class org.apache.hadoop.hbase.procedure2.ProcedureExecutor.TimeoutExecutorThread
- 
-add(Procedure)
 - Method in class org.apache.hadoop.hbase.procedure2.ProcedureExecutor.TimeoutExecutorThread
- 
 add(RemoteProcedureDispatcher.RemoteProcedure)
 - Method in class org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.BufferNode
  
 add(RemoteProcedureDispatcher.RemoteProcedure) - Method in interface 
org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteNode
@@ -1123,6 +1119,10 @@
  
 add(ProcedureProtos.Procedure)
 - Method in class org.apache.hadoop.hbase.procedure2.store.wal.ProcedureWALFormatReader.WalProcedureMap
  
+add(InlineChore)
 - Method in class org.apache.hadoop.hbase.procedure2.TimeoutExecutorThread
+ 
+add(Procedure)
 - Method in class org.apache.hadoop.hbase.procedure2.TimeoutExecutorThread
+ 
 add(Iterable,
 MemStoreSizing) - Method in class 
org.apache.hadoop.hbase.regionserver.AbstractMemStore
  
 add(Cell,
 MemStoreSizing) - Method in class 
org.apache.hadoop.hbase.regionserver.AbstractMemStore
@@ -3467,6 +3467,8 @@
  
 areSerializedFieldsEqual(Filter)
 - Method in class org.apache.hadoop.hbase.filter.ColumnRangeFilter
  
+areSerializedFieldsEqual(Filter)
 - Method in class org.apache.hadoop.hbase.filter.ColumnValueFilter
+ 
 areSerializedFieldsEqual(Filter)
 - Method in class org.apache.hadoop.hbase.filter.CompareFilter
  
 areSerializedFieldsEqual(Filter)
 - Method in class org.apache.hadoop.hbase.filter.DependentColumnFilter
@@ -4689,10 +4691,10 @@
  
 awaitTerminated(long,
 TimeUnit) - Method in class 
org.apache.hadoop.hbase.security.visibility.VisibilityReplicationEndpoint
  
-awaitTermination()
 - Method in class org.apache.hadoop.hbase.procedure2.ProcedureExecutor.StoppableThread
- 
 awaitTermination()
 - Method in class org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.TimeoutExecutorThread
  
+awaitTermination()
 - Method in class org.apache.hadoop.hbase.procedure2.StoppableThread
+ 
 
 
 
@@ -10256,6 +10258,8 @@
 
 checkClose()
 - Method in class org.apache.hadoop.hbase.client.BufferedMutatorImpl
  
+checkClosed()
 - Method in class org.apache.hadoop.hbase.client.ConnectionImplementation
+ 
 checkClusterIdExists(FileSystem,
 Path, int) - Static method in class org.apache.hadoop.hbase.util.FSUtils
 
 Checks that a cluster ID file exists in the HBase root 
directory
@@ -13538,6 +13542,8 @@
 An ModifyableFamilyDescriptor contains information about a 
column family such as the
  number of versions, compression settings, etc.
 
+columnFound
 - Variable in class org.apache.hadoop.hbase.filter.ColumnValueFilter
+ 
 columnIndex
 - Variable in class org.apache.hadoop.hbase.regionserver.querymatcher.NewVersionBehaviorTracker
  
 ColumnInterpreter - 
Class in org.apache.hadoop.hbase.cop
 rocessor
@@ -13622,6 +13628,15 @@
  and enforcement of columns and numbers of versions and timeToLive during
  the course of a Get or Scan operation.
 
+ColumnValueFilter - Class in org.apache.hadoop.hbase.filter
+
+Different from SingleColumnValueFilter 
which returns an entire row
+ when specified condition is matched, ColumnValueFilter return the 
matched cell only.
+
+ColumnValueFilter(byte[],
 byte[], CompareOperator, byte[]) - Constructor for class 
org.apache.hadoop.hbase.filter.ColumnValueFilter
+ 
+ColumnValueFilter(byte[],
 byte[], CompareOperator, ByteArrayComparable) - Constructor for 
class org.apache.hadoop.hbase.filter.ColumnValueFilter
+ 
 combine()
 - Method in class org.apache.hadoop.hbase.client.CompleteScanResultCache
  
 CombinedBlockCache - Class in org.apache.hadoop.hbase.io.hfile
@@ -14397,6 +14412,8 @@
  
 comparator
 - Variable in class org.apache.hadoop.hbase.coprocessor.example.ValueRewritingObserver
  
+comparator
 - Variable in class org.apache.hadoop.hbase.filter.ColumnValueFilter
+ 
 comparator
 - Variable in class org.apache.hadoop.hbase.filter.CompareFilter
  
 comparator
 - Variable in class org.apache.hadoop.hbase.filter.SingleColumnValueFilter
@@ -15166,6 +15183,10 @@
  
 compareTypeBytes(Cell,
 Cell) - Method in class org.apache.hadoop.hbase.io.encoding.BufferedDataBlockEncoder.BufferedEncodedSeeker
  
+compareValue(CompareOperator,
 ByteArrayComparable, Cell) - Method in class 
org.apache.hadoop.hbase.filter.ColumnValueFilter
+
+This method is used to determine a cell should be inc

[22/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html
index f9f5705..ecc059f 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -111,7 +111,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Private
  @InterfaceStability.Evolving
-public class ProcedureExecutor
+public class ProcedureExecutor
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 Thread Pool that executes the submitted procedures.
  The executor has a ProcedureStore associated.
@@ -153,24 +153,16 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 ProcedureExecutor.CompletedProcedureRetainer 
 
 
-private static class 
-ProcedureExecutor.DelayedProcedure 
-
-
 static class 
 ProcedureExecutor.FailedProcedure 
 
-
-private static class 
-ProcedureExecutor.InlineChore 
-
 
-static interface 
-ProcedureExecutor.ProcedureExecutorListener 
+private class 
+ProcedureExecutor.KeepAliveWorkerThread 
 
 
-private static class 
-ProcedureExecutor.StoppableThread 
+static interface 
+ProcedureExecutor.ProcedureExecutorListener 
 
 
 static class 
@@ -178,15 +170,9 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 private class 
-ProcedureExecutor.TimeoutExecutorThread
-Runs task on a period such as check for stuck workers.
-
-
-
-private class 
 ProcedureExecutor.WorkerMonitor 
 
-
+
 private class 
 ProcedureExecutor.WorkerThread 
 
@@ -260,58 +246,62 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 LOG 
 
 
+private int
+maxPoolSize 
+
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true";
 title="class or interface in java.util.concurrent">ConcurrentHashMapLong>
 nonceKeysToProcIdsMap
 Helper map to lookup whether the procedure already issued 
from the same client.
 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true";
 title="class or interface in java.util.concurrent">ConcurrentHashMapLong,Procedure>
 procedures
 Helper map to lookup the live procedures by ID.
 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true";
 title="class or interface in java.util.concurrent">ConcurrentHashMapLong,RootProcedureState>
 rollbackStack
 Map the the procId returned by submitProcedure(), the 
Root-ProcID, to the RootProcedureState.
 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true";
 title="class or interface in 
java.util.concurrent.atomic">AtomicBoolean
 running 
 
-
+
 private ProcedureScheduler
 scheduler
 Scheduler/Queue that contains runnable procedures.
 
 
-
+
 private ProcedureStore
 store 
 
-
+
 (package private) ProcedureExecutor.

[26/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
index b99a6b3..f82f773 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
@@ -197,8 +197,8 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.LocalityType
 org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type
+org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.LocalityType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
index 81c4c95..becce8e 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
@@ -346,11 +346,11 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.master.SplitLogManager.ResubmitDirective
-org.apache.hadoop.hbase.master.RegionState.State
-org.apache.hadoop.hbase.master.SplitLogManager.TerminationStatus
 org.apache.hadoop.hbase.master.MetricsMasterSourceFactoryImpl.FactoryStorage
+org.apache.hadoop.hbase.master.SplitLogManager.TerminationStatus
 org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode
+org.apache.hadoop.hbase.master.SplitLogManager.ResubmitDirective
+org.apache.hadoop.hbase.master.RegionState.State
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/package-tree.html
index 47d1eb6..e6678cc 100644
--- a/devapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -320,13 +320,13 @@
 org.apache.hadoop.hbase.UnknownScannerException
 
 
+org.apache.hadoop.hbase.MasterNotRunningException
 org.apache.hadoop.hbase.PleaseHoldException
 org.apache.hadoop.hbase.RegionException
 org.apache.hadoop.hbase.ServiceNotRunningException
 org.apache.hadoop.hbase.TableInfoMissingException
 
 
-org.apache.hadoop.hbase.MasterNotRunningException
 org.apache.hadoop.hbase.NotServingRegionException
 org.apache.hadoop.hbase.RegionTooBusyException
 org.apache.hadoop.hbase.RetryImmediatelyException
@@ -440,20 +440,20 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.HConstants.OperationStatusCode
-org.apache.hadoop.hbase.CompareOperator
+org.apache.hadoop.hbase.MetaTableAccessor.QueryType
 org.apache.hadoop.hbase.MemoryCompactionPolicy
-org.apache.hadoop.hbase.Cell.Type
 org.apache.hadoop.hbase.Coprocessor.State
-org.apache.hadoop.hbase.Size.Unit
+org.apache.hadoop.hbase.CompareOperator
 org.apache.hadoop.hbase.ProcedureState
-org.apache.hadoop.hbase.ClusterMetrics.Option
-org.apache.hadoop.hbase.HealthChecker.HealthCheckerExitStatus
-org.apache.hadoop.hbase.KeepDeletedCells
-org.apache.hadoop.hbase.MetaTableAccessor.QueryType
+org.apache.hadoop.hbase.HConstants.OperationStatusCode
+org.apache.hadoop.hbase.KeyValue.Type
 org.apache.hadoop.hbase.CellBuilderType
+org.apache.hadoop.hbase.HealthChecker.HealthCheckerExitStatus
 org

[07/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
index e5fdac5..ad7c82a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
@@ -33,299 +33,303 @@
 025import org.apache.hadoop.hbase.Cell;
 026import 
org.apache.hadoop.hbase.CompareOperator;
 027import 
org.apache.hadoop.hbase.PrivateCellUtil;
-028import 
org.apache.yetus.audience.InterfaceAudience;
-029import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-030import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-031import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-032import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
-033import 
org.apache.hadoop.hbase.util.Bytes;
-034
-035import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-036/**
-037 * This is a generic filter to be used to 
filter by comparison.  It takes an
-038 * operator (equal, greater, not equal, 
etc) and a byte [] comparator.
-039 * 

-040 * To filter by row key, use {@link RowFilter}. +028import org.apache.hadoop.hbase.util.Bytes; +029import org.apache.yetus.audience.InterfaceAudience; +030 +031import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; +032 +033import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; +034import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; +035import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; +036import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType; +037 +038/** +039 * This is a generic filter to be used to filter by comparison. It takes an +040 * operator (equal, greater, not equal, etc) and a byte [] comparator. 041 *

-042 * To filter by column qualifier, use {@link QualifierFilter}. +042 * To filter by row key, use {@link RowFilter}. 043 *

-044 * To filter by value, use {@link SingleColumnValueFilter}. +044 * To filter by column family, use {@link FamilyFilter}. 045 *

-046 * These filters can be wrapped with {@link SkipFilter} and {@link WhileMatchFilter} -047 * to add more control. -048 *

-049 * Multiple filters can be combined using {@link FilterList}. -050 */ -051@InterfaceAudience.Public -052public abstract class CompareFilter extends FilterBase { -053 /** -054 * Comparison operators. For filters only! -055 * Use {@link CompareOperator} otherwise. -056 * It (intentionally) has at least the below enums with same names. -057 * @deprecated since 2.0.0. Will be removed in 3.0.0. Use {@link CompareOperator} instead. -058 */ -059 @Deprecated -060 @InterfaceAudience.Public -061 public enum CompareOp { -062/** less than */ -063LESS, -064/** less than or equal to */ -065LESS_OR_EQUAL, -066/** equals */ -067EQUAL, -068/** not equal */ -069NOT_EQUAL, -070/** greater than or equal to */ -071GREATER_OR_EQUAL, -072/** greater than */ -073GREATER, -074/** no operation */ -075NO_OP, -076 } -077 -078 protected CompareOperator op; -079 protected ByteArrayComparable comparator; -080 -081 /** -082 * Constructor. -083 * @param compareOp the compare op for row matching -084 * @param comparator the comparator for row matching -085 * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use other constructor. -086 */ -087 @Deprecated -088 public CompareFilter(final CompareOp compareOp, -089 final ByteArrayComparable comparator) { -090 this(CompareOperator.valueOf(compareOp.name()), comparator); -091 } -092 -093 /** -094 * Constructor. -095 * @param op the compare op for row matching -096 * @param comparator the comparator for row matching -097 */ -098 public CompareFilter(final CompareOperator op, -099 final ByteArrayComparable comparator) { -100this.op = op; -101this.comparator = comparator; -102 } -103 -104 /** -105 * @return operator -106 * @deprecated since 2.0.0. Will be removed in 3.0.0. Use {@link #getCompareOperator()} instead. 
-107 */ -108 @Deprecated -109 public CompareOp getOperator() { -110return CompareOp.valueOf(op.name()); -111 } -112 -113 public CompareOperator getCompareOperator() { -114return op; +046 * To filter by column qualifier, use {@link QualifierFilter}. +047 *

+048 * To filter by value, use {@link ValueFilter}. +049 *

+050 * These filters can be wrapped with {@link SkipFilter} and {@link WhileMatchFilter} +051 * to add more control. +052 *

+053 * Multiple filters can be combined using {@link FilterList}. +054 */ +055@InterfaceAudience


[19/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index bb5c354..73aa8fe 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -105,14 +105,14 @@
 
 org.apache.hadoop.hbase.procedure2.util.DelayedUtil.DelayedContainerWithTimestamp
 
-org.apache.hadoop.hbase.procedure2.ProcedureExecutor.DelayedProcedure
+org.apache.hadoop.hbase.procedure2.DelayedProcedure
 org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.BufferNode 
(implements org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteNode)
 org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.DelayedTask
 
 
 
 
-org.apache.hadoop.hbase.procedure2.ProcedureExecutor.InlineChore (implements 
java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true";
 title="class or interface in java.lang">Runnable)
+org.apache.hadoop.hbase.procedure2.InlineChore (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true";
 title="class or interface in java.lang">Runnable)
 
 org.apache.hadoop.hbase.procedure2.ProcedureExecutor.WorkerMonitor
 
@@ -146,13 +146,17 @@
 org.apache.hadoop.hbase.procedure2.RootProcedureState
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true";
 title="class or interface in java.lang">Thread (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true";
 title="class or interface in java.lang">Runnable)
 
-org.apache.hadoop.hbase.procedure2.ProcedureExecutor.StoppableThread
+org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.TimeoutExecutorThread
+org.apache.hadoop.hbase.procedure2.StoppableThread
+
+org.apache.hadoop.hbase.procedure2.ProcedureExecutor.WorkerThread
 
-org.apache.hadoop.hbase.procedure2.ProcedureExecutor.TimeoutExecutorThread
-org.apache.hadoop.hbase.procedure2.ProcedureExecutor.WorkerThread
+org.apache.hadoop.hbase.procedure2.ProcedureExecutor.KeepAliveWorkerThread
+
+
+org.apache.hadoop.hbase.procedure2.TimeoutExecutorThread
 
 
-org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.TimeoutExecutorThread
 
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable (implements java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
@@ -208,11 +212,11 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.procedure2.Procedure.LockState
-org.apache.hadoop.hbase.procedure2.LockType
-org.apache.hadoop.hbase.procedure2.LockedResourceType
 org.apache.hadoop.hbase.procedure2.RootProcedureState.State
 org.apache.hadoop.hbase.procedure2.StateMachineProcedure.Flow
+org.apache.hadoop.hbase.procedure2.LockType
+org.apache.hadoop.hbase.procedure2.Procedure.LockState
+org.apache.hadoop.hbase.procedure2.LockedResourceType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/procedure2/package-use.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-use.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/package-use.html
index aefd9d8..775f6eb 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-use.html
@@ -482,6 +482,14 @@
 BadProcedureException 
 
 
+DelayedProcedure 
+
+
+InlineChore
+Inline Chores (executors internal chores).
+
+
+
 LockedResource 
 
 
@@ -519,37 +527,28 @@
 ProcedureException 
 
 
-ProcedureExecutor.CompletedProcedureRetainer 
+ProcedureExecutor
+Thread Pool that executes the submitted procedures.
+
 
 
-ProcedureExecutor.DelayedProcedure 
+ProcedureExecutor.CompletedProcedureRetainer 
 
 
-ProcedureExecutor.InlineChore 
-
-
 ProcedureExecutor.ProcedureExecutorListener 
 
-
-ProcedureExecutor.StoppableThread 
-
 
 ProcedureExecutor.Testing 
 
 
-ProcedureExecutor.TimeoutExecutorThread
-Runs task on a period such as check for stuck workers.
-
-
-
 ProcedureExecutor.WorkerThread 
 
-
+
 ProcedureInMemor

[46/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index 0498ba0..53706b4 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -227,11 +227,18 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 class 
+ColumnValueFilter
+Different from SingleColumnValueFilter which 
returns an entire row
+ when specified condition is matched, ColumnValueFilter return the 
matched cell only.
+
+
+
+class 
 CompareFilter
 This is a generic filter to be used to filter by 
comparison.
 
 
-
+
 class 
 DependentColumnFilter
 A filter for adding inter-column timestamp matching
@@ -241,20 +248,20 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  full rows for correct filtering
 
 
-
+
 class 
 FamilyFilter
 
  This filter is used to filter based on the column family.
 
 
-
+
 class 
 org.apache.hadoop.hbase.filter.FilterBase
 Abstract base class to help you implement new Filters.
 
 
-
+
 class 
 FilterList
 Implementation of Filter that 
represents an ordered List of Filters which will be
@@ -262,13 +269,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  FilterList.Operator.MUST_PASS_ONE
 (OR).
 
 
-
+
 class 
 FirstKeyOnlyFilter
 A filter that will only return the first KV from each 
row.
 
 
-
+
 class 
 FirstKeyValueMatchingQualifiersFilter
 Deprecated. 
@@ -276,103 +283,103 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 
-
+
 class 
 FuzzyRowFilter
 This is optimized version of a standard FuzzyRowFilter 
Filters data based on fuzzy row key.
 
 
-
+
 class 
 InclusiveStopFilter
 A Filter that stops after the given row.
 
 
-
+
 class 
 KeyOnlyFilter
 A filter that will only return the key component of each KV 
(the value will
  be rewritten as empty).
 
 
-
+
 class 
 MultipleColumnPrefixFilter
 This filter is used for selecting only those keys with 
columns that matches
  a particular prefix.
 
 
-
+
 class 
 MultiRowRangeFilter
 Filter to support scan multiple row key ranges.
 
 
-
+
 class 
 PageFilter
 Implementation of Filter interface that limits results to a 
specific page
  size.
 
 
-
+
 class 
 PrefixFilter
 Pass results that have same row prefix.
 
 
-
+
 class 
 QualifierFilter
 This filter is used to filter based on the column 
qualifier.
 
 
-
+
 class 
 RandomRowFilter
 A filter that includes rows based on a chance.
 
 
-
+
 class 
 RowFilter
 This filter is used to filter based on the key.
 
 
-
+
 class 
 SingleColumnValueExcludeFilter
 A Filter that 
checks a single column value, but does not emit the
  tested column.
 
 
-
+
 class 
 SingleColumnValueFilter
 This filter is used to filter cells based on value.
 
 
-
+
 class 
 SkipFilter
 A wrapper filter that filters an entire row if any of the 
Cell checks do
  not pass.
 
 
-
+
 class 
 TimestampsFilter
 Filter that returns only cells whose timestamp (version) is
  in the specified list of timestamps (versions).
 
 
-
+
 class 
 ValueFilter
 This filter is used to filter based on column value.
 
 
-
+
 class 
 WhileMatchFilter
 A wrapper filter that returns true from WhileMatchFilter.filterAllRemaining()
 as soon
@@ -450,54 +457,60 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Filter
-TimestampsFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ColumnValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments)
+Creating this filter by reflection, it is used by ParseFilter,
+
 
 
 static Filter
-ColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+TimestampsFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-SingleColumnValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 

[18/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/ClusterConnection.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ClusterConnection.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ClusterConnection.html
index 25fa087..c1bc8ac 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/ClusterConnection.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/ClusterConnection.html
@@ -29,332 +29,312 @@
 021
 022import java.io.IOException;
 023import java.util.List;
-024
-025import 
org.apache.hadoop.conf.Configuration;
-026import 
org.apache.hadoop.hbase.HRegionLocation;
-027import 
org.apache.hadoop.hbase.MasterNotRunningException;
-028import 
org.apache.hadoop.hbase.RegionLocations;
-029import 
org.apache.hadoop.hbase.ServerName;
-030import 
org.apache.hadoop.hbase.TableName;
-031import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-032import 
org.apache.yetus.audience.InterfaceAudience;
-033import 
org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicy;
-034import 
org.apache.hadoop.hbase.ipc.RpcControllerFactory;
+024import 
org.apache.hadoop.conf.Configuration;
+025import 
org.apache.hadoop.hbase.HRegionLocation;
+026import 
org.apache.hadoop.hbase.MasterNotRunningException;
+027import 
org.apache.hadoop.hbase.RegionLocations;
+028import 
org.apache.hadoop.hbase.ServerName;
+029import 
org.apache.hadoop.hbase.TableName;
+030import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
+031import 
org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicy;
+032import 
org.apache.hadoop.hbase.ipc.RpcControllerFactory;
+033import 
org.apache.yetus.audience.InterfaceAudience;
+034
 035import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
 036import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
-037import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService;
-038
-039/** Internal methods on Connection that 
should not be used by user code. */
-040@InterfaceAudience.Private
-041// NOTE: Although this class is public, 
this class is meant to be used directly from internal
-042// classes and unit tests only.
-043public interface ClusterConnection 
extends Connection {
-044
-045  /**
-046   * Key for configuration in 
Configuration whose value is the class we implement making a
-047   * new Connection instance.
-048   */
-049  String HBASE_CLIENT_CONNECTION_IMPL = 
"hbase.client.connection.impl";
-050
-051  /**
-052   * @return - true if the master server 
is running
-053   * @deprecated this has been deprecated 
without a replacement
-054   */
-055  @Deprecated
-056  boolean isMasterRunning()
-057  throws MasterNotRunningException, 
ZooKeeperConnectionException;
-058
-059  /**
-060   * Use this api to check if the table 
has been created with the specified number of
-061   * splitkeys which was used while 
creating the given table.
-062   * Note : If this api is used after a 
table's region gets splitted, the api may return
-063   * false.
-064   * @param tableName
-065   *  tableName
-066   * @param splitKeys
-067   *  splitKeys used while 
creating table
-068   * @throws IOException
-069   *   if a remote or network 
exception occurs
-070   */
-071  boolean isTableAvailable(TableName 
tableName, byte[][] splitKeys) throws
-072  IOException;
-073
-074  /**
-075   * A table that isTableEnabled == false 
and isTableDisabled == false
-076   * is possible. This happens when a 
table has a lot of regions
-077   * that must be processed.
-078   * @param tableName table name
-079   * @return true if the table is 
enabled, false otherwise
-080   * @throws IOException if a remote or 
network exception occurs
-081   */
-082  boolean isTableEnabled(TableName 
tableName) throws IOException;
-083
-084  /**
-085   * @param tableName table name
-086   * @return true if the table is 
disabled, false otherwise
-087   * @throws IOException if a remote or 
network exception occurs
-088   */
-089  boolean isTableDisabled(TableName 
tableName) throws IOException;
-090
-091  /**
-092   * Retrieve TableState, represent 
current table state.
-093   * @param tableName table state for
-094   * @return state of the table
-095   */
-096  TableState getTableState(TableName 
tableName)  throws IOException;
-097
-098  /**
-099   * Find the location of the region of 
tableName that row
-100   * lives in.
-101   * @param tableName name of the table 
row is in
-102   * @param row row key you're trying to 
find the region of
-103   * @return HRegionLocation that 
describes where to find the region in
-104   *   question
-105   * @throws IOException if a remote or 
network exception occurs
-106   */
-107  HRegionLocation locateRegion(final 
TableName tableName,
-108  final byte [] row) throws 
IOException;
-109

[48/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/apidocs/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/ColumnValueFilter.html 
b/apidocs/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
new file mode 100644
index 000..b49c3db
--- /dev/null
+++ b/apidocs/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
@@ -0,0 +1,647 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+
+ColumnValueFilter (Apache HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
+var methods = 
{"i0":9,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":9,"i9":10,"i10":10,"i11":10};
+var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev Class
+Next Class
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+Summary: 
+Nested | 
+Field | 
+Constr | 
+Method
+
+
+Detail: 
+Field | 
+Constr | 
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.filter
+Class ColumnValueFilter
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.filter.Filter
+
+
+org.apache.hadoop.hbase.filter.FilterBase
+
+
+org.apache.hadoop.hbase.filter.ColumnValueFilter
+
+
+
+
+
+
+
+
+
+
+
+
+@InterfaceAudience.Public
+public class ColumnValueFilter
+extends org.apache.hadoop.hbase.filter.FilterBase
+Different from SingleColumnValueFilter which 
returns an entire row
+ when specified condition is matched, ColumnValueFilter return the 
matched cell only.
+ 
+ This filter is used to filter cells based on column and value.
+ It takes a CompareOperator operator (<, <=, 
=, !=, >, >=), and
+ and a ByteArrayComparable 
comparator.
+
+
+
+
+
+
+
+
+
+
+
+Nested Class Summary
+
+
+
+
+Nested classes/interfaces inherited from 
class org.apache.hadoop.hbase.filter.Filter
+Filter.ReturnCode
+
+
+
+
+
+
+
+
+Field Summary
+
+
+
+
+Fields inherited from class org.apache.hadoop.hbase.filter.Filter
+reversed
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors 
+
+Constructor and Description
+
+
+ColumnValueFilter(byte[] family,
+ byte[] qualifier,
+ CompareOperator op,
+ byte[] value) 
+
+
+ColumnValueFilter(byte[] family,
+ byte[] qualifier,
+ CompareOperator op,
+ ByteArrayComparable comparator) 
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All Methods Static Methods Instance Methods Concrete Methods 
+
+Modifier and Type
+Method and Description
+
+
+static Filter
+createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments)
+Creating this filter by reflection, it is used by ParseFilter,
+
+
+
+Filter.ReturnCode
+filterCell(Cell c)
+A way to filter based on the column family, column 
qualifier and/or the column value.
+
+
+
+boolean
+filterRowKey(Cell cell)
+Filters a row based on the row key.
+
+
+
+ByteArrayComparable
+getComparator() 
+
+
+CompareOperator
+getCompareOperator() 
+
+
+byte[]
+getFamily() 
+
+
+byte[]
+getQualifier() 
+
+
+boolean
+isFamilyEssential(byte[] name)
+By default, we require all scan's column families to be 
present.
+
+
+
+static ColumnValueFilter
+parseFrom(byte[] pbBytes)
+Parse protobuf bytes to a ColumnValueFilter
+
+
+
+void
+reset()
+Filters that are purely stateless and do nothing in their 
reset() methods can inherit
+ this null/empty implementation.
+
+
+
+byte[]
+toByteArray()
+Return length 0 byte array for Filters that don't require 
special serialization
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+toString()
+Return filter's info for debugging and logging 
purpose.
+
+
+
+
+
+
+
+Methods inherited from 
class org.apache.hadoop.hbase.filter.FilterBase
+filterAllRemaining, filterRow, filterRowCells, filterRowKey, 
getNextCellHint, hasFilterRow, transformCell
+
+
+
+
+
+Methods inherited from class org.apache.hadoop.hbase.filter.Filter
+filterKeyValue,
 isReversed,
 setReversed
+
+
+
+
+
+Methods 

[50/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/apidocs/deprecated-list.html
--
diff --git a/apidocs/deprecated-list.html b/apidocs/deprecated-list.html
index 24580b4..66dafbe 100644
--- a/apidocs/deprecated-list.html
+++ b/apidocs/deprecated-list.html
@@ -605,55 +605,49 @@
 
 
 
-org.apache.hadoop.hbase.filter.ParseFilter.createCompareOperator(byte[])
-Since 2.0
- 
-
-
-
 org.apache.hadoop.hbase.HRegionInfo.createRegionName(TableName,
 byte[], byte[], boolean)
 As of release 2.0.0, this 
will be removed in HBase 3.0.0
  Use RegionInfo.createRegionName(TableName,
 byte[], byte[], boolean).
 
 
-
+
 org.apache.hadoop.hbase.HRegionInfo.createRegionName(TableName,
 byte[], byte[], int, boolean)
 As of release 2.0.0, this 
will be removed in HBase 3.0.0
  Use RegionInfo.createRegionName(TableName,
 byte[], byte[], int, boolean).
 
 
-
+
 org.apache.hadoop.hbase.HRegionInfo.createRegionName(TableName,
 byte[], long, boolean)
 As of release 2.0.0, this 
will be removed in HBase 3.0.0
  Use RegionInfo.createRegionName(TableName,
 byte[], long, boolean).
 
 
-
+
 org.apache.hadoop.hbase.HRegionInfo.createRegionName(TableName,
 byte[], long, int, boolean)
 As of release 2.0.0, this 
will be removed in HBase 3.0.0
  Use RegionInfo.createRegionName(TableName,
 byte[], long, int, boolean).
 
 
-
+
 org.apache.hadoop.hbase.HRegionInfo.createRegionName(TableName,
 byte[], String, boolean)
 As of release 2.0.0, this 
will be removed in HBase 3.0.0
  Use RegionInfo.createRegionName(TableName,
 byte[], String, boolean).
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.deleteColumn(TableName,
 byte[])
 As of release 2.0.0.
  This will be removed in HBase 3.0.0.
  Use Admin.deleteColumnFamily(TableName,
 byte[])}.
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.deleteSnapshots(String)
 since 2.0 version and will 
be removed in 3.0 version.
  Use Admin.deleteSnapshots(Pattern)
 instead.
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.deleteTables(Pattern)
 since 2.0 version and will 
be removed in 3.0 version
  This is just a trivial helper method without any magic.
@@ -661,7 +655,7 @@
  and Admin.deleteTable(TableName)
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.deleteTables(String)
 since 2.0 version and will 
be removed in 3.0 version
  This is just a trivial helper method without any magic.
@@ -669,25 +663,25 @@
  and Admin.deleteTable(TableName)
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.deleteTableSnapshots(String,
 String)
 since 2.0 version and will 
be removed in 3.0 version.
  Use Admin.deleteTableSnapshots(Pattern,
 Pattern) instead.
 
 
-
+
 org.apache.hadoop.hbase.client.replication.ReplicationAdmin.disablePeer(String)
 use Admin.disableReplicationPeer(String)
  instead
 
 
-
+
 org.apache.hadoop.hbase.client.replication.ReplicationAdmin.disableTableRep(TableName)
 use Admin.disableTableReplication(TableName)
  instead
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.disableTables(Pattern)
 since 2.0 version and will 
be removed in 3.0 version
  This is just a trivial helper method without any magic.
@@ -695,7 +689,7 @@
  and Admin.disableTable(org.apache.hadoop.hbase.TableName)
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.disableTables(String)
 since 2.0 version and will 
be removed in 3.0 version
  This is just a trivial helper method without any magic.
@@ -703,25 +697,25 @@
  and Admin.disableTable(org.apache.hadoop.hbase.TableName)
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.enableCatalogJanitor(boolean)
 Since 2.0.0. Will be 
removed in 3.0.0. Use Admin.catalogJanitorSwitch(boolean)}
  instead.
 
 
-
+
 org.apache.hadoop.hbase.client.replication.ReplicationAdmin.enablePeer(String)
 use Admin.enableReplicationPeer(String)
  instead
 
 
-
+
 org.apache.hadoop.hbase.client.replication.ReplicationAdmin.enableTableRep(TableName)
 use Admin.enableTableReplication(TableName)
  instead
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.enableTables(Pattern)
 since 2.0 version and will 
be removed in 3.0 version
  This is just a trivial helper method without any magic.
@@ -729,7 +723,7 @@
  and Admin.enableTable(org.apache.hadoop.hbase.TableName)
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.enableTables(String)
 since 2.0 version and will 
be removed in 3.0 version
  This is just a trivial helper method without any magic.
@@ -737,1457 +731,1457 @@
  and Admin.enableTable(org.apache.hadoop.hbase.TableName)
 
 
-
+
 org.apache.hadoop.hbase.HRegionInfo.encodeRegionName(byte[])
 As of release 2.0.0, this 
will be removed in HBase 3.0.0
  Use RegionInfo.encodeRegionName(byte[]).
 
 
-
+
 org.apache.hadoop.hbase.client.Increment.equals(Obje

[15/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.ServerErrors.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.ServerErrors.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.ServerErrors.html
index 93f650f..d7aa8b1 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.ServerErrors.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.ServerErrors.html
@@ -546,1472 +546,1464 @@
 538return this.conf;
 539  }
 540
-541  /**
-542   * @return true if the master is 
running, throws an exception otherwise
-543   * @throws 
org.apache.hadoop.hbase.MasterNotRunningException - if the master is not 
running
-544   * @deprecated this has been deprecated 
without a replacement
-545   */
-546  @Deprecated
-547  @Override
-548  public boolean isMasterRunning()
-549  throws MasterNotRunningException, 
ZooKeeperConnectionException {
-550// When getting the master 
connection, we check it's running,
-551// so if there is no exception, it 
means we've been able to get a
-552// connection on a running master
-553MasterKeepAliveConnection m = 
getKeepAliveMasterService();
-554m.close();
-555return true;
-556  }
-557
-558  @Override
-559  public HRegionLocation 
getRegionLocation(final TableName tableName,
-560  final byte [] row, boolean 
reload)
-561  throws IOException {
-562return reload? 
relocateRegion(tableName, row): locateRegion(tableName, row);
-563  }
-564
-565
-566  @Override
-567  public boolean isTableEnabled(TableName 
tableName) throws IOException {
-568return 
getTableState(tableName).inStates(TableState.State.ENABLED);
-569  }
-570
-571  @Override
-572  public boolean 
isTableDisabled(TableName tableName) throws IOException {
-573return 
getTableState(tableName).inStates(TableState.State.DISABLED);
-574  }
-575
-576  @Override
-577  public boolean isTableAvailable(final 
TableName tableName, @Nullable final byte[][] splitKeys)
-578  throws IOException {
-579if (this.closed) {
-580  throw new IOException(toString() + 
" closed");
-581}
-582try {
-583  if (!isTableEnabled(tableName)) {
-584LOG.debug("Table " + tableName + 
" not enabled");
-585return false;
-586  }
-587  List> locations =
-588
MetaTableAccessor.getTableRegionsAndLocations(this, tableName, true);
-589
-590  int notDeployed = 0;
-591  int regionCount = 0;
-592  for (Pair pair : locations) {
-593RegionInfo info = 
pair.getFirst();
-594if (pair.getSecond() == null) {
-595  if (LOG.isDebugEnabled()) {
-596LOG.debug("Table " + 
tableName + " has not deployed region " + pair.getFirst()
-597.getEncodedName());
-598  }
-599  notDeployed++;
-600} else if (splitKeys != null
-601&& 
!Bytes.equals(info.getStartKey(), HConstants.EMPTY_BYTE_ARRAY)) {
-602  for (byte[] splitKey : 
splitKeys) {
-603// Just check if the splitkey 
is available
-604if 
(Bytes.equals(info.getStartKey(), splitKey)) {
-605  regionCount++;
-606  break;
-607}
-608  }
-609} else {
-610  // Always empty start row 
should be counted
-611  regionCount++;
-612}
-613  }
-614  if (notDeployed > 0) {
-615if (LOG.isDebugEnabled()) {
-616  LOG.debug("Table " + tableName 
+ " has " + notDeployed + " regions");
-617}
-618return false;
-619  } else if (splitKeys != null 
&& regionCount != splitKeys.length + 1) {
-620if (LOG.isDebugEnabled()) {
-621  LOG.debug("Table " + tableName 
+ " expected to have " + (splitKeys.length + 1)
-622  + " regions, but only " + 
regionCount + " available");
-623}
-624return false;
-625  } else {
-626if (LOG.isDebugEnabled()) {
-627  LOG.debug("Table " + tableName 
+ " should be available");
-628}
-629return true;
-630  }
-631} catch (TableNotFoundException tnfe) 
{
-632  LOG.warn("Table " + tableName + " 
not enabled, it is not exists");
-633  return false;
-634}
-635  }
-636
-637  @Override
-638  public HRegionLocation 
locateRegion(final byte[] regionName) throws IOException {
-639RegionLocations locations = 
locateRegion(RegionInfo.getTable(regionName),
-640  RegionInfo.getStartKey(regionName), 
false, true);
-641return locations == null ? null : 
locations.getRegionLocation();
+541  private void checkClosed() throws 
DoNotRetryIOException {
+542if (th

[44/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
index e5fdac5..ad7c82a 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
@@ -33,299 +33,303 @@
 025import org.apache.hadoop.hbase.Cell;
 026import 
org.apache.hadoop.hbase.CompareOperator;
 027import 
org.apache.hadoop.hbase.PrivateCellUtil;
-028import 
org.apache.yetus.audience.InterfaceAudience;
-029import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-030import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-031import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-032import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
-033import 
org.apache.hadoop.hbase.util.Bytes;
-034
-035import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-036/**
-037 * This is a generic filter to be used to 
filter by comparison.  It takes an
-038 * operator (equal, greater, not equal, 
etc) and a byte [] comparator.
-039 * 

-040 * To filter by row key, use {@link RowFilter}. +028import org.apache.hadoop.hbase.util.Bytes; +029import org.apache.yetus.audience.InterfaceAudience; +030 +031import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; +032 +033import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; +034import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; +035import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; +036import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType; +037 +038/** +039 * This is a generic filter to be used to filter by comparison. It takes an +040 * operator (equal, greater, not equal, etc) and a byte [] comparator. 041 *

-042 * To filter by column qualifier, use {@link QualifierFilter}. +042 * To filter by row key, use {@link RowFilter}. 043 *

-044 * To filter by value, use {@link SingleColumnValueFilter}. +044 * To filter by column family, use {@link FamilyFilter}. 045 *

-046 * These filters can be wrapped with {@link SkipFilter} and {@link WhileMatchFilter} -047 * to add more control. -048 *

-049 * Multiple filters can be combined using {@link FilterList}. -050 */ -051@InterfaceAudience.Public -052public abstract class CompareFilter extends FilterBase { -053 /** -054 * Comparison operators. For filters only! -055 * Use {@link CompareOperator} otherwise. -056 * It (intentionally) has at least the below enums with same names. -057 * @deprecated since 2.0.0. Will be removed in 3.0.0. Use {@link CompareOperator} instead. -058 */ -059 @Deprecated -060 @InterfaceAudience.Public -061 public enum CompareOp { -062/** less than */ -063LESS, -064/** less than or equal to */ -065LESS_OR_EQUAL, -066/** equals */ -067EQUAL, -068/** not equal */ -069NOT_EQUAL, -070/** greater than or equal to */ -071GREATER_OR_EQUAL, -072/** greater than */ -073GREATER, -074/** no operation */ -075NO_OP, -076 } -077 -078 protected CompareOperator op; -079 protected ByteArrayComparable comparator; -080 -081 /** -082 * Constructor. -083 * @param compareOp the compare op for row matching -084 * @param comparator the comparator for row matching -085 * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use other constructor. -086 */ -087 @Deprecated -088 public CompareFilter(final CompareOp compareOp, -089 final ByteArrayComparable comparator) { -090 this(CompareOperator.valueOf(compareOp.name()), comparator); -091 } -092 -093 /** -094 * Constructor. -095 * @param op the compare op for row matching -096 * @param comparator the comparator for row matching -097 */ -098 public CompareFilter(final CompareOperator op, -099 final ByteArrayComparable comparator) { -100this.op = op; -101this.comparator = comparator; -102 } -103 -104 /** -105 * @return operator -106 * @deprecated since 2.0.0. Will be removed in 3.0.0. Use {@link #getCompareOperator()} instead. 
-107 */ -108 @Deprecated -109 public CompareOp getOperator() { -110return CompareOp.valueOf(op.name()); -111 } -112 -113 public CompareOperator getCompareOperator() { -114return op; +046 * To filter by column qualifier, use {@link QualifierFilter}. +047 *

+048 * To filter by value, use {@link ValueFilter}. +049 *

+050 * These filters can be wrapped with {@link SkipFilter} and {@link WhileMatchFilter} +051 * to add more control. +052 *

+053 * Multiple filters can be combined using {@link FilterList}. +054 */ +055@InterfaceAudience.Public +056publ


[41/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 0328956..b705f85 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -286,10 +286,10 @@
  Warnings
  Errors
 
-3544
+3550
 0
 0
-16537
+16524
 
 Files
 
@@ -1259,9056 +1259,9051 @@
 0
 2
 
-org/apache/hadoop/hbase/client/ClusterConnection.java
-0
-0
-1
-
 org/apache/hadoop/hbase/client/ColumnCountOnRowFilter.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
 0
 0
 54
-
+
 org/apache/hadoop/hbase/client/CompactType.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/CompleteScanResultCache.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ConnectionFactory.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/ConnectionImplementation.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/client/ConnectionUtils.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/DelayingRunner.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/Delete.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/Get.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/client/HBaseAdmin.java
 0
 0
 72
-
+
 org/apache/hadoop/hbase/client/HConnectionTestingUtility.java
 0
 0
 14
-
+
 org/apache/hadoop/hbase/client/HRegionLocator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/HTable.java
 0
 0
 46
-
+
 org/apache/hadoop/hbase/client/HTableMultiplexer.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ImmutableHRegionInfo.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/MasterCallable.java
 0
 0
-3
-
+2
+
 org/apache/hadoop/hbase/client/MasterCoprocessorRpcChannelImpl.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/MetaCache.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/MetricsConnection.java
 0
 0
 41
-
+
 org/apache/hadoop/hbase/client/MultiAction.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/MultiResponse.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/MultiServerCallable.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/Mutation.java
 0
 0
 23
-
+
 org/apache/hadoop/hbase/client/NoOpRetryableCallerInterceptor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/NoncedRegionServerCallable.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/Operation.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/PackagePrivateFieldAccessor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase/client/Put.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/client/Query.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/client/QuotaStatusCalls.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
 0
 0
 88
-
+
 org/apache/hadoop/hbase/client/RegionAdminServiceCallable.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannelImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/RegionInfo.java
 0
 0
 55
-
+
 org/apache/hadoop/hbase/client/RegionInfoBuilder.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/RegionInfoDisplay.java
 0
 0
 17
-
+
 org/apache/hadoop/hbase/client/RegionLocator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/RegionReplicaUtil.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/client/RegionServerCallable.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/RegionServerCoprocessorRpcChannelImpl.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/RequestController.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/RequestControllerFactory.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/Result.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase/client/ResultBoundedCompletionService.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/client/ResultScanner.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/RetriesExhaustedException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/RetryingCallerInterceptor.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/ReversedClientScanner.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/client/ReversedScannerCallable.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/client/RowAccess.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/RowMutations.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/RpcRetryingCaller.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/RpcRetryingCallerWit

[05/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
index 7edb3ff..665071c 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
@@ -1221,2378 +1221,2377 @@
 1213
configurationManager.registerObserver(procEnv);
 1214
 1215int cpus = 
Runtime.getRuntime().availableProcessors();
-1216final int numThreads = 
conf.getInt(MasterProcedureConstants.MASTER_PROCEDURE_THREADS,
-1217Math.max((cpus > 0? cpus/4: 
0),
-1218
MasterProcedureConstants.DEFAULT_MIN_MASTER_PROCEDURE_THREADS));
-1219final boolean abortOnCorruption = 
conf.getBoolean(
-1220
MasterProcedureConstants.EXECUTOR_ABORT_ON_CORRUPTION,
-1221
MasterProcedureConstants.DEFAULT_EXECUTOR_ABORT_ON_CORRUPTION);
-1222procedureStore.start(numThreads);
-1223procedureExecutor.start(numThreads, 
abortOnCorruption);
-1224
procEnv.getRemoteDispatcher().start();
-1225  }
-1226
-1227  private void stopProcedureExecutor() 
{
-1228if (procedureExecutor != null) {
-1229  
configurationManager.deregisterObserver(procedureExecutor.getEnvironment());
-1230  
procedureExecutor.getEnvironment().getRemoteDispatcher().stop();
-1231  procedureExecutor.stop();
-1232  procedureExecutor.join();
-1233  procedureExecutor = null;
-1234}
-1235
-1236if (procedureStore != null) {
-1237  
procedureStore.stop(isAborted());
-1238  procedureStore = null;
-1239}
-1240  }
-1241
-1242  private void stopChores() {
-1243if (this.expiredMobFileCleanerChore 
!= null) {
-1244  
this.expiredMobFileCleanerChore.cancel(true);
-1245}
-1246if (this.mobCompactChore != null) 
{
-1247  
this.mobCompactChore.cancel(true);
-1248}
-1249if (this.balancerChore != null) {
-1250  this.balancerChore.cancel(true);
-1251}
-1252if (this.normalizerChore != null) 
{
-1253  
this.normalizerChore.cancel(true);
-1254}
-1255if (this.clusterStatusChore != null) 
{
-1256  
this.clusterStatusChore.cancel(true);
-1257}
-1258if (this.catalogJanitorChore != 
null) {
-1259  
this.catalogJanitorChore.cancel(true);
-1260}
-1261if (this.clusterStatusPublisherChore 
!= null){
-1262  
clusterStatusPublisherChore.cancel(true);
-1263}
-1264if (this.mobCompactThread != null) 
{
-1265  this.mobCompactThread.close();
-1266}
-1267
-1268if (this.quotaObserverChore != null) 
{
-1269  quotaObserverChore.cancel();
-1270}
-1271if (this.snapshotQuotaChore != null) 
{
-1272  snapshotQuotaChore.cancel();
-1273}
-1274  }
-1275
-1276  /**
-1277   * @return Get remote side's 
InetAddress
-1278   */
-1279  InetAddress getRemoteInetAddress(final 
int port,
-1280  final long serverStartCode) throws 
UnknownHostException {
-1281// Do it out here in its own little 
method so can fake an address when
-1282// mocking up in tests.
-1283InetAddress ia = 
RpcServer.getRemoteIp();
-1284
-1285// The call could be from the local 
regionserver,
-1286// in which case, there is no remote 
address.
-1287if (ia == null && 
serverStartCode == startcode) {
-1288  InetSocketAddress isa = 
rpcServices.getSocketAddress();
-1289  if (isa != null && 
isa.getPort() == port) {
-1290ia = isa.getAddress();
-1291  }
-1292}
-1293return ia;
-1294  }
-1295
-1296  /**
-1297   * @return Maximum time we should run 
balancer for
-1298   */
-1299  private int getMaxBalancingTime() {
-1300int maxBalancingTime = 
getConfiguration().getInt(HConstants.HBASE_BALANCER_MAX_BALANCING, -1);
-1301if (maxBalancingTime == -1) {
-1302  // if max balancing time isn't 
set, defaulting it to period time
-1303  maxBalancingTime = 
getConfiguration().getInt(HConstants.HBASE_BALANCER_PERIOD,
-1304
HConstants.DEFAULT_HBASE_BALANCER_PERIOD);
-1305}
-1306return maxBalancingTime;
-1307  }
-1308
-1309  /**
-1310   * @return Maximum number of regions 
in transition
-1311   */
-1312  private int 
getMaxRegionsInTransition() {
-1313int numRegions = 
this.assignmentManager.getRegionStates().getRegionAssignments().size();
-1314return Math.max((int) 
Math.floor(numRegions * this.maxRitPercent), 1);
-1315  }
-1316
-1317  /**
-1318   * It first sleep to the next balance 
plan start time. Meanwhile, throttling by the max
-1319   * number regions in transition to 
protect availability.
-1320   * @param nextBalanceStartTime The 
next balance plan start time
-1321   * @param maxRegionsInTransition max 
number of region

[13/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.html
index 93f650f..d7aa8b1 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.html
@@ -546,1472 +546,1464 @@
 538return this.conf;
 539  }
 540
-541  /**
-542   * @return true if the master is 
running, throws an exception otherwise
-543   * @throws 
org.apache.hadoop.hbase.MasterNotRunningException - if the master is not 
running
-544   * @deprecated this has been deprecated 
without a replacement
-545   */
-546  @Deprecated
-547  @Override
-548  public boolean isMasterRunning()
-549  throws MasterNotRunningException, 
ZooKeeperConnectionException {
-550// When getting the master 
connection, we check it's running,
-551// so if there is no exception, it 
means we've been able to get a
-552// connection on a running master
-553MasterKeepAliveConnection m = 
getKeepAliveMasterService();
-554m.close();
-555return true;
-556  }
-557
-558  @Override
-559  public HRegionLocation 
getRegionLocation(final TableName tableName,
-560  final byte [] row, boolean 
reload)
-561  throws IOException {
-562return reload? 
relocateRegion(tableName, row): locateRegion(tableName, row);
-563  }
-564
-565
-566  @Override
-567  public boolean isTableEnabled(TableName 
tableName) throws IOException {
-568return 
getTableState(tableName).inStates(TableState.State.ENABLED);
-569  }
-570
-571  @Override
-572  public boolean 
isTableDisabled(TableName tableName) throws IOException {
-573return 
getTableState(tableName).inStates(TableState.State.DISABLED);
-574  }
-575
-576  @Override
-577  public boolean isTableAvailable(final 
TableName tableName, @Nullable final byte[][] splitKeys)
-578  throws IOException {
-579if (this.closed) {
-580  throw new IOException(toString() + 
" closed");
-581}
-582try {
-583  if (!isTableEnabled(tableName)) {
-584LOG.debug("Table " + tableName + 
" not enabled");
-585return false;
-586  }
-587  List> locations =
-588
MetaTableAccessor.getTableRegionsAndLocations(this, tableName, true);
-589
-590  int notDeployed = 0;
-591  int regionCount = 0;
-592  for (Pair pair : locations) {
-593RegionInfo info = 
pair.getFirst();
-594if (pair.getSecond() == null) {
-595  if (LOG.isDebugEnabled()) {
-596LOG.debug("Table " + 
tableName + " has not deployed region " + pair.getFirst()
-597.getEncodedName());
-598  }
-599  notDeployed++;
-600} else if (splitKeys != null
-601&& 
!Bytes.equals(info.getStartKey(), HConstants.EMPTY_BYTE_ARRAY)) {
-602  for (byte[] splitKey : 
splitKeys) {
-603// Just check if the splitkey 
is available
-604if 
(Bytes.equals(info.getStartKey(), splitKey)) {
-605  regionCount++;
-606  break;
-607}
-608  }
-609} else {
-610  // Always empty start row 
should be counted
-611  regionCount++;
-612}
-613  }
-614  if (notDeployed > 0) {
-615if (LOG.isDebugEnabled()) {
-616  LOG.debug("Table " + tableName 
+ " has " + notDeployed + " regions");
-617}
-618return false;
-619  } else if (splitKeys != null 
&& regionCount != splitKeys.length + 1) {
-620if (LOG.isDebugEnabled()) {
-621  LOG.debug("Table " + tableName 
+ " expected to have " + (splitKeys.length + 1)
-622  + " regions, but only " + 
regionCount + " available");
-623}
-624return false;
-625  } else {
-626if (LOG.isDebugEnabled()) {
-627  LOG.debug("Table " + tableName 
+ " should be available");
-628}
-629return true;
-630  }
-631} catch (TableNotFoundException tnfe) 
{
-632  LOG.warn("Table " + tableName + " 
not enabled, it is not exists");
-633  return false;
-634}
-635  }
-636
-637  @Override
-638  public HRegionLocation 
locateRegion(final byte[] regionName) throws IOException {
-639RegionLocations locations = 
locateRegion(RegionInfo.getTable(regionName),
-640  RegionInfo.getStartKey(regionName), 
false, true);
-641return locations == null ? null : 
locations.getRegionLocation();
+541  private void checkClosed() throws 
DoNotRetryIOException {
+542if (this.closed) {
+543  throw new 
DoNotRetryIOException(toString() + " closed");
+544}
+545  }
+546
+547  /**
+548   * @return true if the master is 
runnin

[20/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html
index e901330..d15170c 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html
@@ -844,7 +844,7 @@
 
 
 
-private Procedure
+private Procedure
 ProcedureExecutor.WorkerThread.activeProcedure 
 
 
@@ -1070,7 +1070,7 @@
 
 
 void
-ProcedureExecutor.TimeoutExecutorThread.add(Procedure procedure) 
+TimeoutExecutorThread.add(Procedure procedure) 
 
 
 void
@@ -1172,7 +1172,7 @@
 
 
 private void
-ProcedureExecutor.TimeoutExecutorThread.executeTimedoutProcedure(Procedure proc) 
+TimeoutExecutorThread.executeTimedoutProcedure(Procedure proc) 
 
 
 protected static http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
@@ -1180,7 +1180,7 @@
   Procedure proc) 
 
 
-private http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
+(package private) http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
 ProcedureExecutor.getRootProcedureId(Procedure proc) 
 
 
@@ -1267,7 +1267,7 @@
 
 
 boolean
-ProcedureExecutor.TimeoutExecutorThread.remove(Procedure procedure) 
+TimeoutExecutorThread.remove(Procedure procedure) 
 
 
 private void
@@ -1371,7 +1371,7 @@
 CompletedProcedureRetainer(Procedure procedure) 
 
 
-DelayedProcedure(Procedure procedure) 
+DelayedProcedure(Procedure procedure) 
 
 
 LockedResource(LockedResourceType resourceType,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.DelayedProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.DelayedProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.DelayedProcedure.html
deleted file mode 100644
index 222fa56..000
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.DelayedProcedure.html
+++ /dev/null
@@ -1,165 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd";>
-
-
-
-
-
-Uses of Class 
org.apache.hadoop.hbase.procedure2.ProcedureExecutor.DelayedProcedure (Apache 
HBase 3.0.0-SNAPSHOT API)
-
-
-
-
-
-
-
-JavaScript is disabled on your browser.
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-Prev
-Next
-
-
-Frames
-No Frames
-
-
-All Classes
-
-
-
-
-
-
-
-
-
-
-Uses of 
Classorg.apache.hadoop.hbase.procedure2.ProcedureExecutor.DelayedProcedure
-
-
-
-
-
-Packages that use ProcedureExecutor.DelayedProcedure 
-
-Package
-Description
-
-
-
-org.apache.hadoop.hbase.procedure2
- 
-
-
-
-
-
-
-
-
-
-Uses of ProcedureExecutor.DelayedProcedure in 
org.apache.hadoop.hbase.procedure2
-
-Methods in org.apache.hadoop.hbase.procedure2
 with parameters of type ProcedureExecutor.DelayedProcedure 
-
-Modifier and Type
-Method and Description
-
-
-
-private void
-ProcedureExecutor.TimeoutExecutorThread.execDelayedProcedure(ProcedureExecutor.DelayedProcedure delayed) 
-
-
-
-
-
-
-
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-Prev
-Next
-
-
-Frames
-No Frames
-
-
-All Classes
-
-
-
-
-
-
-
-
-
-Copyright © 2007–2018 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
-
-

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.InlineChore.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.InlineChore.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2

[37/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/HBaseIOException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/HBaseIOException.html 
b/devapidocs/org/apache/hadoop/hbase/HBaseIOException.html
index 6ba3a1e..8a644ba 100644
--- a/devapidocs/org/apache/hadoop/hbase/HBaseIOException.html
+++ b/devapidocs/org/apache/hadoop/hbase/HBaseIOException.html
@@ -122,7 +122,7 @@
 
 
 Direct Known Subclasses:
-BackupException, BadProcedureException, 
CallCancelledException, CallTimeoutException, CellScannerButNoCodecException, ClusterSchemaException, CodecException, CorruptedWALProcedureStoreException,
 DamagedWALException, DoNotRetryIOException, FailedRemoteDispatchException, 
FailedServerException, FallbackDisallowedException, LeaseNotRecoveredException, 
NoSuchProcedureException, 
PleaseHoldException, RegionException, ServerCrashException, Serv
 iceNotRunningException, StoppedRpcClientException, TableInfoMissingException, UnexpectedStateException, WrongRowIOException
+BackupException, BadProcedureException, 
CallCancelledException, CallTimeoutException, CellScannerButNoCodecException, ClusterSchemaException, CodecException, CorruptedWALProcedureStoreException,
 DamagedWALException, DoNotRetryIOException, FailedRemoteDispatchException, 
FailedServerException, FallbackDisallowedException, LeaseNotRecoveredException, 
MasterNotRunningException, NoSuchProcedureException, 
PleaseHoldException, RegionException, 
 ServerCrashException, ServiceNotRunningException, StoppedRpcClientException, TableInfoMissingException, UnexpectedStateException, WrongRowIOException
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/MasterNotRunningException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/MasterNotRunningException.html 
b/devapidocs/org/apache/hadoop/hbase/MasterNotRunningException.html
index 26d9684..f3a326d 100644
--- a/devapidocs/org/apache/hadoop/hbase/MasterNotRunningException.html
+++ b/devapidocs/org/apache/hadoop/hbase/MasterNotRunningException.html
@@ -103,6 +103,9 @@
 http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">java.io.IOException
 
 
+org.apache.hadoop.hbase.HBaseIOException
+
+
 org.apache.hadoop.hbase.MasterNotRunningException
 
 
@@ -113,6 +116,8 @@
 
 
 
+
+
 
 
 
@@ -123,8 +128,8 @@
 
 
 @InterfaceAudience.Public
-public class MasterNotRunningException
-extends http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
+public class MasterNotRunningException
+extends HBaseIOException
 Thrown if the master is not running
 
 See Also:
@@ -228,7 +233,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/IOException.ht
 
 
 serialVersionUID
-private static final long serialVersionUID
+private static final long serialVersionUID
 
 See Also:
 Constant
 Field Values
@@ -249,7 +254,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/IOException.ht
 
 
 MasterNotRunningException
-public MasterNotRunningException()
+public MasterNotRunningException()
 default constructor
 
 
@@ -259,7 +264,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/IOException.ht
 
 
 MasterNotRunningException
-public MasterNotRunningException(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String s)
+public MasterNotRunningException(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String s)
 Constructor
 
 Parameters:
@@ -273,7 +278,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/IOException.ht
 
 
 MasterNotRunningException
-public MasterNotRunningException(http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true";
 title="class or interface in java.lang">Exception e)
+public MasterNotRunningException(http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true";
 title="class or interface in java.lang">Exception e)
 Constructor taking another exception.
 
 Parameters:
@@ -287,7 +292,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/IOException.ht
 
 
 MasterNotRunningException
-public MasterNotRunningException(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String s,
+public MasterNotRunningException(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String s,
  http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true";
 title="class or interface in java.lang">Exception e)
 
 

http://git

[17/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceState.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceState.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceState.html
index 93f650f..d7aa8b1 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceState.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceState.html
@@ -546,1472 +546,1464 @@
 538return this.conf;
 539  }
 540
-541  /**
-542   * @return true if the master is 
running, throws an exception otherwise
-543   * @throws 
org.apache.hadoop.hbase.MasterNotRunningException - if the master is not 
running
-544   * @deprecated this has been deprecated 
without a replacement
-545   */
-546  @Deprecated
-547  @Override
-548  public boolean isMasterRunning()
-549  throws MasterNotRunningException, 
ZooKeeperConnectionException {
-550// When getting the master 
connection, we check it's running,
-551// so if there is no exception, it 
means we've been able to get a
-552// connection on a running master
-553MasterKeepAliveConnection m = 
getKeepAliveMasterService();
-554m.close();
-555return true;
-556  }
-557
-558  @Override
-559  public HRegionLocation 
getRegionLocation(final TableName tableName,
-560  final byte [] row, boolean 
reload)
-561  throws IOException {
-562return reload? 
relocateRegion(tableName, row): locateRegion(tableName, row);
-563  }
-564
-565
-566  @Override
-567  public boolean isTableEnabled(TableName 
tableName) throws IOException {
-568return 
getTableState(tableName).inStates(TableState.State.ENABLED);
-569  }
-570
-571  @Override
-572  public boolean 
isTableDisabled(TableName tableName) throws IOException {
-573return 
getTableState(tableName).inStates(TableState.State.DISABLED);
-574  }
-575
-576  @Override
-577  public boolean isTableAvailable(final 
TableName tableName, @Nullable final byte[][] splitKeys)
-578  throws IOException {
-579if (this.closed) {
-580  throw new IOException(toString() + 
" closed");
-581}
-582try {
-583  if (!isTableEnabled(tableName)) {
-584LOG.debug("Table " + tableName + 
" not enabled");
-585return false;
-586  }
-587  List> locations =
-588
MetaTableAccessor.getTableRegionsAndLocations(this, tableName, true);
-589
-590  int notDeployed = 0;
-591  int regionCount = 0;
-592  for (Pair pair : locations) {
-593RegionInfo info = 
pair.getFirst();
-594if (pair.getSecond() == null) {
-595  if (LOG.isDebugEnabled()) {
-596LOG.debug("Table " + 
tableName + " has not deployed region " + pair.getFirst()
-597.getEncodedName());
-598  }
-599  notDeployed++;
-600} else if (splitKeys != null
-601&& 
!Bytes.equals(info.getStartKey(), HConstants.EMPTY_BYTE_ARRAY)) {
-602  for (byte[] splitKey : 
splitKeys) {
-603// Just check if the splitkey 
is available
-604if 
(Bytes.equals(info.getStartKey(), splitKey)) {
-605  regionCount++;
-606  break;
-607}
-608  }
-609} else {
-610  // Always empty start row 
should be counted
-611  regionCount++;
-612}
-613  }
-614  if (notDeployed > 0) {
-615if (LOG.isDebugEnabled()) {
-616  LOG.debug("Table " + tableName 
+ " has " + notDeployed + " regions");
-617}
-618return false;
-619  } else if (splitKeys != null 
&& regionCount != splitKeys.length + 1) {
-620if (LOG.isDebugEnabled()) {
-621  LOG.debug("Table " + tableName 
+ " expected to have " + (splitKeys.length + 1)
-622  + " regions, but only " + 
regionCount + " available");
-623}
-624return false;
-625  } else {
-626if (LOG.isDebugEnabled()) {
-627  LOG.debug("Table " + tableName 
+ " should be available");
-628}
-629return true;
-630  }
-631} catch (TableNotFoundException tnfe) 
{
-632  LOG.warn("Table " + tableName + " 
not enabled, it is not exists");
-633  return false;
-634}
-635  }
-636
-637  @Override
-638  public HRegionLocation 
locateRegion(final byte[] regionName) throws IOException {
-639RegionLocations locations = 
locateRegion(RegionInfo.getTable(regionName),
-640  RegionInfo.getStartKey(regionName), 
false, true);
-641return locations == null ? null : 
locations.getRegionLocation();
+541  private void checkClosed() throws 
DoNotRetryIOException {
+542if (this.closed) {
+543  throw new 
DoNotRetryIOException(toString(

[11/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.CheckAndMutateBuilderImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.CheckAndMutateBuilderImpl.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.CheckAndMutateBuilderImpl.html
index 4d03740..2f29cd8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.CheckAndMutateBuilderImpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.CheckAndMutateBuilderImpl.html
@@ -170,1134 +170,1131 @@
 162  final RpcRetryingCallerFactory 
rpcCallerFactory,
 163  final RpcControllerFactory 
rpcControllerFactory,
 164  final ExecutorService pool) {
-165if (connection == null || 
connection.isClosed()) {
-166  throw new 
IllegalArgumentException("Connection is null or closed.");
-167}
-168this.connection = connection;
-169this.configuration = 
connection.getConfiguration();
-170this.connConfiguration = 
connection.getConnectionConfiguration();
-171if (pool == null) {
-172  this.pool = 
getDefaultExecutor(this.configuration);
-173  this.cleanupPoolOnClose = true;
-174} else {
-175  this.pool = pool;
-176  this.cleanupPoolOnClose = false;
-177}
-178if (rpcCallerFactory == null) {
-179  this.rpcCallerFactory = 
connection.getNewRpcRetryingCallerFactory(configuration);
-180} else {
-181  this.rpcCallerFactory = 
rpcCallerFactory;
-182}
-183
-184if (rpcControllerFactory == null) {
-185  this.rpcControllerFactory = 
RpcControllerFactory.instantiate(configuration);
-186} else {
-187  this.rpcControllerFactory = 
rpcControllerFactory;
-188}
-189
-190this.tableName = builder.tableName;
-191this.operationTimeoutMs = 
builder.operationTimeout;
-192this.rpcTimeoutMs = 
builder.rpcTimeout;
-193this.readRpcTimeoutMs = 
builder.readRpcTimeout;
-194this.writeRpcTimeoutMs = 
builder.writeRpcTimeout;
-195this.scannerCaching = 
connConfiguration.getScannerCaching();
-196this.scannerMaxResultSize = 
connConfiguration.getScannerMaxResultSize();
-197
-198// puts need to track errors globally 
due to how the APIs currently work.
-199multiAp = 
this.connection.getAsyncProcess();
-200this.locator = new 
HRegionLocator(tableName, connection);
-201  }
-202
-203  /**
-204   * @return maxKeyValueSize from 
configuration.
-205   */
-206  public static int 
getMaxKeyValueSize(Configuration conf) {
-207return 
conf.getInt(ConnectionConfiguration.MAX_KEYVALUE_SIZE_KEY, -1);
-208  }
-209
-210  @Override
-211  public Configuration getConfiguration() 
{
-212return configuration;
-213  }
-214
-215  @Override
-216  public TableName getName() {
-217return tableName;
-218  }
-219
-220  /**
-221   * INTERNAL Used 
by unit tests and tools to do low-level
-222   * manipulations.
-223   * @return A Connection instance.
-224   */
-225  @VisibleForTesting
-226  protected Connection getConnection() 
{
-227return this.connection;
-228  }
-229
-230  @Override
-231  @Deprecated
-232  public HTableDescriptor 
getTableDescriptor() throws IOException {
-233HTableDescriptor htd = 
HBaseAdmin.getHTableDescriptor(tableName, connection, rpcCallerFactory,
-234  rpcControllerFactory, 
operationTimeoutMs, readRpcTimeoutMs);
-235if (htd != null) {
-236  return new 
ImmutableHTableDescriptor(htd);
-237}
-238return null;
-239  }
-240
-241  @Override
-242  public TableDescriptor getDescriptor() 
throws IOException {
-243return 
HBaseAdmin.getTableDescriptor(tableName, connection, rpcCallerFactory,
-244  rpcControllerFactory, 
operationTimeoutMs, readRpcTimeoutMs);
-245  }
-246
-247  /**
-248   * Get the corresponding start keys and 
regions for an arbitrary range of
-249   * keys.
-250   * 

-251 * @param startKey Starting row in range, inclusive -252 * @param endKey Ending row in range -253 * @param includeEndKey true if endRow is inclusive, false if exclusive -254 * @return A pair of list of start keys and list of HRegionLocations that -255 * contain the specified range -256 * @throws IOException if a remote or network exception occurs -257 */ -258 private Pair, List> getKeysAndRegionsInRange( -259 final byte[] startKey, final byte[] endKey, final boolean includeEndKey) -260 throws IOException { -261return getKeysAndRegionsInRange(startKey, endKey, includeEndKey, false); -262 } -263 -264 /** -265 * Get the corresponding start keys and regions for an arbitrary range of -266 * keys. -267 *

-268 * @param startKey Starting row in range, inclusive -269 * @param endKey Ending row in range -270 * @param includeEndKey true if endRow is inclusive, false if exclusive -271 * @param reload true to reload information or false to


[49/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/apidocs/index-all.html
--
diff --git a/apidocs/index-all.html b/apidocs/index-all.html
index 4d63e79..0a024cb 100644
--- a/apidocs/index-all.html
+++ b/apidocs/index-all.html
@@ -1743,6 +1743,15 @@
 
 COLUMNS_CONF_KEY
 - Static variable in class org.apache.hadoop.hbase.mapreduce.ImportTsv
  
+ColumnValueFilter - Class in org.apache.hadoop.hbase.filter
+
+Different from SingleColumnValueFilter 
which returns an entire row
+ when specified condition is matched, ColumnValueFilter return the 
matched cell only.
+
+ColumnValueFilter(byte[],
 byte[], CompareOperator, byte[]) - Constructor for class 
org.apache.hadoop.hbase.filter.ColumnValueFilter
+ 
+ColumnValueFilter(byte[],
 byte[], CompareOperator, ByteArrayComparable) - Constructor for 
class org.apache.hadoop.hbase.filter.ColumnValueFilter
+ 
 com.google.protobuf 
- package com.google.protobuf
  
 combineTableNameSuffix(byte[],
 byte[]) - Static method in class 
org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2
@@ -2728,10 +2737,7 @@
 
 createCompareOperator(byte[])
 - Static method in class org.apache.hadoop.hbase.filter.ParseFilter
 
-Deprecated.
-Since 2.0
- 
-
+Takes a compareOperator symbol as a byte array and returns 
the corresponding CompareOperator
 
 createCompleteResult(Iterable)
 - Static method in class org.apache.hadoop.hbase.client.Result
 
@@ -2811,6 +2817,10 @@
  
 createFilterFromArguments(ArrayList)
 - Static method in class org.apache.hadoop.hbase.filter.ColumnRangeFilter
  
+createFilterFromArguments(ArrayList)
 - Static method in class org.apache.hadoop.hbase.filter.ColumnValueFilter
+
+Creating this filter by reflection, it is used by ParseFilter,
+
 createFilterFromArguments(ArrayList)
 - Static method in class org.apache.hadoop.hbase.filter.DependentColumnFilter
  
 createFilterFromArguments(ArrayList)
 - Static method in class org.apache.hadoop.hbase.filter.FamilyFilter
@@ -5294,6 +5304,8 @@
  
 filterCell(Cell)
 - Method in class org.apache.hadoop.hbase.filter.ColumnRangeFilter
  
+filterCell(Cell)
 - Method in class org.apache.hadoop.hbase.filter.ColumnValueFilter
+ 
 filterCell(Cell)
 - Method in class org.apache.hadoop.hbase.filter.DependentColumnFilter
  
 filterCell(Cell)
 - Method in class org.apache.hadoop.hbase.filter.FamilyFilter
@@ -5530,6 +5542,8 @@
  
 filterRowKey(Cell)
 - Method in class org.apache.hadoop.hbase.filter.ColumnRangeFilter
  
+filterRowKey(Cell)
 - Method in class org.apache.hadoop.hbase.filter.ColumnValueFilter
+ 
 filterRowKey(Cell)
 - Method in class org.apache.hadoop.hbase.filter.CompareFilter
  
 filterRowKey(byte[],
 int, int) - Method in class org.apache.hadoop.hbase.filter.DependentColumnFilter
@@ -6263,6 +6277,8 @@
 
 getComparator(Comparator)
 - Static method in interface org.apache.hadoop.hbase.client.TableDescriptor
  
+getComparator()
 - Method in class org.apache.hadoop.hbase.filter.ColumnValueFilter
+ 
 getComparator()
 - Method in class org.apache.hadoop.hbase.filter.CompareFilter
  
 getComparator()
 - Method in class org.apache.hadoop.hbase.filter.SingleColumnValueFilter
@@ -6273,6 +6289,8 @@
 Use 
Region#getCellComparator().  deprecated for hbase 2.0, remove for hbase 
3.0
 
 
+getCompareOperator()
 - Method in class org.apache.hadoop.hbase.filter.ColumnValueFilter
+ 
 getCompareOperator()
 - Method in class org.apache.hadoop.hbase.filter.CompareFilter
  
 getCompareOperator()
 - Method in class org.apache.hadoop.hbase.filter.SingleColumnValueFilter
@@ -6681,6 +6699,8 @@
  Use HTableDescriptor.getColumnFamilyNames().
 
 
+getFamily()
 - Method in class org.apache.hadoop.hbase.filter.ColumnValueFilter
+ 
 getFamily()
 - Method in class org.apache.hadoop.hbase.filter.DependentColumnFilter
  
 getFamily()
 - Method in class org.apache.hadoop.hbase.filter.SingleColumnValueFilter
@@ -7700,6 +7720,8 @@
 
 Return the provider for this Cipher
 
+getQualifier()
 - Method in class org.apache.hadoop.hbase.filter.ColumnValueFilter
+ 
 getQualifier()
 - Method in class org.apache.hadoop.hbase.filter.DependentColumnFilter
  
 getQualifier()
 - Method in class org.apache.hadoop.hbase.filter.SingleColumnValueFilter
@@ -10695,6 +10717,8 @@
 
 Deprecated.
  
+isFamilyEssential(byte[])
 - Method in class org.apache.hadoop.hbase.filter.ColumnValueFilter
+ 
 isFamilyEssential(byte[])
 - Method in class org.apache.hadoop.hbase.filter.Filter
 
 Check that given column family is essential for filter to 
check row.
@@ -13372,6 +13396,10 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
  
 parseFrom(byte[])
 - Static method in class org.apache.hadoop.hbase.filter.ColumnRangeFilter
  
+parseFrom(byte[])
 - Static method in class org.apache.hadoop.hbase.filter.ColumnValueFilter
+
+Parse protobuf bytes to a ColumnValueFilter
+
 parseFrom(byte[])
 - Static method in class org.ap

[06/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/filter/ParseFilter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/ParseFilter.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/ParseFilter.html
index 49de9ff..d98b2a6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/filter/ParseFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/filter/ParseFilter.html
@@ -101,684 +101,684 @@
 093  
"SingleColumnValueExcludeFilter");
 094
filterHashMap.put("DependentColumnFilter", ParseConstants.FILTER_PACKAGE + "." 
+
 095  
"DependentColumnFilter");
-096
-097// Creates the 
operatorPrecedenceHashMap
-098operatorPrecedenceHashMap = new 
HashMap<>();
-099
operatorPrecedenceHashMap.put(ParseConstants.SKIP_BUFFER, 1);
-100
operatorPrecedenceHashMap.put(ParseConstants.WHILE_BUFFER, 1);
-101
operatorPrecedenceHashMap.put(ParseConstants.AND_BUFFER, 2);
-102
operatorPrecedenceHashMap.put(ParseConstants.OR_BUFFER, 3);
-103  }
-104
-105  /**
-106   * Parses the filterString and 
constructs a filter using it
-107   * 

-108 * @param filterString filter string given by the user -109 * @return filter object we constructed -110 */ -111 public Filter parseFilterString (String filterString) -112throws CharacterCodingException { -113return parseFilterString(Bytes.toBytes(filterString)); -114 } -115 -116 /** -117 * Parses the filterString and constructs a filter using it -118 *

-119 * @param filterStringAsByteArray filter string given by the user -120 * @return filter object we constructed -121 */ -122 public Filter parseFilterString (byte [] filterStringAsByteArray) -123throws CharacterCodingException { -124// stack for the operators and parenthesis -125Stack operatorStack = new Stack<>(); -126// stack for the filter objects -127Stack filterStack = new Stack<>(); -128 -129Filter filter = null; -130for (int i=0; i


[24/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.StoppableThread.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.StoppableThread.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.StoppableThread.html
deleted file mode 100644
index 8edc8be..000
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.StoppableThread.html
+++ /dev/null
@@ -1,341 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd";>
-
-
-
-
-
-ProcedureExecutor.StoppableThread (Apache HBase 3.0.0-SNAPSHOT 
API)
-
-
-
-
-
-var methods = {"i0":10,"i1":6};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],8:["t4","Concrete Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-
-
-JavaScript is disabled on your browser.
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-Prev Class
-Next Class
-
-
-Frames
-No Frames
-
-
-All Classes
-
-
-
-
-
-
-
-Summary: 
-Nested | 
-Field | 
-Constr | 
-Method
-
-
-Detail: 
-Field | 
-Constr | 
-Method
-
-
-
-
-
-
-
-
-org.apache.hadoop.hbase.procedure2
-Class 
ProcedureExecutor.StoppableThread
-
-
-
-http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">java.lang.Object
-
-
-http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true";
 title="class or interface in java.lang">java.lang.Thread
-
-
-org.apache.hadoop.hbase.procedure2.ProcedureExecutor.StoppableThread
-
-
-
-
-
-
-
-
-
-All Implemented Interfaces:
-http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true";
 title="class or interface in java.lang">Runnable
-
-
-Direct Known Subclasses:
-ProcedureExecutor.TimeoutExecutorThread,
 ProcedureExecutor.WorkerThread
-
-
-Enclosing class:
-ProcedureExecutor
-
-
-
-private abstract static class ProcedureExecutor.StoppableThread
-extends http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true";
 title="class or interface in java.lang">Thread
-
-
-
-
-
-
-
-
-
-
-
-Nested Class Summary
-
-
-
-
-Nested classes/interfaces inherited from class java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true";
 title="class or interface in java.lang">Thread
-http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.State.html?is-external=true";
 title="class or interface in java.lang">Thread.State, http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.UncaughtExceptionHandler.html?is-external=true";
 title="class or interface in 
java.lang">Thread.UncaughtExceptionHandler
-
-
-
-
-
-
-
-
-Field Summary
-
-
-
-
-Fields inherited from class java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true";
 title="class or interface in java.lang">Thread
-http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true#MAX_PRIORITY";
 title="class or interface in java.lang">MAX_PRIORITY, http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true#MIN_PRIORITY";
 title="class or interface in java.lang">MIN_PRIORITY, http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true#NORM_PRIORITY";
 title="class or interface in java.lang">NORM_PRIORITY
-
-
-
-
-
-
-
-
-Constructor Summary
-
-Constructors 
-
-Constructor and Description
-
-
-StoppableThread(http://docs.oracle.com/javase/8/docs/api/java/lang/ThreadGroup.html?is-external=true";
 title="class or interface in java.lang">ThreadGroup group,
-   http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name) 
-
-
-
-
-
-
-
-
-
-Method Summary
-
-All Methods Instance Methods Abstract Methods Concrete Methods 
-
-Modifier and Type
-Method and Description
-
-
-void
-awaitTermination() 
-
-
-abstract void
-sendStopSignal() 
-
-
-
-
-
-
-Methods inherited from class java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true";
 title="class or interface in java.lang">Thread
-http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true#activeCount--";
 title="class or interface in java.lang">activeCount, http://docs.oracle.com/javase/8/docs/

hbase-site git commit: INFRA-10751 Empty commit

2018-02-20 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site ec8bf7616 -> 374a4b291


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/374a4b29
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/374a4b29
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/374a4b29

Branch: refs/heads/asf-site
Commit: 374a4b291be1d2113274d601ede6c6b341eae0ad
Parents: ec8bf76
Author: jenkins 
Authored: Tue Feb 20 15:13:48 2018 +
Committer: jenkins 
Committed: Tue Feb 20 15:13:48 2018 +

--

--




[02/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
index 7edb3ff..665071c 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
@@ -1221,2378 +1221,2377 @@
 1213
configurationManager.registerObserver(procEnv);
 1214
 1215int cpus = 
Runtime.getRuntime().availableProcessors();
-1216final int numThreads = 
conf.getInt(MasterProcedureConstants.MASTER_PROCEDURE_THREADS,
-1217Math.max((cpus > 0? cpus/4: 
0),
-1218
MasterProcedureConstants.DEFAULT_MIN_MASTER_PROCEDURE_THREADS));
-1219final boolean abortOnCorruption = 
conf.getBoolean(
-1220
MasterProcedureConstants.EXECUTOR_ABORT_ON_CORRUPTION,
-1221
MasterProcedureConstants.DEFAULT_EXECUTOR_ABORT_ON_CORRUPTION);
-1222procedureStore.start(numThreads);
-1223procedureExecutor.start(numThreads, 
abortOnCorruption);
-1224
procEnv.getRemoteDispatcher().start();
-1225  }
-1226
-1227  private void stopProcedureExecutor() 
{
-1228if (procedureExecutor != null) {
-1229  
configurationManager.deregisterObserver(procedureExecutor.getEnvironment());
-1230  
procedureExecutor.getEnvironment().getRemoteDispatcher().stop();
-1231  procedureExecutor.stop();
-1232  procedureExecutor.join();
-1233  procedureExecutor = null;
-1234}
-1235
-1236if (procedureStore != null) {
-1237  
procedureStore.stop(isAborted());
-1238  procedureStore = null;
-1239}
-1240  }
-1241
-1242  private void stopChores() {
-1243if (this.expiredMobFileCleanerChore 
!= null) {
-1244  
this.expiredMobFileCleanerChore.cancel(true);
-1245}
-1246if (this.mobCompactChore != null) 
{
-1247  
this.mobCompactChore.cancel(true);
-1248}
-1249if (this.balancerChore != null) {
-1250  this.balancerChore.cancel(true);
-1251}
-1252if (this.normalizerChore != null) 
{
-1253  
this.normalizerChore.cancel(true);
-1254}
-1255if (this.clusterStatusChore != null) 
{
-1256  
this.clusterStatusChore.cancel(true);
-1257}
-1258if (this.catalogJanitorChore != 
null) {
-1259  
this.catalogJanitorChore.cancel(true);
-1260}
-1261if (this.clusterStatusPublisherChore 
!= null){
-1262  
clusterStatusPublisherChore.cancel(true);
-1263}
-1264if (this.mobCompactThread != null) 
{
-1265  this.mobCompactThread.close();
-1266}
-1267
-1268if (this.quotaObserverChore != null) 
{
-1269  quotaObserverChore.cancel();
-1270}
-1271if (this.snapshotQuotaChore != null) 
{
-1272  snapshotQuotaChore.cancel();
-1273}
-1274  }
-1275
-1276  /**
-1277   * @return Get remote side's 
InetAddress
-1278   */
-1279  InetAddress getRemoteInetAddress(final 
int port,
-1280  final long serverStartCode) throws 
UnknownHostException {
-1281// Do it out here in its own little 
method so can fake an address when
-1282// mocking up in tests.
-1283InetAddress ia = 
RpcServer.getRemoteIp();
-1284
-1285// The call could be from the local 
regionserver,
-1286// in which case, there is no remote 
address.
-1287if (ia == null && 
serverStartCode == startcode) {
-1288  InetSocketAddress isa = 
rpcServices.getSocketAddress();
-1289  if (isa != null && 
isa.getPort() == port) {
-1290ia = isa.getAddress();
-1291  }
-1292}
-1293return ia;
-1294  }
-1295
-1296  /**
-1297   * @return Maximum time we should run 
balancer for
-1298   */
-1299  private int getMaxBalancingTime() {
-1300int maxBalancingTime = 
getConfiguration().getInt(HConstants.HBASE_BALANCER_MAX_BALANCING, -1);
-1301if (maxBalancingTime == -1) {
-1302  // if max balancing time isn't 
set, defaulting it to period time
-1303  maxBalancingTime = 
getConfiguration().getInt(HConstants.HBASE_BALANCER_PERIOD,
-1304
HConstants.DEFAULT_HBASE_BALANCER_PERIOD);
-1305}
-1306return maxBalancingTime;
-1307  }
-1308
-1309  /**
-1310   * @return Maximum number of regions 
in transition
-1311   */
-1312  private int 
getMaxRegionsInTransition() {
-1313int numRegions = 
this.assignmentManager.getRegionStates().getRegionAssignments().size();
-1314return Math.max((int) 
Math.floor(numRegions * this.maxRitPercent), 1);
-1315  }
-1316
-1317  /**
-1318   * It first sleep to the next balance 
plan start time. Meanwhile, throttling by the max
-1319   * number regions in transition to 
protect availability.
-1320   * @param nextBalanceStartTime The 
next balance plan start time
-1321   * @param maxRegionsInTransition max 
number of regions in transition
-1322   * @param cutoffTime when to exit 
balancer
-1323   */
-1324  private void balanceThrottlin

[14/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.html
index 93f650f..d7aa8b1 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.html
@@ -546,1472 +546,1464 @@
 538return this.conf;
 539  }
 540
-541  /**
-542   * @return true if the master is 
running, throws an exception otherwise
-543   * @throws 
org.apache.hadoop.hbase.MasterNotRunningException - if the master is not 
running
-544   * @deprecated this has been deprecated 
without a replacement
-545   */
-546  @Deprecated
-547  @Override
-548  public boolean isMasterRunning()
-549  throws MasterNotRunningException, 
ZooKeeperConnectionException {
-550// When getting the master 
connection, we check it's running,
-551// so if there is no exception, it 
means we've been able to get a
-552// connection on a running master
-553MasterKeepAliveConnection m = 
getKeepAliveMasterService();
-554m.close();
-555return true;
-556  }
-557
-558  @Override
-559  public HRegionLocation 
getRegionLocation(final TableName tableName,
-560  final byte [] row, boolean 
reload)
-561  throws IOException {
-562return reload? 
relocateRegion(tableName, row): locateRegion(tableName, row);
-563  }
-564
-565
-566  @Override
-567  public boolean isTableEnabled(TableName 
tableName) throws IOException {
-568return 
getTableState(tableName).inStates(TableState.State.ENABLED);
-569  }
-570
-571  @Override
-572  public boolean 
isTableDisabled(TableName tableName) throws IOException {
-573return 
getTableState(tableName).inStates(TableState.State.DISABLED);
-574  }
-575
-576  @Override
-577  public boolean isTableAvailable(final 
TableName tableName, @Nullable final byte[][] splitKeys)
-578  throws IOException {
-579if (this.closed) {
-580  throw new IOException(toString() + 
" closed");
-581}
-582try {
-583  if (!isTableEnabled(tableName)) {
-584LOG.debug("Table " + tableName + 
" not enabled");
-585return false;
-586  }
-587  List> locations =
-588
MetaTableAccessor.getTableRegionsAndLocations(this, tableName, true);
-589
-590  int notDeployed = 0;
-591  int regionCount = 0;
-592  for (Pair pair : locations) {
-593RegionInfo info = 
pair.getFirst();
-594if (pair.getSecond() == null) {
-595  if (LOG.isDebugEnabled()) {
-596LOG.debug("Table " + 
tableName + " has not deployed region " + pair.getFirst()
-597.getEncodedName());
-598  }
-599  notDeployed++;
-600} else if (splitKeys != null
-601&& 
!Bytes.equals(info.getStartKey(), HConstants.EMPTY_BYTE_ARRAY)) {
-602  for (byte[] splitKey : 
splitKeys) {
-603// Just check if the splitkey 
is available
-604if 
(Bytes.equals(info.getStartKey(), splitKey)) {
-605  regionCount++;
-606  break;
-607}
-608  }
-609} else {
-610  // Always empty start row 
should be counted
-611  regionCount++;
-612}
-613  }
-614  if (notDeployed > 0) {
-615if (LOG.isDebugEnabled()) {
-616  LOG.debug("Table " + tableName 
+ " has " + notDeployed + " regions");
-617}
-618return false;
-619  } else if (splitKeys != null 
&& regionCount != splitKeys.length + 1) {
-620if (LOG.isDebugEnabled()) {
-621  LOG.debug("Table " + tableName 
+ " expected to have " + (splitKeys.length + 1)
-622  + " regions, but only " + 
regionCount + " available");
-623}
-624return false;
-625  } else {
-626if (LOG.isDebugEnabled()) {
-627  LOG.debug("Table " + tableName 
+ " should be available");
-628}
-629return true;
-630  }
-631} catch (TableNotFoundException tnfe) 
{
-632  LOG.warn("Table " + tableName + " 
not enabled, it is not exists");
-633  return false;
-634}
-635  }
-636
-637  @Override
-638  public HRegionLocation 
locateRegion(final byte[] regionName) throws IOException {
-639RegionLocations locations = 
locateRegion(RegionInfo.getTable(regionName),
-640  RegionInfo.getStartKey(regionName), 
false, true);
-641return locations == null ? null : 
locations.getRegionLocation();
+541  private void checkClosed() throws 
DoNotRetryIOException {
+542if (this.closed) {
+543  throw new 
DoNotRetryIOException(toString(

[01/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site e82a131d3 -> ec8bf7616


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/DelayedProcedure.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/DelayedProcedure.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/DelayedProcedure.html
new file mode 100644
index 000..bd92c85
--- /dev/null
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/DelayedProcedure.html
@@ -0,0 +1,100 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+Source code
+
+
+
+
+001/**
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License.  You may obtain a 
copy of the License at
+009 *
+010 * 
http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or 
agreed to in writing, software
+013 * distributed under the License is 
distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+015 * See the License for the specific 
language governing permissions and
+016 * limitations under the License.
+017 */
+018package 
org.apache.hadoop.hbase.procedure2;
+019
+020import 
org.apache.hadoop.hbase.procedure2.util.DelayedUtil;
+021import 
org.apache.yetus.audience.InterfaceAudience;
+022
+023@InterfaceAudience.Private
+024class DelayedProcedure extends 
DelayedUtil.DelayedContainerWithTimestamp> {
+025  public 
DelayedProcedure(Procedure procedure) {
+026super(procedure, 
procedure.getTimeoutTimestamp());
+027  }
+028}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+



[29/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/filter/class-use/ColumnValueFilter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/ColumnValueFilter.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/ColumnValueFilter.html
new file mode 100644
index 000..93700df
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/ColumnValueFilter.html
@@ -0,0 +1,170 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+
+Uses of Class org.apache.hadoop.hbase.filter.ColumnValueFilter (Apache 
HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev
+Next
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+
+
+
+Uses of 
Classorg.apache.hadoop.hbase.filter.ColumnValueFilter
+
+
+
+
+
+Packages that use ColumnValueFilter 
+
+Package
+Description
+
+
+
+org.apache.hadoop.hbase.filter
+
+Provides row-level filters applied to HRegion scan results 
during calls to
+ ResultScanner.next().
+
+
+
+
+
+
+
+
+
+
+Uses of ColumnValueFilter in org.apache.hadoop.hbase.filter
+
+Methods in org.apache.hadoop.hbase.filter
 that return ColumnValueFilter 
+
+Modifier and Type
+Method and Description
+
+
+
+static ColumnValueFilter
+ColumnValueFilter.parseFrom(byte[] pbBytes)
+Parse protobuf bytes to a ColumnValueFilter
+
+
+
+
+
+
+
+
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev
+Next
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+
+
+Copyright © 2007–2018 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+
+

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 0c342b2..ee00b7e 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -259,109 +259,113 @@
 
 
 Filter.ReturnCode
-QualifierFilter.filterCell(Cell c) 
+ColumnValueFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-FilterWrapper.filterCell(Cell c) 
+QualifierFilter.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterColumn(Cell cell) 
+FilterWrapper.filterCell(Cell c) 
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterColumn(Cell cell) 
+ColumnPrefixFilter.filterColumn(Cell cell) 
 
 
 Filter.ReturnCode
+MultipleColumnPrefixFilter.filterColumn(Cell cell) 
+
+
+Filter.ReturnCode
 ValueFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
-
+
 Filter.ReturnCode
 SkipFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
-
+
 Filter.ReturnCode
 FilterListBase.filterKeyValue(Cell c) 
 
-
+
 Filter.ReturnCode
 FamilyFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
-
+
 Filter.ReturnCode
 ColumnPrefixFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
-
+
 Filter.ReturnCode
 PageFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
-
+
 Filter.ReturnCode
 RowFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
-
+
 Filter.ReturnCode
 ColumnRangeFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
-
+
 Filter.ReturnCode
 ColumnCountGetFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
-
+
 Filter.ReturnCode
 MultipleColumnPrefixFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
-
+
 Filter.ReturnCode
 ColumnPaginationFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
-
+
 Filter.ReturnCode
 DependentColumnFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
-
+
 Filter.ReturnCode
 InclusiveStopFilter.filterKeyValue(Cell c)
 Deprecated. 
 
 
-
+
 Filter.ReturnCode
 KeyOnlyFilter.filterKeyValue(Cell ignored)
 Deprecated. 
 
 
-
+
 Filter.ReturnCode
 MultiRowRangeFilter.filterKeyValue(Cell ignored)
 Deprecated. 
 
 
-
+
 Filter.ReturnCode
 Filter.filterKeyValue(Cell c)
 Deprecated. 
@@ -370,93 +374,93 @@
 
 
 
-
+
 Filter.ReturnCode
 FirstKeyOnlyFilter.filterKeyValue(Cell c

[03/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
index 7edb3ff..665071c 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
@@ -1221,2378 +1221,2377 @@
 1213
configurationManager.registerObserver(procEnv);
 1214
 1215int cpus = 
Runtime.getRuntime().availableProcessors();
-1216final int numThreads = 
conf.getInt(MasterProcedureConstants.MASTER_PROCEDURE_THREADS,
-1217Math.max((cpus > 0? cpus/4: 
0),
-1218
MasterProcedureConstants.DEFAULT_MIN_MASTER_PROCEDURE_THREADS));
-1219final boolean abortOnCorruption = 
conf.getBoolean(
-1220
MasterProcedureConstants.EXECUTOR_ABORT_ON_CORRUPTION,
-1221
MasterProcedureConstants.DEFAULT_EXECUTOR_ABORT_ON_CORRUPTION);
-1222procedureStore.start(numThreads);
-1223procedureExecutor.start(numThreads, 
abortOnCorruption);
-1224
procEnv.getRemoteDispatcher().start();
-1225  }
-1226
-1227  private void stopProcedureExecutor() 
{
-1228if (procedureExecutor != null) {
-1229  
configurationManager.deregisterObserver(procedureExecutor.getEnvironment());
-1230  
procedureExecutor.getEnvironment().getRemoteDispatcher().stop();
-1231  procedureExecutor.stop();
-1232  procedureExecutor.join();
-1233  procedureExecutor = null;
-1234}
-1235
-1236if (procedureStore != null) {
-1237  
procedureStore.stop(isAborted());
-1238  procedureStore = null;
-1239}
-1240  }
-1241
-1242  private void stopChores() {
-1243if (this.expiredMobFileCleanerChore 
!= null) {
-1244  
this.expiredMobFileCleanerChore.cancel(true);
-1245}
-1246if (this.mobCompactChore != null) 
{
-1247  
this.mobCompactChore.cancel(true);
-1248}
-1249if (this.balancerChore != null) {
-1250  this.balancerChore.cancel(true);
-1251}
-1252if (this.normalizerChore != null) 
{
-1253  
this.normalizerChore.cancel(true);
-1254}
-1255if (this.clusterStatusChore != null) 
{
-1256  
this.clusterStatusChore.cancel(true);
-1257}
-1258if (this.catalogJanitorChore != 
null) {
-1259  
this.catalogJanitorChore.cancel(true);
-1260}
-1261if (this.clusterStatusPublisherChore 
!= null){
-1262  
clusterStatusPublisherChore.cancel(true);
-1263}
-1264if (this.mobCompactThread != null) 
{
-1265  this.mobCompactThread.close();
-1266}
-1267
-1268if (this.quotaObserverChore != null) 
{
-1269  quotaObserverChore.cancel();
-1270}
-1271if (this.snapshotQuotaChore != null) 
{
-1272  snapshotQuotaChore.cancel();
-1273}
-1274  }
-1275
-1276  /**
-1277   * @return Get remote side's 
InetAddress
-1278   */
-1279  InetAddress getRemoteInetAddress(final 
int port,
-1280  final long serverStartCode) throws 
UnknownHostException {
-1281// Do it out here in its own little 
method so can fake an address when
-1282// mocking up in tests.
-1283InetAddress ia = 
RpcServer.getRemoteIp();
-1284
-1285// The call could be from the local 
regionserver,
-1286// in which case, there is no remote 
address.
-1287if (ia == null && 
serverStartCode == startcode) {
-1288  InetSocketAddress isa = 
rpcServices.getSocketAddress();
-1289  if (isa != null && 
isa.getPort() == port) {
-1290ia = isa.getAddress();
-1291  }
-1292}
-1293return ia;
-1294  }
-1295
-1296  /**
-1297   * @return Maximum time we should run 
balancer for
-1298   */
-1299  private int getMaxBalancingTime() {
-1300int maxBalancingTime = 
getConfiguration().getInt(HConstants.HBASE_BALANCER_MAX_BALANCING, -1);
-1301if (maxBalancingTime == -1) {
-1302  // if max balancing time isn't 
set, defaulting it to period time
-1303  maxBalancingTime = 
getConfiguration().getInt(HConstants.HBASE_BALANCER_PERIOD,
-1304
HConstants.DEFAULT_HBASE_BALANCER_PERIOD);
-1305}
-1306return maxBalancingTime;
-1307  }
-1308
-1309  /**
-1310   * @return Maximum number of regions 
in transition
-1311   */
-1312  private int 
getMaxRegionsInTransition() {
-1313int numRegions = 
this.assignmentManager.getRegionStates().getRegionAssignments().size();
-1314return Math.max((int) 
Math.floor(numRegions * this.maxRitPercent), 1);
-1315  }
-1316
-1317  /**
-1318   * It first sleep to the next balance 
plan start time. Meanwhile, throttling by the max
-1319   * number regions in transition to 
protect availability.
-1320   * @param nextBalanceStartTime The 
next balance plan start time
-1321   * @param maxRegionsInTransition max 
number of regions in transition
-1322   * @par

[21/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html
index a2faf45..762be87 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html
@@ -248,7 +248,7 @@ the order they are declared.
 
 
 values
-public static RootProcedureState.State[] values()
+public static RootProcedureState.State[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -268,7 +268,7 @@ for (RootProcedureState.State c : 
RootProcedureState.State.values())
 
 
 valueOf
-public static RootProcedureState.State valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static RootProcedureState.State valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html
index 9501985..9159a18 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames
@@ -301,7 +301,7 @@ not permitted.)
 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/procedure2/StoppableThread.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/StoppableThread.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/StoppableThread.html
new file mode 100644
index 000..e3b4978
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/StoppableThread.html
@@ -0,0 +1,366 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+
+StoppableThread (Apache HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
+var methods = {"i0":10,"i1":6};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],8:["t4","Concrete Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev Class
+Next Class
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+Summary: 
+Nested | 
+Field | 
+Constr | 
+Method
+
+
+Detail: 
+Field | 
+Constr | 
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.procedure2
+Class StoppableThread
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">java.lang.Object
+
+
+http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true";
 title="class or interface in java.lang">java.lang.Thread
+
+
+org.apache.hadoop.hbase.procedure2.StoppableThread
+
+
+
+
+
+
+
+
+
+All Implemented Interfaces:
+http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true";
 title="class or interface in java.lang">Runnable
+
+
+Direct Known Subclasses:
+ProcedureExecutor.WorkerThread, TimeoutExecutorThread
+
+
+
+@InterfaceAudience.Private
+abstract class StoppableThread
+extends http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true";
 title="class or interface in java.lang">Thread
+
+
+
+
+
+
+
+
+
+
+
+Nested Class Summary
+
+
+
+
+Nested classes/interfaces inherited f

[28/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 053f236..5ab0fb3 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -106,6 +106,7 @@
 org.apache.hadoop.hbase.filter.ColumnPaginationFilter
 org.apache.hadoop.hbase.filter.ColumnPrefixFilter
 org.apache.hadoop.hbase.filter.ColumnRangeFilter
+org.apache.hadoop.hbase.filter.ColumnValueFilter
 org.apache.hadoop.hbase.filter.CompareFilter
 
 org.apache.hadoop.hbase.filter.DependentColumnFilter
@@ -182,14 +183,14 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.filter.Filter.ReturnCode
+org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
 org.apache.hadoop.hbase.filter.FuzzyRowFilter.Order
+org.apache.hadoop.hbase.filter.Filter.ReturnCode
+org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
 org.apache.hadoop.hbase.filter.FilterWrapper.FilterRowRetCode
-org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
 org.apache.hadoop.hbase.filter.FuzzyRowFilter.SatisfiesCode
 org.apache.hadoop.hbase.filter.FilterList.Operator
-org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
-org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
+org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/filter/package-use.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-use.html 
b/devapidocs/org/apache/hadoop/hbase/filter/package-use.html
index 7094646..271a1f9 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-use.html
@@ -279,18 +279,24 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
+ColumnValueFilter
+Different from SingleColumnValueFilter which 
returns an entire row
+ when specified condition is matched, ColumnValueFilter return the 
matched cell only.
+
+
+
 CompareFilter
 This is a generic filter to be used to filter by 
comparison.
 
 
-
+
 CompareFilter.CompareOp
 Deprecated. 
 since 2.0.0. Will be 
removed in 3.0.0. Use CompareOperator 
instead.
 
 
 
-
+
 DependentColumnFilter
 A filter for adding inter-column timestamp matching
  Only cells with a correspondingly timestamped entry in
@@ -299,197 +305,197 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
  full rows for correct filtering
 
 
-
+
 FamilyFilter
 
  This filter is used to filter based on the column family.
 
 
-
+
 Filter
 Interface for row and column filters directly applied 
within the regionserver.
 
 
-
+
 Filter.ReturnCode
 Return codes for filterValue().
 
 
-
+
 FilterBase
 Abstract base class to help you implement new Filters.
 
 
-
+
 FilterList
 Implementation of Filter that represents an 
ordered List of Filters which will be
  evaluated with a specified boolean operator FilterList.Operator.MUST_PASS_ALL
 (AND) or
  FilterList.Operator.MUST_PASS_ONE
 (OR).
 
 
-
+
 FilterList.Operator
 set operator
 
 
-
+
 FilterListBase
 Base class for FilterList.
 
 
-
+
 FilterWrapper
 This is a Filter wrapper class which is used in the server 
side.
 
 
-
+
 FilterWrapper.FilterRowRetCode 
 
-
+
 FirstKeyOnlyFilter
 A filter that will only return the first KV from each 
row.
 
 
-
+
 FirstKeyValueMatchingQualifiersFilter
 Deprecated. 
 Deprecated in 2.0. See 
HBASE-13347
 
 
 
-
+
 FuzzyRowFilter
 This is optimized version of a standard FuzzyRowFilter 
Filters data based on fuzzy row key.
 
 
-
+
 FuzzyRowFilter.Order
 Abstracts directional comparisons based on scan 
direction.
 
 
-
+
 FuzzyRowFilter.RowTracker
 If we have multiple fuzzy keys, row tracker should improve 
overall performance.
 
 
-
+
 FuzzyRowFilter.SatisfiesCode 
 
-
+
 InclusiveStopFilter
 A Filter that stops after the given row.
 
 
-
+
 KeyOnlyFilter
 A filter that will only return the key component of each KV 
(the value will
  be rewritten as empty).
 
 
-
+
 LongComparator
 A long comparator which numerical compares against the 
specified byte array
 
 
-
+
 MultipleColumnPrefixFilter
 This filter is used for selecting only those keys with 
columns that matches
  a particular prefix.

[04/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
index 7edb3ff..665071c 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
@@ -1221,2378 +1221,2377 @@
 1213
configurationManager.registerObserver(procEnv);
 1214
 1215int cpus = 
Runtime.getRuntime().availableProcessors();
-1216final int numThreads = 
conf.getInt(MasterProcedureConstants.MASTER_PROCEDURE_THREADS,
-1217Math.max((cpus > 0? cpus/4: 
0),
-1218
MasterProcedureConstants.DEFAULT_MIN_MASTER_PROCEDURE_THREADS));
-1219final boolean abortOnCorruption = 
conf.getBoolean(
-1220
MasterProcedureConstants.EXECUTOR_ABORT_ON_CORRUPTION,
-1221
MasterProcedureConstants.DEFAULT_EXECUTOR_ABORT_ON_CORRUPTION);
-1222procedureStore.start(numThreads);
-1223procedureExecutor.start(numThreads, 
abortOnCorruption);
-1224
procEnv.getRemoteDispatcher().start();
-1225  }
-1226
-1227  private void stopProcedureExecutor() 
{
-1228if (procedureExecutor != null) {
-1229  
configurationManager.deregisterObserver(procedureExecutor.getEnvironment());
-1230  
procedureExecutor.getEnvironment().getRemoteDispatcher().stop();
-1231  procedureExecutor.stop();
-1232  procedureExecutor.join();
-1233  procedureExecutor = null;
-1234}
-1235
-1236if (procedureStore != null) {
-1237  
procedureStore.stop(isAborted());
-1238  procedureStore = null;
-1239}
-1240  }
-1241
-1242  private void stopChores() {
-1243if (this.expiredMobFileCleanerChore 
!= null) {
-1244  
this.expiredMobFileCleanerChore.cancel(true);
-1245}
-1246if (this.mobCompactChore != null) 
{
-1247  
this.mobCompactChore.cancel(true);
-1248}
-1249if (this.balancerChore != null) {
-1250  this.balancerChore.cancel(true);
-1251}
-1252if (this.normalizerChore != null) 
{
-1253  
this.normalizerChore.cancel(true);
-1254}
-1255if (this.clusterStatusChore != null) 
{
-1256  
this.clusterStatusChore.cancel(true);
-1257}
-1258if (this.catalogJanitorChore != 
null) {
-1259  
this.catalogJanitorChore.cancel(true);
-1260}
-1261if (this.clusterStatusPublisherChore 
!= null){
-1262  
clusterStatusPublisherChore.cancel(true);
-1263}
-1264if (this.mobCompactThread != null) 
{
-1265  this.mobCompactThread.close();
-1266}
-1267
-1268if (this.quotaObserverChore != null) 
{
-1269  quotaObserverChore.cancel();
-1270}
-1271if (this.snapshotQuotaChore != null) 
{
-1272  snapshotQuotaChore.cancel();
-1273}
-1274  }
-1275
-1276  /**
-1277   * @return Get remote side's 
InetAddress
-1278   */
-1279  InetAddress getRemoteInetAddress(final 
int port,
-1280  final long serverStartCode) throws 
UnknownHostException {
-1281// Do it out here in its own little 
method so can fake an address when
-1282// mocking up in tests.
-1283InetAddress ia = 
RpcServer.getRemoteIp();
-1284
-1285// The call could be from the local 
regionserver,
-1286// in which case, there is no remote 
address.
-1287if (ia == null && 
serverStartCode == startcode) {
-1288  InetSocketAddress isa = 
rpcServices.getSocketAddress();
-1289  if (isa != null && 
isa.getPort() == port) {
-1290ia = isa.getAddress();
-1291  }
-1292}
-1293return ia;
-1294  }
-1295
-1296  /**
-1297   * @return Maximum time we should run 
balancer for
-1298   */
-1299  private int getMaxBalancingTime() {
-1300int maxBalancingTime = 
getConfiguration().getInt(HConstants.HBASE_BALANCER_MAX_BALANCING, -1);
-1301if (maxBalancingTime == -1) {
-1302  // if max balancing time isn't 
set, defaulting it to period time
-1303  maxBalancingTime = 
getConfiguration().getInt(HConstants.HBASE_BALANCER_PERIOD,
-1304
HConstants.DEFAULT_HBASE_BALANCER_PERIOD);
-1305}
-1306return maxBalancingTime;
-1307  }
-1308
-1309  /**
-1310   * @return Maximum number of regions 
in transition
-1311   */
-1312  private int 
getMaxRegionsInTransition() {
-1313int numRegions = 
this.assignmentManager.getRegionStates().getRegionAssignments().size();
-1314return Math.max((int) 
Math.floor(numRegions * this.maxRitPercent), 1);
-1315  }
-1316
-1317  /**
-1318   * It first sleep to the next balance 
plan start time. Meanwhile, throttling by the max
-1319   * number regions in transition to 
protect availability.
-1320   * @param nextBalanceStartTime The 
next balance plan start time
-1321   * @param maxRegionsInTransition max 
number of r

[51/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
Published site at .


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/ec8bf761
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/ec8bf761
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/ec8bf761

Branch: refs/heads/asf-site
Commit: ec8bf76162d96b4f4f183a89262fc43f827e6435
Parents: e82a131
Author: jenkins 
Authored: Tue Feb 20 15:13:26 2018 +
Committer: jenkins 
Committed: Tue Feb 20 15:13:26 2018 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 4 +-
 apidocs/allclasses-frame.html   | 1 +
 apidocs/allclasses-noframe.html | 1 +
 apidocs/deprecated-list.html|   642 +-
 apidocs/index-all.html  |42 +-
 .../apache/hadoop/hbase/HBaseIOException.html   | 2 +-
 .../hadoop/hbase/MasterNotRunningException.html |17 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |   100 +-
 .../hadoop/hbase/class-use/CompareOperator.html |21 +-
 .../hbase/class-use/HBaseIOException.html   |30 +-
 .../hadoop/hbase/filter/ColumnRangeFilter.html  | 4 +-
 .../hadoop/hbase/filter/ColumnValueFilter.html  |   647 +
 .../hbase/filter/CompareFilter.CompareOp.html   |20 +-
 .../hadoop/hbase/filter/CompareFilter.html  |46 +-
 .../apache/hadoop/hbase/filter/ParseFilter.html |47 +-
 .../filter/class-use/ByteArrayComparable.html   |50 +-
 .../filter/class-use/ColumnValueFilter.html |   170 +
 .../filter/class-use/Filter.ReturnCode.html |70 +-
 .../hadoop/hbase/filter/class-use/Filter.html   |77 +-
 .../hadoop/hbase/filter/package-frame.html  | 1 +
 .../hadoop/hbase/filter/package-summary.html|65 +-
 .../hadoop/hbase/filter/package-tree.html   | 1 +
 .../apache/hadoop/hbase/filter/package-use.html |68 +-
 .../org/apache/hadoop/hbase/package-tree.html   | 2 +-
 apidocs/overview-tree.html  | 3 +-
 apidocs/serialized-form.html| 2 +-
 .../hadoop/hbase/MasterNotRunningException.html |66 +-
 .../hadoop/hbase/filter/ColumnValueFilter.html  |   313 +
 .../hbase/filter/CompareFilter.CompareOp.html   |   580 +-
 .../hadoop/hbase/filter/CompareFilter.html  |   580 +-
 .../apache/hadoop/hbase/filter/ParseFilter.html |  1346 +-
 book.html   | 2 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 26332 -
 checkstyle.rss  |98 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/allclasses-frame.html| 8 +-
 devapidocs/allclasses-noframe.html  | 8 +-
 devapidocs/constant-values.html |13 +-
 devapidocs/deprecated-list.html |   530 +-
 devapidocs/index-all.html   |   178 +-
 .../apache/hadoop/hbase/HBaseIOException.html   | 2 +-
 .../hadoop/hbase/MasterNotRunningException.html |19 +-
 .../hadoop/hbase/backup/package-tree.html   | 4 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |   122 +-
 .../hadoop/hbase/class-use/CompareOperator.html |33 +-
 .../hbase/class-use/DoNotRetryIOException.html  |12 +-
 .../hbase/class-use/HBaseIOException.html   |32 +-
 .../class-use/MasterNotRunningException.html|20 -
 .../hadoop/hbase/class-use/ServerName.html  |   130 +-
 .../hadoop/hbase/client/ClusterConnection.html  |   173 +-
 ...ectionImplementation.MasterServiceState.html |18 +-
 ...onImplementation.MasterServiceStubMaker.html |10 +-
 ...ntation.ServerErrorTracker.ServerErrors.html |10 +-
 ...ectionImplementation.ServerErrorTracker.html |20 +-
 .../hbase/client/ConnectionImplementation.html  |   310 +-
 .../ConnectionUtils.MasterlessConnection.html   | 2 +-
 ...nUtils.ShortCircuitingClusterConnection.html |28 +-
 .../HTable.CheckAndMutateBuilderImpl.html   |28 +-
 .../org/apache/hadoop/hbase/client/HTable.html  |   134 +-
 .../hadoop/hbase/client/MasterCallable.html |34 +-
 .../class-use/MasterKeepAliveConnection.html|16 +-
 .../hadoop/hbase/client/package-tree.html   |22 +-
 .../class-use/DeserializationException.html | 8 +-
 .../hadoop/hbase/executor/package-tree.html | 2 +-
 .../hadoop/hbase/filter/ColumnRangeFilter.html  | 4 +-
 .../hadoop/hbase/filter/ColumnValueFilter.html  |   796 +
 .../hbase/filter/CompareF

[09/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/MasterCallable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/MasterCallable.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/MasterCallable.html
index 9d27237..d2c2295 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/MasterCallable.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/MasterCallable.html
@@ -28,129 +28,128 @@
 020
 021import java.io.Closeable;
 022import java.io.IOException;
-023
-024import 
org.apache.hadoop.hbase.HRegionInfo;
-025import 
org.apache.hadoop.hbase.TableName;
-026import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-027import 
org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-028import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-029import 
org.apache.hadoop.hbase.util.Bytes;
-030import 
org.apache.yetus.audience.InterfaceAudience;
-031
-032/**
-033 * A RetryingCallable for Master RPC 
operations.
-034 * Implement the #rpcCall method. It will 
be retried on error. See its javadoc and the javadoc of
-035 * #call(int). See {@link HBaseAdmin} for 
examples of how this is used. To get at the
-036 * rpcController that has been created 
and configured to make this rpc call, use getRpcController().
-037 * We are trying to contain all protobuf 
references including references to rpcController so we
-038 * don't pollute codebase with protobuf 
references; keep the protobuf references contained and only
-039 * present in a few classes rather than 
all about the code base.
-040 * 

Like {@link RegionServerCallable} only in here, we can safely be PayloadCarryingRpcController -041 * all the time. This is not possible in the similar {@link RegionServerCallable} Callable because -042 * it has to deal with Coprocessor Endpoints. -043 * @param return type -044 */ -045@InterfaceAudience.Private -046abstract class MasterCallable implements RetryingCallable, Closeable { -047 protected final ClusterConnection connection; -048 protected MasterKeepAliveConnection master; -049 private final HBaseRpcController rpcController; -050 -051 MasterCallable(final Connection connection, final RpcControllerFactory rpcConnectionFactory) { -052this.connection = (ClusterConnection) connection; -053this.rpcController = rpcConnectionFactory.newController(); -054 } -055 -056 @Override -057 public void prepare(boolean reload) throws IOException { -058this.master = this.connection.getKeepAliveMasterService(); -059 } -060 -061 @Override -062 public void close() throws IOException { -063// The above prepare could fail but this would still be called though masterAdmin is null -064if (this.master != null) { -065 this.master.close(); -066 this.master = null; -067} -068 } -069 -070 @Override -071 public void throwable(Throwable t, boolean retrying) { -072 } -073 -074 @Override -075 public String getExceptionMessageAdditionalDetail() { -076return ""; -077 } -078 -079 @Override -080 public long sleep(long pause, int tries) { -081return ConnectionUtils.getPauseTime(pause, tries); -082 } -083 -084 /** -085 * Override that changes the {@link java.util.concurrent.Callable#call()} Exception from {@link Exception} to -086 * {@link IOException}. It also does setup of an rpcController and calls through to the rpcCall() -087 * method which callers are expected to implement. If rpcController is an instance of -088 * PayloadCarryingRpcController, we will set a timeout on it. 
-089 */ -090 @Override -091 // Same trick as in RegionServerCallable so users don't have to copy/paste so much boilerplate -092 // and so we contain references to protobuf. We can't set priority on the rpcController as -093 // we do in RegionServerCallable because we don't always have a Table when we call. -094 public V call(int callTimeout) throws IOException { -095try { -096 if (this.rpcController != null) { -097this.rpcController.reset(); -098 this.rpcController.setCallTimeout(callTimeout); -099 } -100 return rpcCall(); -101} catch (Exception e) { -102 throw ProtobufUtil.handleRemoteException(e); -103} -104 } -105 -106 /** -107 * Run the RPC call. Implement this method. To get at the rpcController that has been created -108 * and configured to make this rpc call, use getRpcController(). We are trying to contain -109 * rpcController references so we don't pollute codebase with protobuf references; keep the -110 * protobuf references contained and only present in a few classes rather than all about the -111 * code base. -112 * @throws Exception -113 */ -114 protected abstract V rpcCall() throws Exception; -115 -116 HBaseRpcController getRpcController() { -117return this.rpcController; -118 } -119 -120 void setPriority(f


[16/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceStubMaker.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceStubMaker.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceStubMaker.html
index 93f650f..d7aa8b1 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceStubMaker.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceStubMaker.html
@@ -546,1472 +546,1464 @@
 538return this.conf;
 539  }
 540
-541  /**
-542   * @return true if the master is 
running, throws an exception otherwise
-543   * @throws 
org.apache.hadoop.hbase.MasterNotRunningException - if the master is not 
running
-544   * @deprecated this has been deprecated 
without a replacement
-545   */
-546  @Deprecated
-547  @Override
-548  public boolean isMasterRunning()
-549  throws MasterNotRunningException, 
ZooKeeperConnectionException {
-550// When getting the master 
connection, we check it's running,
-551// so if there is no exception, it 
means we've been able to get a
-552// connection on a running master
-553MasterKeepAliveConnection m = 
getKeepAliveMasterService();
-554m.close();
-555return true;
-556  }
-557
-558  @Override
-559  public HRegionLocation 
getRegionLocation(final TableName tableName,
-560  final byte [] row, boolean 
reload)
-561  throws IOException {
-562return reload? 
relocateRegion(tableName, row): locateRegion(tableName, row);
-563  }
-564
-565
-566  @Override
-567  public boolean isTableEnabled(TableName 
tableName) throws IOException {
-568return 
getTableState(tableName).inStates(TableState.State.ENABLED);
-569  }
-570
-571  @Override
-572  public boolean 
isTableDisabled(TableName tableName) throws IOException {
-573return 
getTableState(tableName).inStates(TableState.State.DISABLED);
-574  }
-575
-576  @Override
-577  public boolean isTableAvailable(final 
TableName tableName, @Nullable final byte[][] splitKeys)
-578  throws IOException {
-579if (this.closed) {
-580  throw new IOException(toString() + 
" closed");
-581}
-582try {
-583  if (!isTableEnabled(tableName)) {
-584LOG.debug("Table " + tableName + 
" not enabled");
-585return false;
-586  }
-587  List> locations =
-588
MetaTableAccessor.getTableRegionsAndLocations(this, tableName, true);
-589
-590  int notDeployed = 0;
-591  int regionCount = 0;
-592  for (Pair pair : locations) {
-593RegionInfo info = 
pair.getFirst();
-594if (pair.getSecond() == null) {
-595  if (LOG.isDebugEnabled()) {
-596LOG.debug("Table " + 
tableName + " has not deployed region " + pair.getFirst()
-597.getEncodedName());
-598  }
-599  notDeployed++;
-600} else if (splitKeys != null
-601&& 
!Bytes.equals(info.getStartKey(), HConstants.EMPTY_BYTE_ARRAY)) {
-602  for (byte[] splitKey : 
splitKeys) {
-603// Just check if the splitkey 
is available
-604if 
(Bytes.equals(info.getStartKey(), splitKey)) {
-605  regionCount++;
-606  break;
-607}
-608  }
-609} else {
-610  // Always empty start row 
should be counted
-611  regionCount++;
-612}
-613  }
-614  if (notDeployed > 0) {
-615if (LOG.isDebugEnabled()) {
-616  LOG.debug("Table " + tableName 
+ " has " + notDeployed + " regions");
-617}
-618return false;
-619  } else if (splitKeys != null 
&& regionCount != splitKeys.length + 1) {
-620if (LOG.isDebugEnabled()) {
-621  LOG.debug("Table " + tableName 
+ " expected to have " + (splitKeys.length + 1)
-622  + " regions, but only " + 
regionCount + " available");
-623}
-624return false;
-625  } else {
-626if (LOG.isDebugEnabled()) {
-627  LOG.debug("Table " + tableName 
+ " should be available");
-628}
-629return true;
-630  }
-631} catch (TableNotFoundException tnfe) 
{
-632  LOG.warn("Table " + tableName + " 
not enabled, it is not exists");
-633  return false;
-634}
-635  }
-636
-637  @Override
-638  public HRegionLocation 
locateRegion(final byte[] regionName) throws IOException {
-639RegionLocations locations = 
locateRegion(RegionInfo.getTable(regionName),
-640  RegionInfo.getStartKey(regionName), 
false, true);
-641return locations == null ? null : 
locations.getRegionLocation();
+541  private void checkClosed() throws 
DoNotRetryIOException {
+542if (this.closed) {
+543  throw new 
DoNotRetryI

[33/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
 
b/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
index 7ecc0db..198ba2f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
@@ -210,7 +210,7 @@ extends ConnectionImplementation
-abort,
 cacheLocation,
 clearCaches,
 clearRegionCache,
 clearRegionCache,
 clearRegionCache,
 close, deleteCachedRegionLocation,
 finalize,
 getAdmin,
 getAdmin,
 getAdminForMaster,
 getAsyncProcess,
 getBackoffPolicy,
 getBufferedMutator,
 getBufferedMutator,
 getCachedLocation,
 getClient,
 getConfiguration,
 getConnectionConfiguration,
 getConnectionMetrics, getCurrentBatchPool,
 getCurrentMetaLookupPool,
 getCurrentNrHRS,
 getKeepAliveMasterService,
 getMaster,
 getNewRpcRetryingCallerFactory,
 getNonceGenerator, getNumberOfCachedRegionLocations,
 getRegionLocation,
 getRegionLocator,
 getRpcClient,
 getRpcControllerFactory,
 getRpcRetryingCallerFactory,
 getStatisticsTracker,
 getTable,
 getTableBuilder,
 getTableState,
 hasCellBlockSupport,
 injectNonceGeneratorForTesting
 , isAborted,
 isClosed,
 isDeadServer,
 isMasterRunning,
 isTableAvailable,
 isTableEnabled,
 locateRegion,
 locateRegion,
 locateRegion,
 locateRegion,
 locateRegions,
 locateRegions,
 releaseMaster,
 relocateRegion,
 relocateRegion,
 retrieveClusterId,
 setUseMetaReplicas,
 toString,
 updateCachedLocation,
 updateCachedLocations
+abort,
 cacheLocation,
 clearCaches,
 clearRegionCache,
 clearRegionCache,
 clearRegionCache,
 close, deleteCachedRegionLocation,
 finalize,
 getAdmin,
 getAdmin,
 getAdminForMaster,
 getAsyncProcess,
 getBackoffPolicy,
 getBufferedMutator,
 getBufferedMutator,
 getCachedLocation,
 getClient,
 getConfiguration,
 getConnectionConfiguration,
 getConnectionMetrics, getCurrentBatchPool,
 getCurrentMetaLookupPool,
 getCurrentNrHRS,
 getMaster,
 getNewRpcRetryingCallerFactory,
 getNonceGenerator,
 getNumberOfCachedRegionLocations,
 getRegionLocation,
 getRegionLocator,
 getRpcClient,
 getRpcControllerFactory,
 getRpcRetryingCallerFactory,
 getStatisticsTracker,
 getTable,
 getTableBuilder,
 getTableState,
 hasCellBlockSupport,
 injectNonceGeneratorForTesting,
 isAborted,
 isClosed,
 isMasterRunning,
 isTableAvailable,
 isTableEnabled,
 locateRegion,
 locateRegion,
 locateRegion,
 locateRegion,
 locateRegions,
 locateRegions,
 releaseMaster,
 relocateRegion,
 relocateRegion, retrieveClusterId,
 setUseMetaReplicas,
 toString,
 updateCachedLocation,
 updateCachedLocations
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
 
b/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
index c4a71e3..56967b7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
@@ -240,9 +240,8 @@ extends 
 MasterKeepAliveConnection
-getKeepAliveMasterService()
-This function allows HBaseAdmin and potentially others to 
get a shared MasterService
- connection.
+getMaster()
+Returns a MasterKeepAliveConnection to 
the active master
 
 
 
@@ -251,7 +250,7 @@ extends ConnectionImplementation
-abort,
 cacheLocation,
 clearCaches,
 clearRegionCache,
 clearRegionCache,
 clearRegionCache,
 close, deleteCachedRegionLocation,
 finalize,
 getAdmin,
 getAdminForMaster,
 getAsyncProcess,
 getBackoffPolicy,
 getBufferedMutator, getBufferedMutator,
 getCachedLocation,
 getConfiguration,
 getConnectionConfiguration,
 getConnectionMetrics,
 getCurrentBatchPool,
 getCurrentMetaLookupPool, getCurrentNrHRS,
 getMaster,
 getNewRpcRetryingCallerFactory,
 getNonceGenerator,
 getNumberOfCachedRegionLocations,
 getRegionLocation,
 getRegionLocator,
 getRpcClient,
 getRpcControllerFactory,
 getRpcRetryingCallerFactory,
 getStatisticsTracker,
 getTable,
 getTableBuilder, getTableState,
 hasCellBlockSupport,
 injectNonceGeneratorForTesting,
 isAborted,
 isClosed,
 isDeadServer,
 isMasterRunning, isTableAvailable,
 isTableDisabled,
 isTableEnabled,
 locateRegion,
 locateRegion,
 locate
 Region, locateRegion,
 locateRegions,
 locateRegions,
 releaseMaster,
 relocateRegion,
 relocateRegion, retrieveClusterId,
 setUseMetaReplicas,
 toString,
 updateCachedLocation,
 updateCachedLocations
+abort,
 cacheLocation,
 clearCac

[23/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerThread.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerThread.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerThread.html
index 6cba39a..3a47680 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerThread.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerThread.html
@@ -103,7 +103,7 @@ var activeTableTab = "activeTableTab";
 http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true";
 title="class or interface in java.lang">java.lang.Thread
 
 
-org.apache.hadoop.hbase.procedure2.ProcedureExecutor.StoppableThread
+org.apache.hadoop.hbase.procedure2.StoppableThread
 
 
 org.apache.hadoop.hbase.procedure2.ProcedureExecutor.WorkerThread
@@ -122,13 +122,17 @@ var activeTableTab = "activeTableTab";
 http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true";
 title="class or interface in java.lang">Runnable
 
 
+Direct Known Subclasses:
+ProcedureExecutor.KeepAliveWorkerThread
+
+
 Enclosing class:
 ProcedureExecutor
 
 
 
-private final class ProcedureExecutor.WorkerThread
-extends ProcedureExecutor.StoppableThread
+private class ProcedureExecutor.WorkerThread
+extends StoppableThread
 
 
 
@@ -163,7 +167,7 @@ extends Field and Description
 
 
-private Procedure
+private Procedure
 activeProcedure 
 
 
@@ -189,10 +193,17 @@ extends 
 Constructors 
 
-Constructor and Description
+Modifier
+Constructor and Description
 
 
-WorkerThread(http://docs.oracle.com/javase/8/docs/api/java/lang/ThreadGroup.html?is-external=true";
 title="class or interface in 
java.lang">ThreadGroup group) 
+ 
+WorkerThread(http://docs.oracle.com/javase/8/docs/api/java/lang/ThreadGroup.html?is-external=true";
 title="class or interface in 
java.lang">ThreadGroup group) 
+
+
+protected 
+WorkerThread(http://docs.oracle.com/javase/8/docs/api/java/lang/ThreadGroup.html?is-external=true";
 title="class or interface in java.lang">ThreadGroup group,
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String prefix) 
 
 
 
@@ -214,7 +225,7 @@ extends getCurrentRunTime() 
 
 
-private boolean
+protected boolean
 keepAlive(long lastUpdate) 
 
 
@@ -231,11 +242,11 @@ extends 
-
+
 
 
-Methods inherited from class org.apache.hadoop.hbase.procedure2.ProcedureExecutor.StoppableThread
-awaitTermination
+Methods inherited from class org.apache.hadoop.hbase.procedure2.StoppableThread
+awaitTermination
 
 
 
@@ -271,7 +282,7 @@ extends 
 
 executionStartTime
-private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">AtomicLong executionStartTime
+private final http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">AtomicLong executionStartTime
 
 
 
@@ -280,7 +291,7 @@ extends 
 
 activeProcedure
-private Procedure activeProcedure
+private volatile Procedure activeProcedure
 
 
 
@@ -294,10 +305,20 @@ extends 
 
 
+
+
+WorkerThread
+public WorkerThread(http://docs.oracle.com/javase/8/docs/api/java/lang/ThreadGroup.html?is-external=true";
 title="class or interface in java.lang">ThreadGroup group)
+
+
+
+
+
 
 
 WorkerThread
-public WorkerThread(http://docs.oracle.com/javase/8/docs/api/java/lang/ThreadGroup.html?is-external=true";
 title="class or interface in java.lang">ThreadGroup group)
+protected WorkerThread(http://docs.oracle.com/javase/8/docs/api/java/lang/ThreadGroup.html?is-external=true";
 title="class or interface in java.lang">ThreadGroup group,
+   http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String prefix)
 
 
 
@@ -314,10 +335,10 @@ extends 
 
 sendStopSignal
-public void sendStopSignal()
+public void sendStopSignal()
 
 Specified by:
-sendStopSignal in
 class ProcedureExecutor.StoppableThread
+sendStopSignal in
 class StoppableThread
 
 
 
@@ -327,7 +348,7 @@ extends 
 
 run
-public void run()
+public void run()
 
 Specified by:
 http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true#run--";
 title="class or interface in java.lang">run in 
interface http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true";
 title="class or interface in java.lang">Runnable
@@ -342,7 +363,7 @@ extends 
 
 toString
-public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String toString()
+public http://docs.oracle.com/javase/8/docs/a

[36/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index 514cc1e..0214141 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -1621,35 +1621,27 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 
-boolean
+private boolean
 ConnectionImplementation.isDeadServer(ServerName sn) 
 
 
 boolean
-ClusterConnection.isDeadServer(ServerName serverName)
-Deprecated. 
-internal method, do not 
use thru ClusterConnection
-
-
-
-
-boolean
 ClusterStatusListener.isDeadServer(ServerName sn)
 Check if we know if a server is dead.
 
 
-
+
 protected boolean
 PreemptiveFastFailInterceptor.isServerInFailureMap(ServerName serverName) 
 
-
+
 private void
 AsyncBatchRpcRetryingCaller.logException(int tries,
 http://docs.oracle.com/javase/8/docs/api/java/util/function/Supplier.html?is-external=true";
 title="class or interface in java.util.function">SupplierStream> regionsSupplier,
 http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable error,
 ServerName serverName) 
 
-
+
 private void
 AsyncRequestFutureImpl.logNoResubmit(ServerName oldServer,
  int numAttempt,
@@ -1658,31 +1650,31 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
  int failed,
  int stopped) 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 AsyncHBaseAdmin.majorCompactRegionServer(ServerName serverName) 
 
-
+
 void
 Admin.majorCompactRegionServer(ServerName serverName)
 Major compact all regions on the region server.
 
 
-
+
 void
 HBaseAdmin.majorCompactRegionServer(ServerName serverName) 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 AsyncAdmin.majorCompactRegionServer(ServerName serverName)
 Compact all regions on the region server.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 RawAsyncHBaseAdmin.majorCompactRegionServer(ServerName sn) 
 
-
+
 (package private) AsyncRequestFutureImpl.Retry
 AsyncRequestFutureImpl.manageError(int originalIndex,
Row row,
@@ -1692,30 +1684,30 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 Check that we can retry acts accordingly: logs, set the 
error status.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 AsyncHBaseAdmin.move(byte[] regionName,
 ServerName destServerName) 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 AsyncAdmin.move(byte[] regionName,
 ServerName destServerName)
 Move the region r to dest.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 RawAsyncHBaseAdmin.move(byte[] regionName,
 ServerName destServerName) 
 
-
+
 void
 ClusterStatusListener.DeadServerHandler.newDead(ServerName sn)
 Called when a server is identified as dead.
 
 
-
+
 private void
 AsyncBatchRpcRetryingCaller.onComplete(Action action,
   AsyncBatchRpcRetryingCaller.RegionReque

[45/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnValueFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
new file mode 100644
index 000..ee5595f
--- /dev/null
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
@@ -0,0 +1,313 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+Source code
+
+
+
+
+001/**
+002 *
+003 * Licensed to the Apache Software 
Foundation (ASF) under one
+004 * or more contributor license 
agreements.  See the NOTICE file
+005 * distributed with this work for 
additional information
+006 * regarding copyright ownership.  The 
ASF licenses this file
+007 * to you under the Apache License, 
Version 2.0 (the
+008 * "License"); you may not use this file 
except in compliance
+009 * with the License.  You may obtain a 
copy of the License at
+010 *
+011 * 
http://www.apache.org/licenses/LICENSE-2.0
+012 *
+013 * Unless required by applicable law or 
agreed to in writing, software
+014 * distributed under the License is 
distributed on an "AS IS" BASIS,
+015 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+016 * See the License for the specific 
language governing permissions and
+017 * limitations under the License.
+018 */
+019
+020package org.apache.hadoop.hbase.filter;
+021
+022import java.io.IOException;
+023import java.util.ArrayList;
+024
+025import org.apache.hadoop.hbase.Cell;
+026import 
org.apache.hadoop.hbase.CellUtil;
+027import 
org.apache.hadoop.hbase.CompareOperator;
+028import 
org.apache.hadoop.hbase.PrivateCellUtil;
+029import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
+030import 
org.apache.hadoop.hbase.util.Bytes;
+031import 
org.apache.yetus.audience.InterfaceAudience;
+032
+033import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+034import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+035import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
+036
+037import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+038import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
+039import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
+040
+041/**
+042 * Different from {@link 
SingleColumnValueFilter} which returns an entire row
+043 * when specified condition is matched, 
{@link ColumnValueFilter} return the matched cell only.
+044 * 

+045 * This filter is used to filter cells based on column and value. +046 * It takes a {@link org.apache.hadoop.hbase.CompareOperator} operator (<, <=, =, !=, >, >=), and +047 * and a {@link ByteArrayComparable} comparator. +048 */ +049@InterfaceAudience.Public +050public class ColumnValueFilter extends FilterBase { +051 private final byte[] family; +052 private final byte[] qualifier; +053 private final CompareOperator op; +054 private final ByteArrayComparable comparator; +055 +056 // This flag is used to speed up seeking cells when matched column is found, such that following +057 // columns in the same row can be skipped faster by NEXT_ROW instead of NEXT_COL. +058 private boolean columnFound = false; +059 +060 public ColumnValueFilter(final byte[] family, final byte[] qualifier, +061 final CompareOperator op, final byte[] value) { +062this(family, qualifier, op, new BinaryComparator(value)); +063 } +064 +065 public ColumnValueFilter(final byte[] family, final byte[] qualifier, +066 final CompareOperator op, +067 final ByteArrayComparable comparator) { +068this.family = Preconditions.checkNotNull(family, "family should not be null."); +069this.qualifier = qualifier == null ? 
new byte[0] : qualifier; +070this.op = Preconditions.checkNotNull(op, "CompareOperator should not be null"); +071this.comparator = Preconditions.checkNotNull(comparator, "Comparator should not be null"); +072 } +073 +074 /** +075 * @return operator +076 */ +077 public CompareOperator getCompareOperator() { +078return op; +079 } +080 +081 /** +082 * @return the comparator +083 */ +084 public ByteArrayComparable getComparator() { +085return comparator; +086 } +087 +088 /** +089 * @return the column family +090 */ +091 public byte[] getFamily() { +092return family; +093 } +094 +095 /** +096 * @return the qualifier +097 */ +098 public byte[] getQualifier() { +099return qualifier; +100 } +101 +102 @Override +103 public void reset() throws IOException { +104columnFound = false; +105 } +106 +107 @Override +108 public boolean filterRowKey(Cell cell) throws IOException { +109return false; +110 } +111 +112 @Override +113 public ReturnCode


[32/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/client/HTable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/HTable.html 
b/devapidocs/org/apache/hadoop/hbase/client/HTable.html
index f06bd9c..3341a8c 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/HTable.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/HTable.html
@@ -1055,7 +1055,7 @@ public static http://docs.oracle.com/javase/8/docs/api/java/util/c
 
 
 getMaxKeyValueSize
-public static int getMaxKeyValueSize(org.apache.hadoop.conf.Configuration conf)
+public static int getMaxKeyValueSize(org.apache.hadoop.conf.Configuration conf)
 
 Returns:
 maxKeyValueSize from configuration.
@@ -1068,7 +1068,7 @@ public static http://docs.oracle.com/javase/8/docs/api/java/util/c
 
 
 getConfiguration
-public org.apache.hadoop.conf.Configuration getConfiguration()
+public org.apache.hadoop.conf.Configuration getConfiguration()
 Description copied from 
interface: Table
 Returns the Configuration object used by this 
instance.
  
@@ -1086,7 +1086,7 @@ public static http://docs.oracle.com/javase/8/docs/api/java/util/c
 
 
 getName
-public TableName getName()
+public TableName getName()
 Description copied from 
interface: Table
 Gets the fully qualified table name instance of this 
table.
 
@@ -1101,7 +1101,7 @@ public static http://docs.oracle.com/javase/8/docs/api/java/util/c
 
 
 getConnection
-protected Connection getConnection()
+protected Connection getConnection()
 INTERNAL Used by unit tests and tools to do 
low-level
  manipulations.
 
@@ -1117,7 +1117,7 @@ public static http://docs.oracle.com/javase/8/docs/api/java/util/c
 
 getTableDescriptor
 http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
-public HTableDescriptor getTableDescriptor()
+public HTableDescriptor getTableDescriptor()
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Deprecated. 
 Description copied from 
interface: Table
@@ -1136,7 +1136,7 @@ public 
 
 getDescriptor
-public TableDescriptor getDescriptor()
+public TableDescriptor getDescriptor()
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Description copied from 
interface: Table
 Gets the table 
descriptor for this table.
@@ -1154,7 +1154,7 @@ public 
 
 getKeysAndRegionsInRange
-private PairList,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List> getKeysAndRegionsInRange(byte[] startKey,
+private PairList,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List> getKeysAndRegionsInRange(byte[] startKey,
   
byte[] endKey,
   
boolean includeEndKey)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
@@ -1180,7 +1180,7 @@ public 
 
 getKeysAndRegionsInRange
-private PairList,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List> getKeysAndRegionsInRange(byte[] startKey,
+private PairList,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List> getKeysAndRegionsInRange(byte[] startKey,
   
byte[] endKey,
   
boolean includeEndKey,
   
boolean reload)
@@ -1208,7 +1208,7 @@ public 
 
 getScanner
-public ResultScanner getScanner(Scan scan)
+public ResultScanner getScanner(Scan scan)
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html

[34/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.html 
b/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.html
index ea69217..a36892a 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":9,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":9,"i47":10,"i48":10,"i49":10,"i50":10,"i51":42,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":9,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":9,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":9,"i48":10,"i49":10,"i50":10,"i51":10,"i52":42,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":9,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -403,172 +403,173 @@ implements 
+private void
+checkClosed() 
+
+
 void
 clearCaches(ServerName serverName)
 Clear any caches that pertain to server name 
sn.
 
 
-
+
 void
 clearRegionCache()
 Allows flushing the region cache.
 
 
-
+
 void
 clearRegionCache(TableName tableName)
 Allows flushing the region cache of all locations that 
pertain to
  tableName
 
 
-
+
 void
 clearRegionCache(TableName tableName,
 byte[] row) 
 
-
+
 void
 close() 
 
-
+
 private void
 closeMaster()
 Immediate close of the shared master.
 
 
-
+
 private void
 closeMasterService(ConnectionImplementation.MasterServiceState mss) 
 
-
+
 void
 deleteCachedRegionLocation(HRegionLocation location)
 Deletes cached locations for the specific region.
 
 
-
+
 protected void
 finalize()
 Close the connection for good.
 
 
-
+
 private static  T
 get(http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CompletableFuture future) 
 
-
+
 Admin
 getAdmin()
 Retrieve an Admin implementation to administer an HBase 
cluster.
 
 
-
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface
 getAdmin(ServerName serverName)
 Establishes a connection to the region server at the 
specified address.
 
 
-
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface
 getAdminForMaster()
 Get the admin service for master.
 
 
-
+
 AsyncProcess
 getAsyncProcess() 
 
-
+
 ClientBackoffPolicy
 getBackoffPolicy() 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService
 getBatchPool() 
 
-
+
 BufferedMutator
 getBufferedMutator(BufferedMutatorParams params)
 Retrieve a BufferedMutator for performing 
client-side buffering of writes.
 
 
-
+
 BufferedMutator
 getBufferedMutator(TableName tableName)
 
  Retrieve a BufferedMutator for performing 
client-side buffering of writes.
 
 
-
+
 (package private) RegionLocations
 getCachedLocation(TableName tableName,
  byte[] row)
 Search the cache for a location that fits our table and row 
key.
 
 
-
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.BlockingInterface
 getClient(ServerName serverName)
 Establishes a connection to the region server at the 
specified address, and returns
  a region client protocol.
 
 
-
+
 org.apache.hadoop.conf.Configuration
 getConfiguration() 
 
-
+
 ConnectionConfiguration
 getConnectionConfiguration() 
 
-
+
 MetricsConnection
 getConnectionMetrics() 
 
-
+
 protected http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-extern

[35/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/client/ClusterConnection.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/ClusterConnection.html 
b/devapidocs/org/apache/hadoop/hbase/client/ClusterConnection.html
index 70e44ee..dc82a5f 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/ClusterConnection.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/ClusterConnection.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":38,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":38,"i24":38,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":38,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public interface ClusterConnection
+public interface ClusterConnection
 extends Connection
 Internal methods on Connection that should not be used by 
user code.
 
@@ -224,29 +224,21 @@ extends 
 
 MasterKeepAliveConnection
-getKeepAliveMasterService()
-Deprecated. 
-Since 0.96.0
-
-
-
-
-org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface
 getMaster()
 Returns a MasterKeepAliveConnection to 
the active master
 
 
-
+
 RpcRetryingCallerFactory
 getNewRpcRetryingCallerFactory(org.apache.hadoop.conf.Configuration conf)
 Returns a new RpcRetryingCallerFactory from the given 
Configuration.
 
 
-
+
 NonceGenerator
 getNonceGenerator() 
 
-
+
 HRegionLocation
 getRegionLocation(TableName tableName,
  byte[] row,
@@ -254,37 +246,29 @@ extends Find region location hosting passed row
 
 
-
+
 RpcControllerFactory
 getRpcControllerFactory() 
 
-
+
 RpcRetryingCallerFactory
 getRpcRetryingCallerFactory() 
 
-
+
 ServerStatisticTracker
 getStatisticsTracker() 
 
-
+
 TableState
 getTableState(TableName tableName)
 Retrieve TableState, represent current table state.
 
 
-
+
 boolean
 hasCellBlockSupport() 
 
-
-boolean
-isDeadServer(ServerName serverName)
-Deprecated. 
-internal method, do not 
use thru ClusterConnection
-
-
-
-
+
 boolean
 isMasterRunning()
 Deprecated. 
@@ -292,7 +276,7 @@ extends 
 
 
-
+
 boolean
 isTableAvailable(TableName tableName,
 byte[][] splitKeys)
@@ -300,24 +284,24 @@ extends 
 
 
-
+
 boolean
 isTableDisabled(TableName tableName) 
 
-
+
 boolean
 isTableEnabled(TableName tableName)
 A table that isTableEnabled == false and isTableDisabled == 
false
  is possible.
 
 
-
+
 HRegionLocation
 locateRegion(byte[] regionName)
 Gets the location of the region of regionName.
 
 
-
+
 HRegionLocation
 locateRegion(TableName tableName,
 byte[] row)
@@ -325,14 +309,14 @@ extends 
 
 
-
+
 RegionLocations
 locateRegion(TableName tableName,
 byte[] row,
 boolean useCache,
 boolean retry) 
 
-
+
 RegionLocations
 locateRegion(TableName tableName,
 byte[] row,
@@ -340,13 +324,13 @@ extends  
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 locateRegions(TableName tableName)
 Gets the locations of all regions in the specified table, 
tableName.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 locateRegions(TableName tableName,
  boolean useCache,
@@ -354,7 +338,7 @@ extends Gets the locations of all regions in the specified table, 
tableName.
 
 
-
+
 HRegionLocation
 relocateRegion(TableName tableName,
   byte[] row)
@@ -362,7 +346,7 @@ extends 
 
 
-
+
 RegionLocations
 relocateRegion(TableName tableName,
   byte[] row,
@@ -371,7 +355,7 @@ extends 
 
 
-
+
 void
 updateCachedLocations(TableName tableName,
  byte[] regionName,
@@ -416,7 +400,7 @@ extends 
 
 HBASE_CLIENT_CONNECTION_IMPL
-static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String HBASE_CLIENT_CONNECTION_IMPL
+static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String HBASE_CLIENT_CONNECTION_IMPL
 Key for configuration in Configuration whose value is the 
class we imp

[31/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/client/MasterCallable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/MasterCallable.html 
b/devapidocs/org/apache/hadoop/hbase/client/MasterCallable.html
index bb9f461..3c505c9 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/MasterCallable.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/MasterCallable.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-abstract class MasterCallable
+abstract class MasterCallable
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements RetryingCallable, http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true";
 title="class or interface in java.io">Closeable
 A RetryingCallable for Master RPC operations.
@@ -282,7 +282,7 @@ implements 
 
 connection
-protected final ClusterConnection connection
+protected final ClusterConnection connection
 
 
 
@@ -291,7 +291,7 @@ implements 
 
 master
-protected MasterKeepAliveConnection master
+protected MasterKeepAliveConnection master
 
 
 
@@ -300,7 +300,7 @@ implements 
 
 rpcController
-private final HBaseRpcController rpcController
+private final HBaseRpcController rpcController
 
 
 
@@ -317,7 +317,7 @@ implements 
 
 MasterCallable
-MasterCallable(Connection connection,
+MasterCallable(Connection connection,
RpcControllerFactory rpcConnectionFactory)
 
 
@@ -335,7 +335,7 @@ implements 
 
 prepare
-public void prepare(boolean reload)
+public void prepare(boolean reload)
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Description copied from 
interface: RetryingCallable
 Prepare by setting up any connections to servers, etc., 
ahead of call invocation.
@@ -356,7 +356,7 @@ implements 
 
 close
-public void close()
+public void close()
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 
 Specified by:
@@ -374,7 +374,7 @@ implements 
 
 throwable
-public void throwable(http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable t,
+public void throwable(http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable t,
   boolean retrying)
 Description copied from 
interface: RetryingCallable
 Called when call throws an exception and we are going to 
retry; take action to
@@ -396,7 +396,7 @@ implements 
 
 getExceptionMessageAdditionalDetail
-public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getExceptionMessageAdditionalDetail()
+public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getExceptionMessageAdditionalDetail()
 
 Specified by:
 getExceptionMessageAdditionalDetail in
 interface RetryingCallable
@@ -413,7 +413,7 @@ implements 
 
 sleep
-public long sleep(long pause,
+public long sleep(long pause,
   int tries)
 
 Specified by:
@@ -432,7 +432,7 @@ implements 
 
 call
-public V call(int callTimeout)
+public V call(int callTimeout)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Override that changes the http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true#call--";
 title="class or interface in 
java.util.concurrent">Callable.call() Exception from http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true";
 title="class or interface in java.lang">Exception to
  http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException. It also 
does setup of an rpcController and calls through to the rpcCall()
@@ -456,7 +456,7 @@ implements 
 
 rpcCall
-protected abstract V rpcCall()
+protected abstract V rpcCall()
   throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true";
 title="class or interface in java.lang">Exception
 Run the RPC call. Implement this method. To get at the 
rpcController that has been created
  and configured to make this rpc call, use getRpcController(). We are trying 
to contain
@@ -475,7 +475,7 @@ implements 
 
 getRpcController
-HBaseRpcController getRpcController()
+HBaseRpcController getRpcController()
 
 
 
@@ -484,7 +484,7 @@ implements 
 
 setPriority
-void setPriority(int priority)

[12/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
index 3152619..27db368 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
@@ -39,29 +39,29 @@
 031import 
java.util.concurrent.ExecutorService;
 032import 
java.util.concurrent.ThreadLocalRandom;
 033import java.util.concurrent.TimeUnit;
-034
-035import 
org.apache.hadoop.conf.Configuration;
-036import org.apache.hadoop.hbase.Cell;
-037import 
org.apache.hadoop.hbase.CellComparator;
-038import 
org.apache.hadoop.hbase.HConstants;
-039import 
org.apache.hadoop.hbase.MasterNotRunningException;
-040import 
org.apache.hadoop.hbase.PrivateCellUtil;
-041import 
org.apache.hadoop.hbase.ServerName;
-042import 
org.apache.hadoop.hbase.TableName;
-043import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
-044import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-045import 
org.apache.hadoop.hbase.security.User;
-046import 
org.apache.hadoop.hbase.security.UserProvider;
-047import 
org.apache.hadoop.hbase.util.Bytes;
-048import 
org.apache.hadoop.hbase.util.ReflectionUtils;
-049import 
org.apache.hadoop.ipc.RemoteException;
-050import org.apache.hadoop.net.DNS;
-051import 
org.apache.yetus.audience.InterfaceAudience;
-052import org.slf4j.Logger;
-053import org.slf4j.LoggerFactory;
-054import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-055import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-056import 
org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
+034import 
org.apache.hadoop.conf.Configuration;
+035import org.apache.hadoop.hbase.Cell;
+036import 
org.apache.hadoop.hbase.CellComparator;
+037import 
org.apache.hadoop.hbase.HConstants;
+038import 
org.apache.hadoop.hbase.PrivateCellUtil;
+039import 
org.apache.hadoop.hbase.ServerName;
+040import 
org.apache.hadoop.hbase.TableName;
+041import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+042import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+043import 
org.apache.hadoop.hbase.security.User;
+044import 
org.apache.hadoop.hbase.security.UserProvider;
+045import 
org.apache.hadoop.hbase.util.Bytes;
+046import 
org.apache.hadoop.hbase.util.ReflectionUtils;
+047import 
org.apache.hadoop.ipc.RemoteException;
+048import org.apache.hadoop.net.DNS;
+049import 
org.apache.yetus.audience.InterfaceAudience;
+050import org.slf4j.Logger;
+051import org.slf4j.LoggerFactory;
+052
+053import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+054import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+055import 
org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
+056
 057import 
org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
 058import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
 059import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
@@ -163,11 +163,11 @@
 155}
 156
 157@Override
-158public MasterKeepAliveConnection 
getKeepAliveMasterService() throws MasterNotRunningException {
+158public MasterKeepAliveConnection 
getMaster() throws IOException {
 159  if (this.localHostClient instanceof 
MasterService.BlockingInterface) {
 160return new 
ShortCircuitMasterConnection((MasterService.BlockingInterface)this.localHostClient);
 161  }
-162  return 
super.getKeepAliveMasterService();
+162  return super.getMaster();
 163}
 164  }
 165

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
index 3152619..27db368 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
@@ -39,29 +39,29 @@
 031import 
java.util.concurrent.ExecutorService;
 032import 
java.util.concurrent.ThreadLocalRandom;
 033import java.util.concurrent.TimeUnit;
-034
-035import 
org.apache.hadoop.conf.Configuration;
-036import org.apache.hadoop.hbase.Cell;
-037import 
org.apache.hadoop.hbase.CellCom

[40/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 139d7d6..2cc01cb 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 ©2007 - 2018 The Apache Software Foundation
 
-  File: 3544,
- Errors: 16537,
+  File: 3550,
+ Errors: 16524,
  Warnings: 0,
  Infos: 0
   
@@ -18414,6 +18414,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.procedure2.StoppableThread.java";>org/apache/hadoop/hbase/procedure2/StoppableThread.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.StripeCompactionsPerformanceEvaluation.java";>org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java
 
 
@@ -27719,7 +27733,7 @@ under the License.
   0
 
 
-  25
+  24
 
   
   
@@ -28536,6 +28550,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.procedure2.InlineChore.java";>org/apache/hadoop/hbase/procedure2/InlineChore.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.ipc.HBaseRpcControllerImpl.java";>org/apache/hadoop/hbase/ipc/HBaseRpcControllerImpl.java
 
 
@@ -29259,7 +29287,7 @@ under the License.
   0
 
 
-  3
+  2
 
   
   
@@ -32050,6 +32078,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.procedure2.TimeoutExecutorThread.java";>org/apache/hadoop/hbase/procedure2/TimeoutExecutorThread.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.ipc.PriorityFunction.java";>org/apache/hadoop/hbase/ipc/PriorityFunction.java
 
 
@@ -37412,6 +37454,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.procedure2.DelayedProcedure.java";>org/apache/hadoop/hbase/procedure2/DelayedProcedure.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.io.TimeRange.java";>org/apache/hadoop/hbase/io/TimeRange.java
 
 
@@ -37533,7 +37589,7 @@ under the License.
   0
 
 
-  48
+  47
 
   
   
@@ -42041,7 +42097,7 @@ under the License.
   0
 
 
-  29
+  20
 
   
   
@@ -43973,7 +44029,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
@@ -44454,6 +44510,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.filter.ColumnValueFilter.java";>org/apache/hadoop/hbase/filter/ColumnValueFilter.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  

hbase git commit: HBASE-20021 TestFromClientSideWithCoprocessor is flakey

2018-02-20 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 34d3e847c -> 0152d5e2a


HBASE-20021 TestFromClientSideWithCoprocessor is flakey


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0152d5e2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0152d5e2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0152d5e2

Branch: refs/heads/branch-2
Commit: 0152d5e2aec4dd81afbbeefde00f1941c3579649
Parents: 34d3e84
Author: zhangduo 
Authored: Tue Feb 20 20:07:27 2018 +0800
Committer: Michael Stack 
Committed: Tue Feb 20 06:24:22 2018 -0800

--
 .../hadoop/hbase/client/TestFromClientSide.java| 17 ++---
 .../client/TestFromClientSideWithCoprocessor.java  | 17 +
 2 files changed, 15 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0152d5e2/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index b028880..29d3439 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -139,24 +139,27 @@ public class TestFromClientSide {
   @Rule
   public TestName name = new TestName();
 
-  @BeforeClass
-  public static void setUpBeforeClass() throws Exception {
+  protected static final void initialize(Class... cps) throws Exception {
 // Uncomment the following lines if more verbosity is needed for
 // debugging (see HBASE-12285 for details).
-//((Log4JLogger)RpcServer.LOG).getLogger().setLevel(Level.ALL);
-//((Log4JLogger)RpcClient.LOG).getLogger().setLevel(Level.ALL);
-//((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
+// ((Log4JLogger)RpcServer.LOG).getLogger().setLevel(Level.ALL);
+// ((Log4JLogger)RpcClient.LOG).getLogger().setLevel(Level.ALL);
+// ((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
 // make sure that we do not get the same ts twice, see HBASE-19731 for 
more details.
 EnvironmentEdgeManager.injectEdge(new NonRepeatedEnvironmentEdge());
 Configuration conf = TEST_UTIL.getConfiguration();
 conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
-MultiRowMutationEndpoint.class.getName());
+  Arrays.stream(cps).map(Class::getName).toArray(String[]::new));
 conf.setBoolean("hbase.table.sanity.checks", true); // enable for below 
tests
-conf.setLong(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, 600);
 // We need more than one region server in this test
 TEST_UTIL.startMiniCluster(SLAVES);
   }
 
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+initialize(MultiRowMutationEndpoint.class);
+  }
+
   @AfterClass
   public static void tearDownAfterClass() throws Exception {
 TEST_UTIL.shutdownMiniCluster();

http://git-wip-us.apache.org/repos/asf/hbase/blob/0152d5e2/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideWithCoprocessor.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideWithCoprocessor.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideWithCoprocessor.java
index 5bf70c4..37d0135 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideWithCoprocessor.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideWithCoprocessor.java
@@ -17,9 +17,7 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
 import org.apache.hadoop.hbase.regionserver.NoOpScanPolicyObserver;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
@@ -29,23 +27,18 @@ import org.junit.ClassRule;
 import org.junit.experimental.categories.Category;
 
 /**
- * Test all client operations with a coprocessor that
- * just implements the default flush/compact/scan policy.
+ * Test all client operations with a coprocessor that just implements the 
default flush/compact/scan
+ * policy.
  */
-@Category({LargeTests.class, ClientTests.class})
+@Category({ LargeTests.class, ClientTests.class })
 public class TestFromClientSideWithCoprocessor extends TestFromClientSide {
 
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
- 

hbase git commit: HBASE-20021 TestFromClientSideWithCoprocessor is flakey

2018-02-20 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master b7685307e -> 69d2becc7


HBASE-20021 TestFromClientSideWithCoprocessor is flakey


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/69d2becc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/69d2becc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/69d2becc

Branch: refs/heads/master
Commit: 69d2becc73deeb0217a816fe923d4bcd70f3d65f
Parents: b768530
Author: zhangduo 
Authored: Tue Feb 20 20:07:27 2018 +0800
Committer: Michael Stack 
Committed: Tue Feb 20 06:24:42 2018 -0800

--
 .../hadoop/hbase/client/TestFromClientSide.java| 17 ++---
 .../client/TestFromClientSideWithCoprocessor.java  | 17 +
 2 files changed, 15 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/69d2becc/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index b028880..29d3439 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -139,24 +139,27 @@ public class TestFromClientSide {
   @Rule
   public TestName name = new TestName();
 
-  @BeforeClass
-  public static void setUpBeforeClass() throws Exception {
+  protected static final void initialize(Class... cps) throws Exception {
 // Uncomment the following lines if more verbosity is needed for
 // debugging (see HBASE-12285 for details).
-//((Log4JLogger)RpcServer.LOG).getLogger().setLevel(Level.ALL);
-//((Log4JLogger)RpcClient.LOG).getLogger().setLevel(Level.ALL);
-//((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
+// ((Log4JLogger)RpcServer.LOG).getLogger().setLevel(Level.ALL);
+// ((Log4JLogger)RpcClient.LOG).getLogger().setLevel(Level.ALL);
+// ((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
 // make sure that we do not get the same ts twice, see HBASE-19731 for 
more details.
 EnvironmentEdgeManager.injectEdge(new NonRepeatedEnvironmentEdge());
 Configuration conf = TEST_UTIL.getConfiguration();
 conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
-MultiRowMutationEndpoint.class.getName());
+  Arrays.stream(cps).map(Class::getName).toArray(String[]::new));
 conf.setBoolean("hbase.table.sanity.checks", true); // enable for below 
tests
-conf.setLong(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, 600);
 // We need more than one region server in this test
 TEST_UTIL.startMiniCluster(SLAVES);
   }
 
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+initialize(MultiRowMutationEndpoint.class);
+  }
+
   @AfterClass
   public static void tearDownAfterClass() throws Exception {
 TEST_UTIL.shutdownMiniCluster();

http://git-wip-us.apache.org/repos/asf/hbase/blob/69d2becc/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideWithCoprocessor.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideWithCoprocessor.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideWithCoprocessor.java
index 5bf70c4..37d0135 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideWithCoprocessor.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideWithCoprocessor.java
@@ -17,9 +17,7 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
 import org.apache.hadoop.hbase.regionserver.NoOpScanPolicyObserver;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
@@ -29,23 +27,18 @@ import org.junit.ClassRule;
 import org.junit.experimental.categories.Category;
 
 /**
- * Test all client operations with a coprocessor that
- * just implements the default flush/compact/scan policy.
+ * Test all client operations with a coprocessor that just implements the 
default flush/compact/scan
+ * policy.
  */
-@Category({LargeTests.class, ClientTests.class})
+@Category({ LargeTests.class, ClientTests.class })
 public class TestFromClientSideWithCoprocessor extends TestFromClientSide {
 
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
- 

hbase git commit: HBASE-20020 Make sure we throw DoNotRetryIOException when ConnectionImplementation is closed

2018-02-20 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 391790ddb -> b7685307e


HBASE-20020 Make sure we throw DoNotRetryIOException when 
ConnectionImplementation is closed


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b7685307
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b7685307
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b7685307

Branch: refs/heads/master
Commit: b7685307e4cc72035fc1b8737faec2e06a7c97df
Parents: 391790d
Author: zhangduo 
Authored: Tue Feb 20 16:50:03 2018 +0800
Committer: Michael Stack 
Committed: Tue Feb 20 06:12:13 2018 -0800

--
 .../hadoop/hbase/MasterNotRunningException.java |  4 +-
 .../hadoop/hbase/client/ClusterConnection.java  | 30 ++-
 .../hbase/client/ConnectionImplementation.java  | 86 +---
 .../hadoop/hbase/client/ConnectionUtils.java|  8 +-
 .../org/apache/hadoop/hbase/client/HTable.java  |  5 +-
 .../hadoop/hbase/client/MasterCallable.java | 11 ++-
 .../hbase/client/TestSnapshotFromAdmin.java |  4 +-
 .../hbase/client/TestHBaseAdminNoCluster.java   |  4 +-
 8 files changed, 59 insertions(+), 93 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b7685307/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
index 1ff17ac..35cdecb 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
@@ -18,15 +18,13 @@
  */
 package org.apache.hadoop.hbase;
 
-import java.io.IOException;
-
 import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Thrown if the master is not running
  */
 @InterfaceAudience.Public
-public class MasterNotRunningException extends IOException {
+public class MasterNotRunningException extends HBaseIOException {
   private static final long serialVersionUID = (1L << 23) - 1L;
   /** default constructor */
   public MasterNotRunningException() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b7685307/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
index 7294559..3e055b0 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.client;
 
 import java.io.IOException;
 import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.MasterNotRunningException;
@@ -29,12 +28,12 @@ import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.ZooKeeperConnectionException;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicy;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
+import org.apache.yetus.audience.InterfaceAudience;
+
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService;
 
 /** Internal methods on Connection that should not be used by user code. */
 @InterfaceAudience.Private
@@ -224,7 +223,7 @@ public interface ClusterConnection extends Connection {
   /**
* Returns a {@link MasterKeepAliveConnection} to the active master
*/
-  MasterService.BlockingInterface getMaster() throws IOException;
+  MasterKeepAliveConnection getMaster() throws IOException;
 
   /**
* Get the admin service for master.
@@ -258,9 +257,8 @@ public interface ClusterConnection extends Connection {
* @return Location of row.
* @throws IOException if a remote or network exception occurs
*/
-  HRegionLocation getRegionLocation(TableName tableName, byte [] row,
-boolean reload)
-  throws IOException;
+  HRegionLocation getRegionLocation(TableName tableName, byte[] row, boolean 
reload)
+  throws IOException;
 
   /**
* Clear any caches that pertain to server name sn.
@@ -269,24 +267,6 @@ public interface ClusterConnection extends Connection {
   vo

hbase git commit: HBASE-20020 Make sure we throw DoNotRetryIOException when ConnectionImplementation is closed

2018-02-20 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 c1fe9f441 -> 34d3e847c


HBASE-20020 Make sure we throw DoNotRetryIOException when 
ConnectionImplementation is closed


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/34d3e847
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/34d3e847
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/34d3e847

Branch: refs/heads/branch-2
Commit: 34d3e847cc0190e1d1893460995835b9be9e9d7e
Parents: c1fe9f4
Author: zhangduo 
Authored: Tue Feb 20 16:50:03 2018 +0800
Committer: Michael Stack 
Committed: Tue Feb 20 06:10:45 2018 -0800

--
 .../hadoop/hbase/MasterNotRunningException.java |  4 +-
 .../hadoop/hbase/client/ClusterConnection.java  | 30 ++-
 .../hbase/client/ConnectionImplementation.java  | 86 +---
 .../hadoop/hbase/client/ConnectionUtils.java|  8 +-
 .../org/apache/hadoop/hbase/client/HTable.java  |  5 +-
 .../hadoop/hbase/client/MasterCallable.java | 11 ++-
 .../hbase/client/TestSnapshotFromAdmin.java |  4 +-
 .../hbase/client/TestHBaseAdminNoCluster.java   |  4 +-
 8 files changed, 59 insertions(+), 93 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/34d3e847/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
index 1ff17ac..35cdecb 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
@@ -18,15 +18,13 @@
  */
 package org.apache.hadoop.hbase;
 
-import java.io.IOException;
-
 import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Thrown if the master is not running
  */
 @InterfaceAudience.Public
-public class MasterNotRunningException extends IOException {
+public class MasterNotRunningException extends HBaseIOException {
   private static final long serialVersionUID = (1L << 23) - 1L;
   /** default constructor */
   public MasterNotRunningException() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/34d3e847/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
index 7294559..3e055b0 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.client;
 
 import java.io.IOException;
 import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.MasterNotRunningException;
@@ -29,12 +28,12 @@ import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.ZooKeeperConnectionException;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicy;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
+import org.apache.yetus.audience.InterfaceAudience;
+
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService;
 
 /** Internal methods on Connection that should not be used by user code. */
 @InterfaceAudience.Private
@@ -224,7 +223,7 @@ public interface ClusterConnection extends Connection {
   /**
* Returns a {@link MasterKeepAliveConnection} to the active master
*/
-  MasterService.BlockingInterface getMaster() throws IOException;
+  MasterKeepAliveConnection getMaster() throws IOException;
 
   /**
* Get the admin service for master.
@@ -258,9 +257,8 @@ public interface ClusterConnection extends Connection {
* @return Location of row.
* @throws IOException if a remote or network exception occurs
*/
-  HRegionLocation getRegionLocation(TableName tableName, byte [] row,
-boolean reload)
-  throws IOException;
+  HRegionLocation getRegionLocation(TableName tableName, byte[] row, boolean 
reload)
+  throws IOException;
 
   /**
* Clear any caches that pertain to server name sn.
@@ -269,24 +267,6 @@ public interface ClusterConnection extends Connection {