svn commit: r28951 - /dev/hbase/hbase-1.4.7RC0/

2018-08-24 Thread apurtell
Author: apurtell
Date: Fri Aug 24 23:39:25 2018
New Revision: 28951

Log:
Remove HBase 1.4.7 RC0 artifacts, not ready yet for voting

Removed:
dev/hbase/hbase-1.4.7RC0/



svn commit: r28949 - in /dev/hbase/hbase-1.4.7RC0: ./ compat-report.html hbase-1.4.7-bin.tar.gz hbase-1.4.7-bin.tar.gz.asc hbase-1.4.7-bin.tar.gz.sha512 hbase-1.4.7-src.tar.gz hbase-1.4.7-src.tar.gz.asc hbase-1.4.7-src.tar.gz.sha512

2018-08-24 Thread apurtell
Author: apurtell
Date: Fri Aug 24 23:31:41 2018
New Revision: 28949

Log:
Stage HBase 1.4.7RC0

Added:
dev/hbase/hbase-1.4.7RC0/
dev/hbase/hbase-1.4.7RC0/compat-report.html
dev/hbase/hbase-1.4.7RC0/hbase-1.4.7-bin.tar.gz   (with props)
dev/hbase/hbase-1.4.7RC0/hbase-1.4.7-bin.tar.gz.asc
dev/hbase/hbase-1.4.7RC0/hbase-1.4.7-bin.tar.gz.sha512
dev/hbase/hbase-1.4.7RC0/hbase-1.4.7-src.tar.gz   (with props)
dev/hbase/hbase-1.4.7RC0/hbase-1.4.7-src.tar.gz.asc
dev/hbase/hbase-1.4.7RC0/hbase-1.4.7-src.tar.gz.sha512
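
Each tarball above is staged together with a .sha512 checksum and an .asc signature so release voters can verify what they download. As a hedged illustration only (file names are taken from the listing above; this code is not part of the commit), recomputing and checking the SHA-512 in plain Java looks roughly like this:

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;

// Sketch only: recompute SHA-512 of a staged tarball and compare it with the digest
// published in the matching .sha512 file. Paths are illustrative.
public final class VerifySha512 {
  public static void main(String[] args) throws Exception {
    Path tarball = Path.of("hbase-1.4.7-bin.tar.gz");
    Path digestFile = Path.of("hbase-1.4.7-bin.tar.gz.sha512");

    MessageDigest md = MessageDigest.getInstance("SHA-512");
    try (InputStream in = Files.newInputStream(tarball)) {
      byte[] buf = new byte[8192];
      for (int n; (n = in.read(buf)) != -1; ) {
        md.update(buf, 0, n);
      }
    }
    StringBuilder hex = new StringBuilder();
    for (byte b : md.digest()) {
      hex.append(String.format("%02x", b));
    }

    // Digest files often group the hex output and prepend the file name, so strip
    // whitespace before looking for the computed value.
    String published = Files.readString(digestFile).toLowerCase().replaceAll("\\s+", "");
    System.out.println(published.contains(hex.toString()) ? "SHA-512 OK" : "SHA-512 MISMATCH");
  }
}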

Added: dev/hbase/hbase-1.4.7RC0/compat-report.html
==
--- dev/hbase/hbase-1.4.7RC0/compat-report.html (added)
+++ dev/hbase/hbase-1.4.7RC0/compat-report.html Fri Aug 24 23:31:41 2018
@@ -0,0 +1,466 @@
+[XHTML head, inline CSS, and report body of the "hbase: rel/1.4.6 to 1.4.7RC0 compatibility report" page: 466 added lines, truncated and partially mangled in this archive view]

hbase git commit: HBASE-21078 [amv2] CODE-BUG NPE in RTP doing Unassign

2018-08-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.1 e26ca63f8 -> d954031d5


HBASE-21078 [amv2] CODE-BUG NPE in RTP doing Unassign
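
The diffstat below gives the shape of the fix: MoveRegionProcedure and UnassignProcedure grow a few guard lines (note the new DoNotRetryRegionException import further down) and a new TestRegionMove2 covers the scenario. As a hedged illustration only, not the committed patch, the general idea of such a guard is to fail fast with a non-retriable exception instead of letting a missing region location surface later as a CODE-BUG NPE:

import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.DoNotRetryRegionException;
import org.apache.hadoop.hbase.client.RegionInfo;

// Hypothetical helper, for illustration only; the committed HBASE-21078 change differs in detail.
final class UnassignGuards {
  private UnassignGuards() {
  }

  // Reject the operation up front when the region has no known location, rather than
  // dereferencing a null ServerName deep inside the unassign/move procedure.
  static void checkHasLocation(RegionInfo region, ServerName location)
      throws DoNotRetryRegionException {
    if (location == null) {
      throw new DoNotRetryRegionException(
          "No online location for region " + region.getEncodedName() + "; cannot unassign");
    }
  }
}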


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d954031d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d954031d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d954031d

Branch: refs/heads/branch-2.1
Commit: d954031d50c6234d0942cf0f05879aebe42b3345
Parents: e26ca63
Author: Michael Stack 
Authored: Tue Aug 21 21:06:14 2018 -0700
Committer: Michael Stack 
Committed: Fri Aug 24 13:22:16 2018 -0700

--
 .../hadoop/hbase/procedure2/Procedure.java  |   5 +-
 .../hbase/procedure2/ProcedureExecutor.java |   3 +-
 .../master/assignment/MoveRegionProcedure.java  |  20 ++-
 .../master/assignment/UnassignProcedure.java|  23 ++-
 .../hbase/regionserver/HRegionServer.java   |   1 -
 .../master/assignment/TestRegionMove2.java  | 174 +++
 6 files changed, 214 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d954031d/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
index 83a91fd..2d30388 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
@@ -421,7 +421,10 @@ public abstract class Procedure implements Comparable

http://git-wip-us.apache.org/repos/asf/hbase/blob/d954031d/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index 695c7b0..f773bf9 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -1725,7 +1725,8 @@ public class ProcedureExecutor {
   // children have completed, move parent to front of the queue.
   store.update(parent);
   scheduler.addFront(parent);
-  LOG.info("Finished subprocedure(s) of " + parent + "; resume parent 
processing.");
+  LOG.info("Finished subprocedure pid={}, resume processing parent {}",
+  procedure.getProcId(), parent);
   return;
 }
   }
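
Besides the NPE fix itself, the hunk above also switches the log call from string concatenation to SLF4J's parameterized form and includes the finished child's procId. A standalone sketch of that idiom (class and values here are illustrative, not from the patch):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Illustrative only: the parameterized-logging idiom used in the hunk above.
final class LogIdiomDemo {
  private static final Logger LOG = LoggerFactory.getLogger(LogIdiomDemo.class);

  static void onChildFinished(long childProcId, Object parent) {
    // The {} placeholders defer message assembly until the logger knows INFO is enabled,
    // whereas "..." + parent + "..." always pays the concatenation cost.
    LOG.info("Finished subprocedure pid={}, resume processing parent {}", childProcId, parent);
  }
}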

http://git-wip-us.apache.org/repos/asf/hbase/blob/d954031d/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
index 6135ce1..968f5f1 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
@@ -24,17 +24,19 @@ import java.io.IOException;
 import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.DoNotRetryRegionException;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.master.RegionPlan;
 import 
org.apache.hadoop.hbase.master.procedure.AbstractStateMachineRegionProcedure;
 import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
 import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionState;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionStateData;
+import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Procedure that implements a RegionPlan.
@@ -55,6 +57,11 @@ public class MoveRegionProcedure extends 

hbase git commit: HBASE-21078 [amv2] CODE-BUG NPE in RTP doing Unassign

2018-08-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 6047fb05f -> a83073aff


HBASE-21078 [amv2] CODE-BUG NPE in RTP doing Unassign


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a83073af
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a83073af
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a83073af

Branch: refs/heads/branch-2
Commit: a83073aff0f2d4c02da051330a97444211cdc575
Parents: 6047fb0
Author: Michael Stack 
Authored: Tue Aug 21 21:06:14 2018 -0700
Committer: Michael Stack 
Committed: Fri Aug 24 13:22:45 2018 -0700

--
 .../hadoop/hbase/procedure2/Procedure.java  |   5 +-
 .../hbase/procedure2/ProcedureExecutor.java |   3 +-
 .../master/assignment/MoveRegionProcedure.java  |  20 ++-
 .../master/assignment/UnassignProcedure.java|  23 ++-
 .../hbase/regionserver/HRegionServer.java   |   1 -
 .../master/assignment/TestRegionMove2.java  | 174 +++
 6 files changed, 214 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a83073af/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
index 58757bb..896cc40 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
@@ -421,7 +421,10 @@ public abstract class Procedure implements Comparable

http://git-wip-us.apache.org/repos/asf/hbase/blob/a83073af/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index 464eaeb..27bdb9e 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -1668,7 +1668,8 @@ public class ProcedureExecutor {
   // children have completed, move parent to front of the queue.
   store.update(parent);
   scheduler.addFront(parent);
-  LOG.info("Finished subprocedure(s) of " + parent + "; resume parent 
processing.");
+  LOG.info("Finished subprocedure pid={}, resume processing parent {}",
+  procedure.getProcId(), parent);
   return;
 }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a83073af/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
index 6135ce1..968f5f1 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
@@ -24,17 +24,19 @@ import java.io.IOException;
 import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.DoNotRetryRegionException;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.master.RegionPlan;
 import 
org.apache.hadoop.hbase.master.procedure.AbstractStateMachineRegionProcedure;
 import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
 import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionState;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionStateData;
+import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Procedure that implements a RegionPlan.
@@ -55,6 +57,11 @@ public class MoveRegionProcedure extends 

hbase git commit: HBASE-21078 [amv2] CODE-BUG NPE in RTP doing Unassign

2018-08-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 5c130fc75 -> 2e885195a


HBASE-21078 [amv2] CODE-BUG NPE in RTP doing Unassign


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2e885195
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2e885195
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2e885195

Branch: refs/heads/branch-2.0
Commit: 2e885195a251a4a45f7f691571b978552f7effd5
Parents: 5c130fc
Author: Michael Stack 
Authored: Tue Aug 21 21:06:14 2018 -0700
Committer: Michael Stack 
Committed: Fri Aug 24 13:21:47 2018 -0700

--
 .../hadoop/hbase/procedure2/Procedure.java  |   5 +-
 .../hbase/procedure2/ProcedureExecutor.java |   3 +-
 .../master/assignment/MoveRegionProcedure.java  |  20 ++-
 .../master/assignment/UnassignProcedure.java|  23 ++-
 .../hbase/regionserver/HRegionServer.java   |   1 -
 .../master/assignment/TestRegionMove2.java  | 174 +++
 6 files changed, 214 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2e885195/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
index 83a91fd..2d30388 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
@@ -421,7 +421,10 @@ public abstract class Procedure implements Comparable

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e885195/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index 1e0ee79..aa6e757 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -1725,7 +1725,8 @@ public class ProcedureExecutor {
   // children have completed, move parent to front of the queue.
   store.update(parent);
   scheduler.addFront(parent);
-  LOG.info("Finished subprocedure(s) of " + parent + "; resume parent 
processing.");
+  LOG.info("Finished subprocedure pid={}, resume processing parent {}",
+  procedure.getProcId(), parent);
   return;
 }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e885195/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
index 6135ce1..968f5f1 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
@@ -24,17 +24,19 @@ import java.io.IOException;
 import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.DoNotRetryRegionException;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.master.RegionPlan;
 import 
org.apache.hadoop.hbase.master.procedure.AbstractStateMachineRegionProcedure;
 import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
 import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionState;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionStateData;
+import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Procedure that implements a RegionPlan.
@@ -55,6 +57,11 @@ public class MoveRegionProcedure extends 

hbase git commit: HBASE-21113 Apply the branch-2 version of HBASE-21095, The timeout retry logic for several procedures are broken after master restarts

2018-08-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 19b722e84 -> 5c130fc75


HBASE-21113 Apply the branch-2 version of HBASE-21095, The timeout retry
logic for several procedures are broken after master restarts

I applied the patch HBASE-21095 and then reverted it so I could apply the
patch as HBASE-21113 (by reverting the HBASE-21095 revert but pushing
with this message!).

Revert "Revert "HBASE-21095 The timeout retry logic for several procedures are 
broken after master restarts""

This reverts commit 19b722e8412b48d6318a9599b702995e99099d7e.
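
In substance (see the RegionTransitionProcedure diff below), the fix drops the ProcedureEvent-based suspension and parks the failed procedure on its own timeout instead: record a backoff, move to WAITING_TIMEOUT, and throw ProcedureSuspendedException; when the timeout fires, setTimeoutFailure flips the state back to RUNNABLE and pushes the procedure to the front of the scheduler queue. A toy, self-contained model of that park-and-requeue pattern (hypothetical classes, not the HBase procedure framework):

import java.util.ArrayDeque;
import java.util.Deque;

// Toy model of the retry-by-timeout pattern adopted in the diff below: on failure a task
// records a backoff and goes WAITING_TIMEOUT; when the timeout "fires" the driver flips it
// back to RUNNABLE and re-queues it at the front. Hypothetical classes, not HBase code.
final class TimeoutRetryDemo {
  enum State { RUNNABLE, WAITING_TIMEOUT }

  static final class Task {
    State state = State.RUNNABLE;
    int attempts = 0;
    long timeoutMs = 0;
  }

  public static void main(String[] args) {
    Deque<Task> runQueue = new ArrayDeque<>();
    runQueue.add(new Task());

    while (!runQueue.isEmpty()) {
      Task t = runQueue.poll();
      t.attempts++;
      boolean failed = t.attempts < 3;            // pretend the first two attempts fail
      if (failed) {
        t.timeoutMs = 1000L << t.attempts;        // exponential backoff
        t.state = State.WAITING_TIMEOUT;          // park: nothing wakes it but its own timeout
        System.out.println("attempt " + t.attempts + " failed, suspending " + t.timeoutMs + " ms");
        // ... a real scheduler would wait t.timeoutMs here; then the timeout handler runs:
        t.state = State.RUNNABLE;                 // the setTimeoutFailure() equivalent
        runQueue.addFirst(t);                     // re-queue at the front, like addFront(this)
      } else {
        System.out.println("attempt " + t.attempts + " succeeded");
      }
    }
  }
}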


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5c130fc7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5c130fc7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5c130fc7

Branch: refs/heads/branch-2.0
Commit: 5c130fc75705525f75d519481b0c5dbc533a8082
Parents: 19b722e
Author: Michael Stack 
Authored: Fri Aug 24 12:36:25 2018 -0700
Committer: Michael Stack 
Committed: Fri Aug 24 12:36:25 2018 -0700

--
 .../assignment/RegionTransitionProcedure.java | 11 ---
 .../assignment/TestUnexpectedStateException.java  | 18 ++
 2 files changed, 18 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5c130fc7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
index de39f4c..c9d141a 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
@@ -371,12 +371,9 @@ public abstract class RegionTransitionProcedure
   LOG.warn("Failed transition, suspend {}secs {}; {}; waiting on rectified 
condition fixed " +
   "by other Procedure or operator intervention", backoff / 1000, 
this,
   regionNode.toShortString(), e);
-  getRegionState(env).getProcedureEvent().suspend();
-  if (getRegionState(env).getProcedureEvent().suspendIfNotReady(this)) {
-setTimeout(Math.toIntExact(backoff));
-setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
-throw new ProcedureSuspendedException();
-  }
+  setTimeout(Math.toIntExact(backoff));
+  setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
+  throw new ProcedureSuspendedException();
 }
 
 return new Procedure[] {this};
@@ -394,7 +391,7 @@ public abstract class RegionTransitionProcedure
   @Override
   protected synchronized boolean setTimeoutFailure(MasterProcedureEnv env) {
 setState(ProcedureProtos.ProcedureState.RUNNABLE);
-getRegionState(env).getProcedureEvent().wake(env.getProcedureScheduler());
+env.getProcedureScheduler().addFront(this);
 return false; // 'false' means that this procedure handled the timeout
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/5c130fc7/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
index 0f62f8e..16648c0 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
@@ -24,17 +24,15 @@ import java.util.Iterator;
 import java.util.List;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hbase.thirdparty.com.google.gson.JsonArray;
-import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
-import org.apache.hbase.thirdparty.com.google.gson.JsonObject;
-import org.apache.hbase.thirdparty.com.google.gson.JsonParser;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ 

hbase git commit: Revert "Revert "HBASE-21095 The timeout retry logic for several procedures are broken after master restarts""

2018-08-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.1 4978db810 -> e26ca63f8


Revert "Revert "HBASE-21095 The timeout retry logic for several procedures are 
broken after master restarts""

HBASE-21113 Apply the branch-2 version of HBASE-21095, The timeout retry
logic for several procedures are broken after master restarts

I applied the patch HBASE-21095 and then reverted it so I could apply the
patch as HBASE-21113 (by reverting the HBASE-21095 revert but pushing
with this message!).

This reverts commit 4978db81028f3a955932589af77599b53d909a46.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e26ca63f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e26ca63f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e26ca63f

Branch: refs/heads/branch-2.1
Commit: e26ca63f8801440f242560c81e5135ec809440f1
Parents: 4978db8
Author: Michael Stack 
Authored: Fri Aug 24 12:35:29 2018 -0700
Committer: Michael Stack 
Committed: Fri Aug 24 12:35:29 2018 -0700

--
 .../assignment/RegionTransitionProcedure.java | 11 ---
 .../assignment/TestUnexpectedStateException.java  | 18 ++
 2 files changed, 18 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e26ca63f/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
index 0db8676..c10bf2d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
@@ -364,12 +364,9 @@ public abstract class RegionTransitionProcedure
   LOG.warn("Failed transition, suspend {}secs {}; {}; waiting on rectified 
condition fixed " +
   "by other Procedure or operator intervention", backoff / 1000, 
this,
   regionNode.toShortString(), e);
-  getRegionState(env).getProcedureEvent().suspend();
-  if (getRegionState(env).getProcedureEvent().suspendIfNotReady(this)) {
-setTimeout(Math.toIntExact(backoff));
-setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
-throw new ProcedureSuspendedException();
-  }
+  setTimeout(Math.toIntExact(backoff));
+  setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
+  throw new ProcedureSuspendedException();
 }
 
 return new Procedure[] {this};
@@ -387,7 +384,7 @@ public abstract class RegionTransitionProcedure
   @Override
   protected synchronized boolean setTimeoutFailure(MasterProcedureEnv env) {
 setState(ProcedureProtos.ProcedureState.RUNNABLE);
-getRegionState(env).getProcedureEvent().wake(env.getProcedureScheduler());
+env.getProcedureScheduler().addFront(this);
 return false; // 'false' means that this procedure handled the timeout
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/e26ca63f/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
index 0f62f8e..16648c0 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
@@ -24,17 +24,15 @@ import java.util.Iterator;
 import java.util.List;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hbase.thirdparty.com.google.gson.JsonArray;
-import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
-import org.apache.hbase.thirdparty.com.google.gson.JsonObject;
-import org.apache.hbase.thirdparty.com.google.gson.JsonParser;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ 

hbase git commit: HBASE-21113 Apply the branch-2 version of HBASE-21095, The timeout retry logic for several procedures are broken after master restarts

2018-08-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 a220566b9 -> 6047fb05f


HBASE-21113 Apply the branch-2 version of HBASE-21095, The timeout retry logic 
for several procedures are broken after master restarts

I applied the patch HBASE-21095 and then reverted it so I could apply the
patch as HBASE-21113 (by reverting the HBASE-21095 revert but pushing
with this message!).

Revert "Revert "HBASE-21095 The timeout retry logic for several procedures are 
broken after master restarts""

This reverts commit a220566b9875615e92a848d109b17fcfc1566b19.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6047fb05
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6047fb05
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6047fb05

Branch: refs/heads/branch-2
Commit: 6047fb05f51fa4a1551157ac498b8d86ff4f7c2b
Parents: a220566
Author: Michael Stack 
Authored: Fri Aug 24 12:32:33 2018 -0700
Committer: Michael Stack 
Committed: Fri Aug 24 12:32:33 2018 -0700

--
 .../assignment/RegionTransitionProcedure.java | 11 ---
 .../assignment/TestUnexpectedStateException.java  | 18 ++
 2 files changed, 18 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6047fb05/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
index 0db8676..c10bf2d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
@@ -364,12 +364,9 @@ public abstract class RegionTransitionProcedure
   LOG.warn("Failed transition, suspend {}secs {}; {}; waiting on rectified 
condition fixed " +
   "by other Procedure or operator intervention", backoff / 1000, 
this,
   regionNode.toShortString(), e);
-  getRegionState(env).getProcedureEvent().suspend();
-  if (getRegionState(env).getProcedureEvent().suspendIfNotReady(this)) {
-setTimeout(Math.toIntExact(backoff));
-setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
-throw new ProcedureSuspendedException();
-  }
+  setTimeout(Math.toIntExact(backoff));
+  setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
+  throw new ProcedureSuspendedException();
 }
 
 return new Procedure[] {this};
@@ -387,7 +384,7 @@ public abstract class RegionTransitionProcedure
   @Override
   protected synchronized boolean setTimeoutFailure(MasterProcedureEnv env) {
 setState(ProcedureProtos.ProcedureState.RUNNABLE);
-getRegionState(env).getProcedureEvent().wake(env.getProcedureScheduler());
+env.getProcedureScheduler().addFront(this);
 return false; // 'false' means that this procedure handled the timeout
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/6047fb05/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
index 0f62f8e..16648c0 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
@@ -24,17 +24,15 @@ import java.util.Iterator;
 import java.util.List;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hbase.thirdparty.com.google.gson.JsonArray;
-import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
-import org.apache.hbase.thirdparty.com.google.gson.JsonObject;
-import org.apache.hbase.thirdparty.com.google.gson.JsonParser;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ 

hbase git commit: Revert "HBASE-21095 The timeout retry logic for several procedures are broken after master restarts"

2018-08-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 09be1d0d5 -> a220566b9


Revert "HBASE-21095 The timeout retry logic for several procedures are broken 
after master restarts"

This reverts commit 09be1d0d571f861a8f0c47560ebf120f34a44119.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a220566b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a220566b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a220566b

Branch: refs/heads/branch-2
Commit: a220566b9875615e92a848d109b17fcfc1566b19
Parents: 09be1d0
Author: Michael Stack 
Authored: Fri Aug 24 12:24:17 2018 -0700
Committer: Michael Stack 
Committed: Fri Aug 24 12:24:17 2018 -0700

--
 .../assignment/RegionTransitionProcedure.java | 11 +++
 .../assignment/TestUnexpectedStateException.java  | 18 --
 2 files changed, 11 insertions(+), 18 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a220566b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
index c10bf2d..0db8676 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
@@ -364,9 +364,12 @@ public abstract class RegionTransitionProcedure
   LOG.warn("Failed transition, suspend {}secs {}; {}; waiting on rectified 
condition fixed " +
   "by other Procedure or operator intervention", backoff / 1000, 
this,
   regionNode.toShortString(), e);
-  setTimeout(Math.toIntExact(backoff));
-  setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
-  throw new ProcedureSuspendedException();
+  getRegionState(env).getProcedureEvent().suspend();
+  if (getRegionState(env).getProcedureEvent().suspendIfNotReady(this)) {
+setTimeout(Math.toIntExact(backoff));
+setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
+throw new ProcedureSuspendedException();
+  }
 }
 
 return new Procedure[] {this};
@@ -384,7 +387,7 @@ public abstract class RegionTransitionProcedure
   @Override
   protected synchronized boolean setTimeoutFailure(MasterProcedureEnv env) {
 setState(ProcedureProtos.ProcedureState.RUNNABLE);
-env.getProcedureScheduler().addFront(this);
+getRegionState(env).getProcedureEvent().wake(env.getProcedureScheduler());
 return false; // 'false' means that this procedure handled the timeout
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a220566b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
index 16648c0..0f62f8e 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
@@ -24,15 +24,17 @@ import java.util.Iterator;
 import java.util.List;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.RegionInfo;
-import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
+import org.apache.hbase.thirdparty.com.google.gson.JsonArray;
+import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
+import org.apache.hbase.thirdparty.com.google.gson.JsonObject;
+import org.apache.hbase.thirdparty.com.google.gson.JsonParser;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -44,11 +46,6 @@ import org.junit.rules.TestName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hbase.thirdparty.com.google.gson.JsonArray;
-import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
-import org.apache.hbase.thirdparty.com.google.gson.JsonObject;
-import 

hbase git commit: Revert "HBASE-21095 The timeout retry logic for several procedures are broken after master restarts"

2018-08-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 dae6b2f3f -> 19b722e84


Revert "HBASE-21095 The timeout retry logic for several procedures are broken 
after master restarts"

This reverts commit dae6b2f3fbeb0e2af21cac9a3a3255416fbc9bf2.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/19b722e8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/19b722e8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/19b722e8

Branch: refs/heads/branch-2.0
Commit: 19b722e8412b48d6318a9599b702995e99099d7e
Parents: dae6b2f
Author: Michael Stack 
Authored: Fri Aug 24 12:24:51 2018 -0700
Committer: Michael Stack 
Committed: Fri Aug 24 12:24:51 2018 -0700

--
 .../assignment/RegionTransitionProcedure.java | 11 +++
 .../assignment/TestUnexpectedStateException.java  | 18 --
 2 files changed, 11 insertions(+), 18 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/19b722e8/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
index c9d141a..de39f4c 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
@@ -371,9 +371,12 @@ public abstract class RegionTransitionProcedure
   LOG.warn("Failed transition, suspend {}secs {}; {}; waiting on rectified 
condition fixed " +
   "by other Procedure or operator intervention", backoff / 1000, 
this,
   regionNode.toShortString(), e);
-  setTimeout(Math.toIntExact(backoff));
-  setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
-  throw new ProcedureSuspendedException();
+  getRegionState(env).getProcedureEvent().suspend();
+  if (getRegionState(env).getProcedureEvent().suspendIfNotReady(this)) {
+setTimeout(Math.toIntExact(backoff));
+setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
+throw new ProcedureSuspendedException();
+  }
 }
 
 return new Procedure[] {this};
@@ -391,7 +394,7 @@ public abstract class RegionTransitionProcedure
   @Override
   protected synchronized boolean setTimeoutFailure(MasterProcedureEnv env) {
 setState(ProcedureProtos.ProcedureState.RUNNABLE);
-env.getProcedureScheduler().addFront(this);
+getRegionState(env).getProcedureEvent().wake(env.getProcedureScheduler());
 return false; // 'false' means that this procedure handled the timeout
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/19b722e8/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
index 16648c0..0f62f8e 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
@@ -24,15 +24,17 @@ import java.util.Iterator;
 import java.util.List;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.RegionInfo;
-import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
+import org.apache.hbase.thirdparty.com.google.gson.JsonArray;
+import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
+import org.apache.hbase.thirdparty.com.google.gson.JsonObject;
+import org.apache.hbase.thirdparty.com.google.gson.JsonParser;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -44,11 +46,6 @@ import org.junit.rules.TestName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hbase.thirdparty.com.google.gson.JsonArray;
-import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
-import org.apache.hbase.thirdparty.com.google.gson.JsonObject;
-import 

hbase git commit: Revert "HBASE-21095 The timeout retry logic for several procedures are broken after master restarts"

2018-08-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.1 b82cd670c -> 4978db810


Revert "HBASE-21095 The timeout retry logic for several procedures are broken 
after master restarts"

This reverts commit b82cd670c3f473fde937c5b8445acc83ca76c5c6.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4978db81
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4978db81
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4978db81

Branch: refs/heads/branch-2.1
Commit: 4978db81028f3a955932589af77599b53d909a46
Parents: b82cd67
Author: Michael Stack 
Authored: Fri Aug 24 12:24:32 2018 -0700
Committer: Michael Stack 
Committed: Fri Aug 24 12:24:32 2018 -0700

--
 .../assignment/RegionTransitionProcedure.java | 11 +++
 .../assignment/TestUnexpectedStateException.java  | 18 --
 2 files changed, 11 insertions(+), 18 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4978db81/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
index c10bf2d..0db8676 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
@@ -364,9 +364,12 @@ public abstract class RegionTransitionProcedure
   LOG.warn("Failed transition, suspend {}secs {}; {}; waiting on rectified 
condition fixed " +
   "by other Procedure or operator intervention", backoff / 1000, 
this,
   regionNode.toShortString(), e);
-  setTimeout(Math.toIntExact(backoff));
-  setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
-  throw new ProcedureSuspendedException();
+  getRegionState(env).getProcedureEvent().suspend();
+  if (getRegionState(env).getProcedureEvent().suspendIfNotReady(this)) {
+setTimeout(Math.toIntExact(backoff));
+setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
+throw new ProcedureSuspendedException();
+  }
 }
 
 return new Procedure[] {this};
@@ -384,7 +387,7 @@ public abstract class RegionTransitionProcedure
   @Override
   protected synchronized boolean setTimeoutFailure(MasterProcedureEnv env) {
 setState(ProcedureProtos.ProcedureState.RUNNABLE);
-env.getProcedureScheduler().addFront(this);
+getRegionState(env).getProcedureEvent().wake(env.getProcedureScheduler());
 return false; // 'false' means that this procedure handled the timeout
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/4978db81/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
index 16648c0..0f62f8e 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
@@ -24,15 +24,17 @@ import java.util.Iterator;
 import java.util.List;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.RegionInfo;
-import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
+import org.apache.hbase.thirdparty.com.google.gson.JsonArray;
+import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
+import org.apache.hbase.thirdparty.com.google.gson.JsonObject;
+import org.apache.hbase.thirdparty.com.google.gson.JsonParser;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -44,11 +46,6 @@ import org.junit.rules.TestName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hbase.thirdparty.com.google.gson.JsonArray;
-import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
-import org.apache.hbase.thirdparty.com.google.gson.JsonObject;
-import 

hbase git commit: HBASE-21095 The timeout retry logic for several procedures are broken after master restarts

2018-08-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 2ea45e67d -> 09be1d0d5


HBASE-21095 The timeout retry logic for several procedures are broken after 
master restarts


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/09be1d0d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/09be1d0d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/09be1d0d

Branch: refs/heads/branch-2
Commit: 09be1d0d571f861a8f0c47560ebf120f34a44119
Parents: 2ea45e6
Author: Allan Yang 
Authored: Fri Aug 24 12:19:47 2018 -0700
Committer: Michael Stack 
Committed: Fri Aug 24 12:21:01 2018 -0700

--
 .../assignment/RegionTransitionProcedure.java | 11 ---
 .../assignment/TestUnexpectedStateException.java  | 18 ++
 2 files changed, 18 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/09be1d0d/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
index 0db8676..c10bf2d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
@@ -364,12 +364,9 @@ public abstract class RegionTransitionProcedure
   LOG.warn("Failed transition, suspend {}secs {}; {}; waiting on rectified 
condition fixed " +
   "by other Procedure or operator intervention", backoff / 1000, 
this,
   regionNode.toShortString(), e);
-  getRegionState(env).getProcedureEvent().suspend();
-  if (getRegionState(env).getProcedureEvent().suspendIfNotReady(this)) {
-setTimeout(Math.toIntExact(backoff));
-setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
-throw new ProcedureSuspendedException();
-  }
+  setTimeout(Math.toIntExact(backoff));
+  setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
+  throw new ProcedureSuspendedException();
 }
 
 return new Procedure[] {this};
@@ -387,7 +384,7 @@ public abstract class RegionTransitionProcedure
   @Override
   protected synchronized boolean setTimeoutFailure(MasterProcedureEnv env) {
 setState(ProcedureProtos.ProcedureState.RUNNABLE);
-getRegionState(env).getProcedureEvent().wake(env.getProcedureScheduler());
+env.getProcedureScheduler().addFront(this);
 return false; // 'false' means that this procedure handled the timeout
   }
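
A small detail in the first hunk of this diff: the computed backoff is a long, but it is handed to setTimeout through Math.toIntExact, which throws rather than silently truncating if the value ever exceeds the int range. A quick illustration of that behavior (values here are made up):

// Illustration of Math.toIntExact as used above: safe narrowing that fails loudly on
// overflow instead of wrapping around. Values are arbitrary.
public final class ToIntExactDemo {
  public static void main(String[] args) {
    long smallBackoffMs = 30_000L;
    System.out.println(Math.toIntExact(smallBackoffMs));      // prints 30000

    long hugeBackoffMs = 10L * Integer.MAX_VALUE;
    try {
      Math.toIntExact(hugeBackoffMs);                         // does not fit in an int
    } catch (ArithmeticException e) {
      System.out.println("overflow detected: " + e.getMessage());
    }
  }
}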
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/09be1d0d/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
index 0f62f8e..16648c0 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
@@ -24,17 +24,15 @@ import java.util.Iterator;
 import java.util.List;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hbase.thirdparty.com.google.gson.JsonArray;
-import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
-import org.apache.hbase.thirdparty.com.google.gson.JsonObject;
-import org.apache.hbase.thirdparty.com.google.gson.JsonParser;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -46,6 +44,11 @@ import org.junit.rules.TestName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.google.gson.JsonArray;
+import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
+import org.apache.hbase.thirdparty.com.google.gson.JsonObject;
+import org.apache.hbase.thirdparty.com.google.gson.JsonParser;
+
 /**
  * Tests for 

hbase git commit: HBASE-21095 The timeout retry logic for several procedures are broken after master restarts

2018-08-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.1 66add5523 -> b82cd670c


HBASE-21095 The timeout retry logic for several procedures are broken after 
master restarts


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b82cd670
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b82cd670
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b82cd670

Branch: refs/heads/branch-2.1
Commit: b82cd670c3f473fde937c5b8445acc83ca76c5c6
Parents: 66add55
Author: Allan Yang 
Authored: Fri Aug 24 12:19:47 2018 -0700
Committer: Michael Stack 
Committed: Fri Aug 24 12:20:43 2018 -0700

--
 .../assignment/RegionTransitionProcedure.java | 11 ---
 .../assignment/TestUnexpectedStateException.java  | 18 ++
 2 files changed, 18 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b82cd670/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
index 0db8676..c10bf2d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
@@ -364,12 +364,9 @@ public abstract class RegionTransitionProcedure
   LOG.warn("Failed transition, suspend {}secs {}; {}; waiting on rectified 
condition fixed " +
   "by other Procedure or operator intervention", backoff / 1000, 
this,
   regionNode.toShortString(), e);
-  getRegionState(env).getProcedureEvent().suspend();
-  if (getRegionState(env).getProcedureEvent().suspendIfNotReady(this)) {
-setTimeout(Math.toIntExact(backoff));
-setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
-throw new ProcedureSuspendedException();
-  }
+  setTimeout(Math.toIntExact(backoff));
+  setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
+  throw new ProcedureSuspendedException();
 }
 
 return new Procedure[] {this};
@@ -387,7 +384,7 @@ public abstract class RegionTransitionProcedure
   @Override
   protected synchronized boolean setTimeoutFailure(MasterProcedureEnv env) {
 setState(ProcedureProtos.ProcedureState.RUNNABLE);
-getRegionState(env).getProcedureEvent().wake(env.getProcedureScheduler());
+env.getProcedureScheduler().addFront(this);
 return false; // 'false' means that this procedure handled the timeout
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b82cd670/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
index 0f62f8e..16648c0 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
@@ -24,17 +24,15 @@ import java.util.Iterator;
 import java.util.List;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hbase.thirdparty.com.google.gson.JsonArray;
-import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
-import org.apache.hbase.thirdparty.com.google.gson.JsonObject;
-import org.apache.hbase.thirdparty.com.google.gson.JsonParser;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -46,6 +44,11 @@ import org.junit.rules.TestName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.google.gson.JsonArray;
+import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
+import org.apache.hbase.thirdparty.com.google.gson.JsonObject;
+import org.apache.hbase.thirdparty.com.google.gson.JsonParser;
+
 /**
  * Tests 

hbase git commit: HBASE-21095 The timeout retry logic for several procedures are broken after master restarts

2018-08-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 ba452da4b -> dae6b2f3f


HBASE-21095 The timeout retry logic for several procedures are broken after 
master restarts


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/dae6b2f3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/dae6b2f3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/dae6b2f3

Branch: refs/heads/branch-2.0
Commit: dae6b2f3fbeb0e2af21cac9a3a3255416fbc9bf2
Parents: ba452da
Author: Allan Yang 
Authored: Fri Aug 24 12:19:47 2018 -0700
Committer: Michael Stack 
Committed: Fri Aug 24 12:19:47 2018 -0700

--
 .../assignment/RegionTransitionProcedure.java | 11 ---
 .../assignment/TestUnexpectedStateException.java  | 18 ++
 2 files changed, 18 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/dae6b2f3/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
index de39f4c..c9d141a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
@@ -371,12 +371,9 @@ public abstract class RegionTransitionProcedure
   LOG.warn("Failed transition, suspend {}secs {}; {}; waiting on rectified 
condition fixed " +
   "by other Procedure or operator intervention", backoff / 1000, 
this,
   regionNode.toShortString(), e);
-  getRegionState(env).getProcedureEvent().suspend();
-  if (getRegionState(env).getProcedureEvent().suspendIfNotReady(this)) {
-setTimeout(Math.toIntExact(backoff));
-setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
-throw new ProcedureSuspendedException();
-  }
+  setTimeout(Math.toIntExact(backoff));
+  setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
+  throw new ProcedureSuspendedException();
 }
 
 return new Procedure[] {this};
@@ -394,7 +391,7 @@ public abstract class RegionTransitionProcedure
   @Override
   protected synchronized boolean setTimeoutFailure(MasterProcedureEnv env) {
 setState(ProcedureProtos.ProcedureState.RUNNABLE);
-getRegionState(env).getProcedureEvent().wake(env.getProcedureScheduler());
+env.getProcedureScheduler().addFront(this);
 return false; // 'false' means that this procedure handled the timeout
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/dae6b2f3/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
index 0f62f8e..16648c0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestUnexpectedStateException.java
@@ -24,17 +24,15 @@ import java.util.Iterator;
 import java.util.List;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hbase.thirdparty.com.google.gson.JsonArray;
-import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
-import org.apache.hbase.thirdparty.com.google.gson.JsonObject;
-import org.apache.hbase.thirdparty.com.google.gson.JsonParser;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -46,6 +44,11 @@ import org.junit.rules.TestName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.google.gson.JsonArray;
+import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
+import org.apache.hbase.thirdparty.com.google.gson.JsonObject;
+import org.apache.hbase.thirdparty.com.google.gson.JsonParser;
+
 /**
  * Tests 
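
For readers skimming the two hunks above: the fix stops suspending the procedure on the region's ProcedureEvent (an in-memory latch that nothing re-arms once the master restarts) and instead suspends purely on a timeout, re-queueing itself at the front of the procedure scheduler when the timeout fires. Below is a minimal, self-contained sketch of that retry-by-timeout shape in plain Java; it is illustrative only, not HBase code, and the RetryByTimeout/RUN_QUEUE names are invented for the example.

import java.util.concurrent.BlockingDeque;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class RetryByTimeout {
  // Stand-in for the procedure scheduler's run queue; addFirst() mirrors addFront(this).
  static final BlockingDeque<Runnable> RUN_QUEUE = new LinkedBlockingDeque<>();
  static final ScheduledExecutorService TIMER = Executors.newSingleThreadScheduledExecutor();

  static void execute(Runnable transition, int attempt) {
    try {
      transition.run();                                       // the work we are driving
      System.out.println("transition succeeded on attempt " + attempt);
    } catch (RuntimeException e) {
      // Suspend on a timeout only (the WAITING_TIMEOUT idea); no in-memory event involved.
      long backoffMs = Math.min(60_000L, 1000L << Math.min(attempt, 6));
      TIMER.schedule(
          () -> RUN_QUEUE.addFirst(() -> execute(transition, attempt + 1)),
          backoffMs, TimeUnit.MILLISECONDS);
    }
  }

  public static void main(String[] args) throws InterruptedException {
    final int[] tries = {0};
    execute(() -> { if (++tries[0] < 3) throw new RuntimeException("not yet"); }, 1);
    Runnable next;
    while ((next = RUN_QUEUE.poll(10, TimeUnit.SECONDS)) != null) {  // drain like a worker thread
      next.run();
    }
    TIMER.shutdownNow();
  }
}

The subject line suggests why this matters: a timeout-driven suspend can be resumed after a master restart, whereas the old event-driven suspend depended on state that lived only in the dead master's memory.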

hbase git commit: HBASE-18477 disable nightly tests on branch.

2018-08-24 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18477 2355586d9 -> ccabf8feb


HBASE-18477 disable nightly tests on branch.

Signed-off-by: Zach York 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ccabf8fe
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ccabf8fe
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ccabf8fe

Branch: refs/heads/HBASE-18477
Commit: ccabf8febe30a7e1f93e197e393c9a3d15fc4575
Parents: 2355586
Author: Sean Busbey 
Authored: Fri Aug 24 13:06:08 2018 -0500
Committer: Sean Busbey 
Committed: Fri Aug 24 13:06:35 2018 -0500

--
 dev-support/Jenkinsfile | 706 ---
 1 file changed, 706 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ccabf8fe/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
deleted file mode 100644
index 59d3227..000
--- a/dev-support/Jenkinsfile
+++ /dev/null
@@ -1,706 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-pipeline {
-  agent {
-node {
-  label 'ubuntu'
-}
-  }
-  triggers {
-cron('@daily')
-  }
-  options {
-buildDiscarder(logRotator(numToKeepStr: '30'))
-timeout (time: 9, unit: 'HOURS')
-timestamps()
-skipDefaultCheckout()
-  }
-  environment {
-YETUS_RELEASE = '0.7.0'
-// where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
-OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
-OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
-OUTPUT_DIR_RELATIVE_HADOOP2 = 'output-jdk8-hadoop2'
-OUTPUT_DIR_RELATIVE_HADOOP3 = 'output-jdk8-hadoop3'
-
-PROJECT = 'hbase'
-PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
-PERSONALITY_FILE = 'tools/personality.sh'
-// This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag.
-AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
-WHITESPACE_IGNORE_LIST = '.*/generated/.*'
-// output from surefire; sadly the archive function in yetus only works on file names.
-ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
-// These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
-TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
-// Flaky urls for different branches. Replace '-' and '.' in branch name by '_' because those
-// characters are not allowed in bash variable name.
-// Not excluding flakies from the nightly build for now.
-// EXCLUDE_TESTS_URL_master = 'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
-// EXCLUDE_TESTS_URL_branch_2 = 'https://builds.apache.org/job/HBase-Find-Flaky-Tests-branch2.0/lastSuccessfulBuild/artifact/excludes/'
-  }
-  parameters {
-booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
-
-Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
-booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
-  }
-  stages {
-stage ('scm-checkout') {
-  steps {
-dir('component') {
-  checkout scm
-}
-  }
-}
-stage ('thirdparty installs') {
-  parallel {
-stage ('yetus install') {
-  steps {
-// directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
-dir('downloads-yetus') {
-  // can't just do a simple echo or the directory won't be 

hbase git commit: HBASE-21072 Block out HBCK1 in hbase2 Write the hbase-1.x hbck1 lock file to block out hbck1 instances writing state to an hbase-2.x cluster (could do damage). Set hbase.write.hbck1.

2018-08-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master a452487a9 -> 86b35b268


HBASE-21072 Block out HBCK1 in hbase2
Write the hbase-1.x hbck1 lock file to block out hbck1 instances writing
state to an hbase-2.x cluster (could do damage).
Set hbase.write.hbck1.lock.file to false to disable this writing.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/86b35b26
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/86b35b26
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/86b35b26

Branch: refs/heads/master
Commit: 86b35b26870be3a04304a4483c08fcf7c50d55f5
Parents: a452487
Author: Michael Stack 
Authored: Wed Aug 22 22:44:00 2018 -0700
Committer: Michael Stack 
Committed: Fri Aug 24 09:26:43 2018 -0700

--
 .../org/apache/hadoop/hbase/master/HMaster.java |   9 ++
 .../org/apache/hadoop/hbase/util/HBaseFsck.java | 109 +--
 .../apache/hadoop/hbase/master/TestMaster.java  |  42 ++-
 3 files changed, 126 insertions(+), 34 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/86b35b26/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 50794f4..92afa9c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -193,6 +193,7 @@ import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CompressionTest;
 import org.apache.hadoop.hbase.util.EncryptionTest;
+import org.apache.hadoop.hbase.util.HBaseFsck;
 import org.apache.hadoop.hbase.util.HFileArchiveUtil;
 import org.apache.hadoop.hbase.util.HasThread;
 import org.apache.hadoop.hbase.util.IdLock;
@@ -915,6 +916,14 @@ public class HMaster extends HRegionServer implements MasterServices {
 ZKClusterId.setClusterId(this.zooKeeper, fileSystemManager.getClusterId());
 this.clusterId = clusterId.toString();
 
+// Precaution. Put in place the old hbck1 lock file to fence out old hbase1s running their
+// hbck1s against an hbase2 cluster; it could do damage. To skip this behavior, set
+// hbase.write.hbck1.lock.file to false.
+if (this.conf.getBoolean("hbase.write.hbck1.lock.file", true)) {
+  HBaseFsck.checkAndMarkRunningHbck(this.conf,
+  HBaseFsck.createLockRetryCounterFactory(this.conf).create());
+}
+
 status.setStatus("Initialze ServerManager and schedule SCP for crash servers");
 this.serverManager = createServerManager(this);
 createProcedureExecutor();

http://git-wip-us.apache.org/repos/asf/hbase/blob/86b35b26/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 820a4e0..b43262d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -144,6 +144,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
@@ -164,7 +165,10 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminServic
 
 /**
  * HBaseFsck (hbck) is a tool for checking and repairing region consistency and
- * table integrity problems in a corrupted HBase.
+ * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not
+ * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.
+ * See hbck2 (HBASE-19121) for a hbck tool for hbase2.
+ *
  * 
  * Region consistency checks verify that hbase:meta, region deployment on region
  * servers and the state of data in HDFS (.regioninfo files) all are in
@@ -217,7 +221,12 @@ public class HBaseFsck extends Configured implements Closeable {
   private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;
   private static final int DEFAULT_MAX_MERGE = 5;
   private static final String TO_BE_LOADED = "to_be_loaded";
-  private static final String HBCK_LOCK_FILE = "hbase-hbck.lock";
+  /**
+   * Here is 
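
In short, unless hbase.write.hbck1.lock.file is set to false, the hbase-2 master now takes the hbase-hbck.lock file itself at startup, so an hbase-1.x hbck pointed at the cluster finds the lock held and backs off. Below is a rough, self-contained illustration of that fencing idea in plain Java; it uses java.nio.file and a made-up /tmp path rather than the HDFS location HBase actually uses, and the Hbck1Fence/fenceOutLegacyTool names are invented for the sketch.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class Hbck1Fence {
  // Stand-in for the lock file the legacy tool creates before it mutates cluster state.
  static final Path LOCK = Paths.get("/tmp/hbase-hbck.lock-demo");

  // What the new master startup block amounts to: hold the legacy lock so hbck1 cannot take it.
  static void fenceOutLegacyTool(boolean writeLockFile) throws IOException {
    if (!writeLockFile) {              // hbase.write.hbck1.lock.file=false skips the precaution
      return;
    }
    if (Files.notExists(LOCK)) {
      Files.createFile(LOCK);          // createFile fails if another holder beat us to it
    }
  }

  // What the legacy tool does: refuse to run while someone else holds the lock.
  static boolean legacyToolAllowedToRun() {
    return Files.notExists(LOCK);
  }

  public static void main(String[] args) throws IOException {
    fenceOutLegacyTool(true);
    System.out.println("hbck1 allowed to run? " + legacyToolAllowedToRun());  // prints false
    Files.deleteIfExists(LOCK);        // cleanup for the demo
  }
}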

hbase git commit: HBASE-21072 Block out HBCK1 in hbase2 Write the hbase-1.x hbck1 lock file to block out hbck1 instances writing state to an hbase-2.x cluster (could do damage). Set hbase.write.hbck1.

2018-08-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 c33af1e85 -> 2ea45e67d


HBASE-21072 Block out HBCK1 in hbase2
Write the hbase-1.x hbck1 lock file to block out hbck1 instances writing
state to an hbase-2.x cluster (could do damage).
Set hbase.write.hbck1.lock.file to false to disable this writing.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2ea45e67
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2ea45e67
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2ea45e67

Branch: refs/heads/branch-2
Commit: 2ea45e67d0f0c048dfefdc8230d70ffcd5e6dd5b
Parents: c33af1e
Author: Michael Stack 
Authored: Wed Aug 22 22:44:00 2018 -0700
Committer: Michael Stack 
Committed: Fri Aug 24 09:23:54 2018 -0700

--
 .../org/apache/hadoop/hbase/master/HMaster.java |   9 +-
 .../org/apache/hadoop/hbase/util/HBaseFsck.java | 109 +--
 .../apache/hadoop/hbase/master/TestMaster.java  |  43 +++-
 3 files changed, 127 insertions(+), 34 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2ea45e67/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index bf0c790..2a92104 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -185,6 +185,7 @@ import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CompressionTest;
 import org.apache.hadoop.hbase.util.EncryptionTest;
+import org.apache.hadoop.hbase.util.HBaseFsck;
 import org.apache.hadoop.hbase.util.HFileArchiveUtil;
 import org.apache.hadoop.hbase.util.HasThread;
 import org.apache.hadoop.hbase.util.IdLock;
@@ -864,7 +865,13 @@ public class HMaster extends HRegionServer implements MasterServices {
 ZKClusterId.setClusterId(this.zooKeeper, fileSystemManager.getClusterId());
 this.clusterId = clusterId.toString();
 
-
+// Precaution. Put in place the old hbck1 lock file to fence out old hbase1s running their
+// hbck1s against an hbase2 cluster; it could do damage. To skip this behavior, set
+// hbase.write.hbck1.lock.file to false.
+if (this.conf.getBoolean("hbase.write.hbck1.lock.file", true)) {
+  HBaseFsck.checkAndMarkRunningHbck(this.conf,
+  HBaseFsck.createLockRetryCounterFactory(this.conf).create());
+}
 
 status.setStatus("Initialze ServerManager and schedule SCP for crash servers");
 this.serverManager = createServerManager(this);

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ea45e67/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index c479759..992c986 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -144,6 +144,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
@@ -164,7 +165,10 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminServic
 
 /**
  * HBaseFsck (hbck) is a tool for checking and repairing region consistency and
- * table integrity problems in a corrupted HBase.
+ * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not
+ * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.
+ * See hbck2 (HBASE-19121) for a hbck tool for hbase2.
+ *
  * 
  * Region consistency checks verify that hbase:meta, region deployment on region
  * servers and the state of data in HDFS (.regioninfo files) all are in
@@ -217,7 +221,12 @@ public class HBaseFsck extends Configured implements Closeable {
   private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;
   private static final int DEFAULT_MAX_MERGE = 5;
   private static final String TO_BE_LOADED = "to_be_loaded";
-  private static final String HBCK_LOCK_FILE = "hbase-hbck.lock";
+  /**
+   * Here is where hbase-1.x used to 

hbase git commit: HBASE-21072 Block out HBCK1 in hbase2 Write the hbase-1.x hbck1 lock file to block out hbck1 instances writing state to an hbase-2.x cluster (could do damage). Set hbase.write.hbck1.

2018-08-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.1 8a9acd4d2 -> 66add5523


HBASE-21072 Block out HBCK1 in hbase2
Write the hbase-1.x hbck1 lock file to block out hbck1 instances writing
state to an hbase-2.x cluster (could do damage).
Set hbase.write.hbck1.lock.file to false to disable this writing.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/66add552
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/66add552
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/66add552

Branch: refs/heads/branch-2.1
Commit: 66add552348d957ac7853ea7eb370c6629bdeaca
Parents: 8a9acd4
Author: Michael Stack 
Authored: Wed Aug 22 22:44:00 2018 -0700
Committer: Michael Stack 
Committed: Fri Aug 24 09:22:53 2018 -0700

--
 .../org/apache/hadoop/hbase/master/HMaster.java |   9 +-
 .../org/apache/hadoop/hbase/util/HBaseFsck.java | 109 +--
 .../apache/hadoop/hbase/master/TestMaster.java  |  43 +++-
 3 files changed, 127 insertions(+), 34 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/66add552/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index bf0c790..2a92104 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -185,6 +185,7 @@ import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CompressionTest;
 import org.apache.hadoop.hbase.util.EncryptionTest;
+import org.apache.hadoop.hbase.util.HBaseFsck;
 import org.apache.hadoop.hbase.util.HFileArchiveUtil;
 import org.apache.hadoop.hbase.util.HasThread;
 import org.apache.hadoop.hbase.util.IdLock;
@@ -864,7 +865,13 @@ public class HMaster extends HRegionServer implements MasterServices {
 ZKClusterId.setClusterId(this.zooKeeper, fileSystemManager.getClusterId());
 this.clusterId = clusterId.toString();
 
-
+// Precaution. Put in place the old hbck1 lock file to fence out old hbase1s running their
+// hbck1s against an hbase2 cluster; it could do damage. To skip this behavior, set
+// hbase.write.hbck1.lock.file to false.
+if (this.conf.getBoolean("hbase.write.hbck1.lock.file", true)) {
+  HBaseFsck.checkAndMarkRunningHbck(this.conf,
+  HBaseFsck.createLockRetryCounterFactory(this.conf).create());
+}
 
 status.setStatus("Initialze ServerManager and schedule SCP for crash servers");
 this.serverManager = createServerManager(this);

http://git-wip-us.apache.org/repos/asf/hbase/blob/66add552/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index c479759..992c986 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -144,6 +144,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
@@ -164,7 +165,10 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminServic
 
 /**
  * HBaseFsck (hbck) is a tool for checking and repairing region consistency and
- * table integrity problems in a corrupted HBase.
+ * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not
+ * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.
+ * See hbck2 (HBASE-19121) for a hbck tool for hbase2.
+ *
  * 
  * Region consistency checks verify that hbase:meta, region deployment on region
  * servers and the state of data in HDFS (.regioninfo files) all are in
@@ -217,7 +221,12 @@ public class HBaseFsck extends Configured implements Closeable {
   private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;
   private static final int DEFAULT_MAX_MERGE = 5;
   private static final String TO_BE_LOADED = "to_be_loaded";
-  private static final String HBCK_LOCK_FILE = "hbase-hbck.lock";
+  /**
+   * Here is where hbase-1.x used 

hbase git commit: HBASE-21072 Block out HBCK1 in hbase2 Write the hbase-1.x hbck1 lock file to block out hbck1 instances writing state to an hbase-2.x cluster (could do damage). Set hbase.write.hbck1.

2018-08-24 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 ed235c97e -> ba452da4b


HBASE-21072 Block out HBCK1 in hbase2
Write the hbase-1.x hbck1 lock file to block out hbck1 instances writing
state to an hbase-2.x cluster (could do damage).
Set hbase.write.hbck1.lock.file to false to disable this writing.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ba452da4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ba452da4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ba452da4

Branch: refs/heads/branch-2.0
Commit: ba452da4bab9bdbe542115ebdfecc021c4f1e848
Parents: ed235c9
Author: Michael Stack 
Authored: Wed Aug 22 22:44:00 2018 -0700
Committer: Michael Stack 
Committed: Fri Aug 24 09:22:00 2018 -0700

--
 .../org/apache/hadoop/hbase/master/HMaster.java |   9 +-
 .../org/apache/hadoop/hbase/util/HBaseFsck.java | 109 +--
 .../apache/hadoop/hbase/master/TestMaster.java  |  43 +++-
 3 files changed, 127 insertions(+), 34 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ba452da4/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 3f296c1..d5167c8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -173,6 +173,7 @@ import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CompressionTest;
 import org.apache.hadoop.hbase.util.EncryptionTest;
+import org.apache.hadoop.hbase.util.HBaseFsck;
 import org.apache.hadoop.hbase.util.HFileArchiveUtil;
 import org.apache.hadoop.hbase.util.HasThread;
 import org.apache.hadoop.hbase.util.IdLock;
@@ -828,7 +829,13 @@ public class HMaster extends HRegionServer implements MasterServices {
 ZKClusterId.setClusterId(this.zooKeeper, fileSystemManager.getClusterId());
 this.clusterId = clusterId.toString();
 
-
+// Precaution. Put in place the old hbck1 lock file to fence out old hbase1s running their
+// hbck1s against an hbase2 cluster; it could do damage. To skip this behavior, set
+// hbase.write.hbck1.lock.file to false.
+if (this.conf.getBoolean("hbase.write.hbck1.lock.file", true)) {
+  HBaseFsck.checkAndMarkRunningHbck(this.conf,
+  HBaseFsck.createLockRetryCounterFactory(this.conf).create());
+}
 
 status.setStatus("Initialze ServerManager and schedule SCP for crash servers");
 this.serverManager = createServerManager(this);

http://git-wip-us.apache.org/repos/asf/hbase/blob/ba452da4/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index b5533d4..fec323a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -135,6 +135,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
@@ -155,7 +156,10 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminServic
 
 /**
  * HBaseFsck (hbck) is a tool for checking and repairing region consistency and
- * table integrity problems in a corrupted HBase.
+ * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not
+ * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.
+ * See hbck2 (HBASE-19121) for a hbck tool for hbase2.
+ *
  * 
  * Region consistency checks verify that hbase:meta, region deployment on region
  * servers and the state of data in HDFS (.regioninfo files) all are in
@@ -208,7 +212,12 @@ public class HBaseFsck extends Configured implements Closeable {
   private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;
   private static final int DEFAULT_MAX_MERGE = 5;
   private static final String TO_BE_LOADED = "to_be_loaded";
-  private static final String HBCK_LOCK_FILE = "hbase-hbck.lock";
+  /**
+   * Here is where hbase-1.x used 

[27/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/RowFilter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/RowFilter.html b/apidocs/src-html/org/apache/hadoop/hbase/filter/RowFilter.html
index 2db4997..14fcebb 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/RowFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/RowFilter.html
[The hunk that follows in the original message is the regenerated, syntax-highlighted source listing of org.apache.hadoop.hbase.filter.RowFilter (class javadoc, constructors, filterCell/filterRowKey, toByteArray/parseFrom) rendered as HTML; it is omitted here.]
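
For orientation, since the generated page is unreadable here: RowFilter compares the row key against an operator and a comparator and is typically set on a Scan. A hedged sketch of ordinary client usage (not part of this commit; the prefix value is made up):

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class RowFilterExample {
  public static Scan rowsWithPrefix(byte[] prefix) {
    // Only rows whose key matches the prefix comparison are returned; the filtering
    // happens on the region server before results are shipped to the client.
    Scan scan = new Scan();
    scan.setFilter(new RowFilter(CompareOperator.EQUAL, new BinaryPrefixComparator(prefix)));
    return scan;
  }

  public static void main(String[] args) {
    System.out.println(rowsWithPrefix(Bytes.toBytes("user_")));
  }
}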

[32/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html b/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
index a9629dd..1e6a2bb 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
[The hunk that follows in the original message is the regenerated source listing of org.apache.hadoop.hbase.filter.KeyOnlyFilter (including its KeyOnlyCell wrapper) rendered as HTML; it is omitted here.]
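
KeyOnlyFilter, whose regenerated source page is elided above, strips cell values so a scan returns only keys. A hedged usage sketch (not part of this commit):

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;

public class KeyOnlyScanExample {
  public static Scan keysOnlyScan() {
    // Values come back empty (or as their length with new KeyOnlyFilter(true)),
    // which keeps RPC payloads small when only row keys or cell coordinates are needed.
    Scan scan = new Scan();
    scan.setFilter(new KeyOnlyFilter());
    return scan;
  }
}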

[43/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/org/apache/hadoop/hbase/filter/TimestampsFilter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/TimestampsFilter.html b/apidocs/org/apache/hadoop/hbase/filter/TimestampsFilter.html
index eaeb725..eddc34e 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/TimestampsFilter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/TimestampsFilter.html
[Regenerated Javadoc class page for org.apache.hadoop.hbase.filter.TimestampsFilter; the visible change is that equals(Object) and hashCode() now appear in the method summary. The generated HTML is omitted here.]
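
TimestampsFilter returns only cells whose timestamp (version) is in an explicit list. A hedged usage sketch (not part of this commit; the timestamps are made up):

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.TimestampsFilter;

public class TimestampsFilterExample {
  public static Scan onlyTheseVersions(List<Long> timestamps) {
    // Cells at any other timestamp are dropped server-side.
    Scan scan = new Scan();
    scan.setFilter(new TimestampsFilter(timestamps));
    return scan;
  }

  public static void main(String[] args) {
    System.out.println(onlyTheseVersions(Arrays.asList(1535100000000L, 1535200000000L)));
  }
}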

[17/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/filter/FilterListWithOR.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/FilterListWithOR.html b/devapidocs/org/apache/hadoop/hbase/filter/FilterListWithOR.html
index 3282c7f..ece1819 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/FilterListWithOR.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/FilterListWithOR.html
[Regenerated Javadoc page for org.apache.hadoop.hbase.filter.FilterListWithOR; the visible change is that equals(Object) and hashCode() now appear in the method summary. The generated HTML is omitted here.]
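
FilterListWithOR is the internal implementation behind FilterList with Operator.MUST_PASS_ONE; client code normally reaches it as below. A hedged sketch (not part of this commit; the row keys are made up):

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class OrFilterExample {
  public static Scan eitherRow(byte[] rowA, byte[] rowB) {
    // MUST_PASS_ONE is a logical OR: a cell passes if any sub-filter lets it through.
    FilterList or = new FilterList(FilterList.Operator.MUST_PASS_ONE,
        new RowFilter(CompareOperator.EQUAL, new BinaryComparator(rowA)),
        new RowFilter(CompareOperator.EQUAL, new BinaryComparator(rowB)));
    Scan scan = new Scan();
    scan.setFilter(or);
    return scan;
  }

  public static void main(String[] args) {
    System.out.println(eitherRow(Bytes.toBytes("row-1"), Bytes.toBytes("row-2")));
  }
}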

[09/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
--
[Regenerated site pages, generated HTML omitted: devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html, devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html and devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html only reorder enum entries in their package trees; the diff of devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html is truncated in this message.]

[44/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html 
b/apidocs/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html
index 039407c..07ead26 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":9,"i1":10,"i2":42,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":42,"i11":10,"i12":10,"i13":10,"i14":9,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10};
+var methods = 
{"i0":9,"i1":10,"i2":10,"i3":42,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":42,"i12":10,"i13":10,"i14":10,"i15":10,"i16":9,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -124,7 +124,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class SingleColumnValueFilter
+public class SingleColumnValueFilter
 extends org.apache.hadoop.hbase.filter.FilterBase
 This filter is used to filter cells based on value. It 
takes a CompareFilter.CompareOp
  operator (equal, greater, not equal, etc), and either a byte [] value or
@@ -327,18 +327,22 @@ extends org.apache.hadoop.hbase.filter.FilterBase
 createFilterFromArguments(https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
+boolean
+equals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
+
+
 Filter.ReturnCode
 filterCell(Cellc)
 A way to filter based on the column family, column 
qualifier and/or the column value.
 
 
-
+
 Filter.ReturnCode
 filterKeyValue(Cellc)
 Deprecated.
 
 
-
+
 boolean
 filterRow()
 Filters that never filter by rows based on previously 
gathered state from
@@ -346,37 +350,37 @@ extends org.apache.hadoop.hbase.filter.FilterBase
  never filters a row.
 
 
-
+
 boolean
 filterRowKey(Cellcell)
 Filters a row based on the row key.
 
 
-
+
 ByteArrayComparable
 getComparator()
 
-
+
 CompareOperator
 getCompareOperator()
 
-
+
 byte[]
 getFamily()
 
-
+
 boolean
 getFilterIfMissing()
 Get whether entire row should be filtered if column is not 
found.
 
 
-
+
 boolean
 getLatestVersionOnly()
 Get whether only the latest version of the column value 
should be compared.
 
 
-
+
 CompareFilter.CompareOp
 getOperator()
 Deprecated.
@@ -384,53 +388,57 @@ extends org.apache.hadoop.hbase.filter.FilterBase
 
 
 
-
+
 byte[]
 getQualifier()
 
-
+
 boolean
 hasFilterRow()
 Fitlers that never filter by modifying the returned List of 
Cells can
  inherit this implementation that does nothing.
 
 
-
+
+int
+hashCode()
+
+
 boolean
 isFamilyEssential(byte[]name)
 The only CF this filter needs is given column family.
 
 
-
+
 static SingleColumnValueFilter
 parseFrom(byte[]pbBytes)
 
-
+
 void
 reset()
 Filters that are purely stateless and do nothing in their 
reset() methods can inherit
  this null/empty implementation.
 
 
-
+
 void
 setFilterIfMissing(booleanfilterIfMissing)
 Set whether entire row should be filtered if column is not 
found.
 
 
-
+
 void
 setLatestVersionOnly(booleanlatestVersionOnly)
 Set whether only the latest version of the column value 
should be compared.
 
 
-
+
 byte[]
 toByteArray()
 Return length 0 byte array for Filters that don't require 
special serialization
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString()
 Return filter's info for debugging and logging 
purpose.
@@ -456,7 +464,7 @@ extends org.apache.hadoop.hbase.filter.FilterBase
 
 
 Methods inherited from classjava.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or interface in 

[19/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/filter/DependentColumnFilter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/DependentColumnFilter.html 
b/devapidocs/org/apache/hadoop/hbase/filter/DependentColumnFilter.html
index 7d9d2af..4421ed5 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/DependentColumnFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/DependentColumnFilter.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":9,"i2":10,"i3":10,"i4":10,"i5":42,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":9,"i14":10,"i15":10,"i16":10};
+var methods = 
{"i0":10,"i1":9,"i2":10,"i3":10,"i4":10,"i5":10,"i6":42,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":9,"i16":10,"i17":10,"i18":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -125,7 +125,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class DependentColumnFilter
+public class DependentColumnFilter
 extends CompareFilter
 A filter for adding inter-column timestamp matching
  Only cells with a correspondingly timestamped entry in
@@ -285,24 +285,28 @@ extends 
 boolean
+equals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
+
+
+boolean
 filterAllRemaining()
 Filters that never filter all remaining can inherit this 
implementation that
  never stops the filter early.
 
 
-
+
 Filter.ReturnCode
 filterCell(Cellc)
 A way to filter based on the column family, column 
qualifier and/or the column value.
 
 
-
+
 Filter.ReturnCode
 filterKeyValue(Cellc)
 Deprecated.
 
 
-
+
 boolean
 filterRow()
 Filters that never filter by rows based on previously 
gathered state from
@@ -310,14 +314,14 @@ extends 
+
 void
 filterRowCells(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellkvs)
 Filters that never filter by modifying the returned List of 
Cells can
  inherit this implementation that does nothing.
 
 
-
+
 boolean
 filterRowKey(byte[]buffer,
 intoffset,
@@ -326,43 +330,47 @@ extends 
+
 boolean
 getDropDependentColumn()
 
-
+
 byte[]
 getFamily()
 
-
+
 byte[]
 getQualifier()
 
-
+
 boolean
 hasFilterRow()
Filters that never filter by modifying the returned List of 
Cells can
  inherit this implementation that does nothing.
 
 
-
+
+int
+hashCode()
+
+
 static DependentColumnFilter
 parseFrom(byte[]pbBytes)
 
-
+
 void
 reset()
 Filters that are purely stateless and do nothing in their 
reset() methods can inherit
  this null/empty implementation.
 
 
-
+
 byte[]
 toByteArray()
 Return length 0 byte array for Filters that don't require 
special serialization
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString()
 Return filter's info for debugging and logging 
purpose.
@@ -395,7 +403,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or interface in java.lang">hashCode, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, https://docs.oracle.com/javase/8/docs/api/ja
 va/lang/Object.html?is-external=true#notifyAll--" title="class or interface in 
java.lang">notifyAll, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--;
 title="class or interface in java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-;
 title="class or interface in java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-;
 title="class or interface in java.lang">wait
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in 

[30/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html
index 4804996..c3cf972 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html
@@ -28,500 +28,538 @@
 020import java.util.ArrayList;
 021import java.util.Collections;
 022import java.util.List;
-023
-024import org.apache.hadoop.hbase.Cell;
-025import 
org.apache.hadoop.hbase.CellUtil;
-026import 
org.apache.hadoop.hbase.HConstants;
-027import 
org.apache.hadoop.hbase.PrivateCellUtil;
-028import 
org.apache.yetus.audience.InterfaceAudience;
-029import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-030import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-031import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
-032import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-033import 
org.apache.hadoop.hbase.util.Bytes;
-034
-035/**
-036 * Filter to support scan multiple row 
key ranges. It can construct the row key ranges from the
-037 * passed list which can be accessed by 
each region server.
-038 *
-039 * HBase is quite efficient when scanning 
only one small row key range. If user needs to specify
-040 * multiple row key ranges in one scan, 
the typical solutions are: 1. through FilterList which is a
-041 * list of row key Filters, 2. using the SQL layer over HBase to join two tables, such as Hive,
-042 * Phoenix, etc. However, both solutions are inefficient. Both of them can't utilize the range info
-043 * to perform fast forwarding during scan which is quite time consuming. If the number of ranges
-044 * is quite big (e.g. millions), join is a proper solution though it is slow. However, there are
-045 * cases that user wants to specify a small number of ranges to scan (e.g. <1000 ranges). Both
-046 * solutions can't provide satisfactory performance in such case. MultiRowRangeFilter is to support
-047 * such use case (scan multiple row key 
ranges), which can construct the row key ranges from user
-048 * specified list and perform 
fast-forwarding during scan. Thus, the scan will be quite efficient.
-049 */
-050@InterfaceAudience.Public
-051public class MultiRowRangeFilter extends 
FilterBase {
-052
-053  private List<RowRange> rangeList;
-054
-055  private static final int 
ROW_BEFORE_FIRST_RANGE = -1;
-056  private boolean EXCLUSIVE = false;
-057  private boolean done = false;
-058  private boolean initialized = false;
-059  private int index;
-060  private RowRange range;
-061  private ReturnCode currentReturnCode;
-062
-063  /**
-064   * @param list A list of <code>RowRange</code>
-065   */
-066  public MultiRowRangeFilter(List<RowRange> list) {
-067    this.rangeList = sortAndMerge(list);
-068  }
-069
-070  @Override
-071  public boolean filterAllRemaining() {
-072return done;
-073  }
-074
-075  public ListRowRange 
getRowRanges() {
-076return this.rangeList;
-077  }
-078
-079  @Override
-080  public boolean filterRowKey(Cell 
firstRowCell) {
-081if (filterAllRemaining()) return 
true;
-082// If it is the first time of 
running, calculate the current range index for
-083// the row key. If index is out of 
bound which happens when the start row
-084// user sets is after the largest 
stop row of the ranges, stop the scan.
-085// If row key is after the current 
range, find the next range and update index.
-086byte[] rowArr = 
firstRowCell.getRowArray();
-087int length = 
firstRowCell.getRowLength();
-088int offset = 
firstRowCell.getRowOffset();
-089if (!initialized
-090|| !range.contains(rowArr, 
offset, length)) {
-091  byte[] rowkey = 
CellUtil.cloneRow(firstRowCell);
-092  index = 
getNextRangeIndex(rowkey);
-093  if (index = rangeList.size()) 
{
-094done = true;
-095currentReturnCode = 
ReturnCode.NEXT_ROW;
-096return false;
-097  }
-098  if(index != ROW_BEFORE_FIRST_RANGE) 
{
-099range = rangeList.get(index);
-100  } else {
-101range = rangeList.get(0);
-102  }
-103  if (EXCLUSIVE) {
-104EXCLUSIVE = false;
-105currentReturnCode = 
ReturnCode.NEXT_ROW;
-106return false;
-107  }
-108  if (!initialized) {
-109if(index != 
ROW_BEFORE_FIRST_RANGE) {
-110  currentReturnCode = 
ReturnCode.INCLUDE;
-111} else {
-112  currentReturnCode = 
ReturnCode.SEEK_NEXT_USING_HINT;
-113}
-114initialized = true;
-115  } else {
-116if (range.contains(rowArr, 
offset, length)) {
-117  currentReturnCode = 
ReturnCode.INCLUDE;
-118   
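
The MultiRowRangeFilter source above builds sorted, merged row-key ranges and
fast-forwards between them during a scan. A minimal usage sketch, assuming the
HBase 2.x client API; the row-key values are hypothetical:

    import java.util.Arrays;
    import java.util.List;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.MultiRowRangeFilter;
    import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange;

    public class MultiRowRangeFilterExample {
      /** Scan two disjoint row-key ranges in a single pass. */
      public static Scan twoRangeScan() {
        List<RowRange> ranges = Arrays.asList(
            new RowRange("row-0010", true, "row-0020", false),
            new RowRange("row-0500", true, "row-0510", false));
        // Ranges are sorted and merged by the constructor (sortAndMerge above).
        return new Scan().setFilter(new MultiRowRangeFilter(ranges));
      }
    }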

[22/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/coc.html
--
diff --git a/coc.html b/coc.html
index 0348c50..e294239 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Code of Conduct Policy
@@ -375,7 +375,7 @@ email to mailto:priv...@hbase.apache.org;>the priv
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-08-23
+  Last Published: 
2018-08-24
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index 1d56516..89c744b 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependencies
 
@@ -440,7 +440,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-08-23
+  Last Published: 
2018-08-24
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 70dce3f..bb4d10e 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Reactor Dependency Convergence
 
@@ -890,7 +890,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-08-23
+  Last Published: 
2018-08-24
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/dependency-info.html
--
diff --git a/dependency-info.html b/dependency-info.html
index 5161863..334c8d2 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Dependency Information
 
@@ -313,7 +313,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-08-23
+  Last Published: 
2018-08-24
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/dependency-management.html
--
diff --git a/dependency-management.html b/dependency-management.html
index bdd6a56..3302203 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependency Management
 
@@ -1005,7 +1005,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-08-23
+  Last Published: 
2018-08-24
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/constant-values.html
--
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 61492ef..c99b5c4 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -3817,21 +3817,21 @@
 
 publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 date
-"Thu Aug 23 14:38:46 UTC 2018"
+"Fri Aug 24 14:38:46 UTC 2018"
 
 
 
 
 publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 revision
-"6a5b4f2a5c188f8eef4f2250b8b7db7dd1e750e4"
+"a452487a9b82bfd33bc10683c3f8b8ae74d58883"
 
 
 
 
 publicstaticfinalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 srcChecksum
-"1e08aed9fad639e572ab4a3a705f2a05"
+"6a771691f343c60ea56a144f9db58ab5"
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index ef0e429..0797571 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -28367,8 +28367,66 @@
 
 equals(Object)
 - Method in class 
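
The index diff is truncated here, but the per-class diffs in this publish add
equals(Object) and hashCode() overrides to the public filter classes. A hedged
sketch of what that enables for client code, assuming the new overrides compare
the filters' configuration; the family/qualifier/value bytes are hypothetical:

    import org.apache.hadoop.hbase.CompareOperator;
    import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
    import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    public class FilterEqualityExample {
      public static void main(String[] args) {
        SingleColumnValueFilter a = new SingleColumnValueFilter(
            Bytes.toBytes("cf"), Bytes.toBytes("q"), CompareOperator.EQUAL, Bytes.toBytes("v"));
        SingleColumnValueFilter b = new SingleColumnValueFilter(
            Bytes.toBytes("cf"), Bytes.toBytes("q"), CompareOperator.EQUAL, Bytes.toBytes("v"));
        // Identically configured filters should now compare equal and hash consistently,
        // which matters when filters are deduplicated in sets or compared in tests.
        System.out.println(a.equals(b));                  // expected: true
        System.out.println(a.hashCode() == b.hashCode()); // expected: true
        System.out.println(a.equals(new ColumnPrefixFilter(Bytes.toBytes("q")))); // expected: false
      }
    }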

[40/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnValueFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
index ee5595f..961079a 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
@@ -29,224 +29,240 @@
 021
 022import java.io.IOException;
 023import java.util.ArrayList;
-024
-025import org.apache.hadoop.hbase.Cell;
-026import 
org.apache.hadoop.hbase.CellUtil;
-027import 
org.apache.hadoop.hbase.CompareOperator;
-028import 
org.apache.hadoop.hbase.PrivateCellUtil;
-029import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-030import 
org.apache.hadoop.hbase.util.Bytes;
-031import 
org.apache.yetus.audience.InterfaceAudience;
-032
-033import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-034import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-035import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
-036
-037import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-038import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-039import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-040
-041/**
-042 * Different from {@link SingleColumnValueFilter} which returns an <b>entire</b> row
-043 * when specified condition is matched, {@link ColumnValueFilter} returns the matched cell only.
-044 * <p>
-045 * This filter is used to filter cells based on column and value.
-046 * It takes a {@link org.apache.hadoop.hbase.CompareOperator} operator (<, <=, =, !=, >, >=),
-047 * and a {@link ByteArrayComparable} comparator.
-048 */
-049@InterfaceAudience.Public
-050public class ColumnValueFilter extends 
FilterBase {
-051  private final byte[] family;
-052  private final byte[] qualifier;
-053  private final CompareOperator op;
-054  private final ByteArrayComparable 
comparator;
-055
-056  // This flag is used to speed up 
seeking cells when matched column is found, such that following
-057  // columns in the same row can be 
skipped faster by NEXT_ROW instead of NEXT_COL.
-058  private boolean columnFound = false;
-059
-060  public ColumnValueFilter(final byte[] 
family, final byte[] qualifier,
-061   final 
CompareOperator op, final byte[] value) {
-062this(family, qualifier, op, new 
BinaryComparator(value));
-063  }
-064
-065  public ColumnValueFilter(final byte[] 
family, final byte[] qualifier,
-066   final 
CompareOperator op,
-067   final 
ByteArrayComparable comparator) {
-068this.family = 
Preconditions.checkNotNull(family, "family should not be null.");
-069this.qualifier = qualifier == null ? 
new byte[0] : qualifier;
-070this.op = 
Preconditions.checkNotNull(op, "CompareOperator should not be null");
-071this.comparator = 
Preconditions.checkNotNull(comparator, "Comparator should not be null");
-072  }
-073
-074  /**
-075   * @return operator
-076   */
-077  public CompareOperator 
getCompareOperator() {
-078return op;
-079  }
-080
-081  /**
-082   * @return the comparator
-083   */
-084  public ByteArrayComparable 
getComparator() {
-085return comparator;
-086  }
-087
-088  /**
-089   * @return the column family
-090   */
-091  public byte[] getFamily() {
-092return family;
-093  }
-094
-095  /**
-096   * @return the qualifier
-097   */
-098  public byte[] getQualifier() {
-099return qualifier;
-100  }
-101
-102  @Override
-103  public void reset() throws IOException 
{
-104columnFound = false;
-105  }
-106
-107  @Override
-108  public boolean filterRowKey(Cell cell) 
throws IOException {
-109return false;
-110  }
-111
-112  @Override
-113  public ReturnCode filterCell(Cell c) 
throws IOException {
-114// 1. Check column match
-115if (!CellUtil.matchingColumn(c, 
this.family, this.qualifier)) {
-116  return columnFound ? 
ReturnCode.NEXT_ROW : ReturnCode.NEXT_COL;
-117}
-118// Column found
-119columnFound = true;
-120// 2. Check value match:
-121// True means filter out, just skip 
this cell, else include it.
-122return 
compareValue(getCompareOperator(), getComparator(), c) ?
-123  ReturnCode.SKIP : 
ReturnCode.INCLUDE;
-124  }
-125
-126  /**
-127   * This method is used to determine a 
cell should be included or filtered out.
-128   * @param op one of operators {@link 
CompareOperator}
-129   * @param comparator comparator used to 
compare cells.
-130   * @param cell cell to be compared.
-131   * @return true means cell should be 
filtered out, included otherwise.
-132   */
-133  private boolean compareValue(final 
CompareOperator op, final ByteArrayComparable 
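
As the ColumnValueFilter source above notes, this filter returns only the
matched cell rather than the entire row. A minimal usage sketch under the HBase
2.x API; family "cf", qualifier "price" and the value are hypothetical, and the
comparison is on raw bytes (lexicographic) unless a custom comparator is given:

    import org.apache.hadoop.hbase.CompareOperator;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.ColumnValueFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ColumnValueFilterExample {
      /** Return only the cf:price cells whose value compares >= "100". */
      public static Scan matchingCellsOnly() {
        ColumnValueFilter filter = new ColumnValueFilter(
            Bytes.toBytes("cf"), Bytes.toBytes("price"),
            CompareOperator.GREATER_OR_EQUAL, Bytes.toBytes("100"));
        return new Scan().setFilter(filter);
      }
    }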

[23/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 6098f6a..236f443 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -9838,12 +9838,12 @@
 http://checkstyle.sourceforge.net/config_javadoc.html#JavadocTagContinuationIndentation;>JavadocTagContinuationIndentation
 
 offset: 2
-763
+764
 Error
 
 
 http://checkstyle.sourceforge.net/config_javadoc.html#NonEmptyAtclauseDescription;>NonEmptyAtclauseDescription
-3606
+3605
 Error
 
 misc
@@ -19479,7 +19479,7 @@
 
 Error
 javadoc
-NonEmptyAtclauseDescription
+JavadocTagContinuationIndentation
 Javadoc comment at column 0 has parse error. Details: no viable 
alternative at input '   *' while parsing JAVADOC_TAG
 117
 
@@ -23225,7 +23225,7 @@
 imports
 ImportOrder
 Wrong order for 
'org.apache.hadoop.hbase.exceptions.DeserializationException' import.
-24
+25
 
 org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.java
 
@@ -37613,55 +37613,55 @@
 imports
 ImportOrder
 Wrong order for 
'org.apache.hadoop.hbase.exceptions.DeserializationException' import.
-27
+28
 
 Error
 imports
 ImportOrder
 Wrong order for 
'org.apache.hbase.thirdparty.com.google.common.base.Preconditions' import.
-30
+31
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-56
+57
 
 Error
 annotation
 MissingDeprecated
 Must include both @java.lang.Deprecated annotation and @deprecated Javadoc 
tag with description.
-65
+66
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-103
+104
 
 Error
 indentation
 Indentation
 'throws' has incorrect indentation level 2, expected level should be 
4.
-107
+108
 
 Error
 javadoc
 JavadocTagContinuationIndentation
 Line continuation have incorrect indentation level, expected level should 
be 2.
-120
+121
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-124
+125
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-125
+126
 
 org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
 
@@ -37676,79 +37676,79 @@
 imports
 ImportOrder
 Wrong order for 
'org.apache.hadoop.hbase.exceptions.DeserializationException' import.
-28
+29
 
 Error
 imports
 ImportOrder
 Wrong order for 'org.apache.hadoop.hbase.util.Bytes' import.
-30
+31
 
 Error
 sizes
 LineLength
 Line is longer than 100 characters (found 115).
-38
+39
 
 Error
 sizes
 LineLength
 Line is longer than 100 characters (found 110).
-39
+40
 
 Error
 blocks
 LeftCurly
 '{' at column 3 should be on the previous line.
-60
+61
 
 Error
 annotation
 MissingDeprecated
 Must include both @java.lang.Deprecated annotation and @deprecated Javadoc 
tag with description.
-113
+114
 
 Error
 blocks
 LeftCurly
 '{' at column 3 should be on the previous line.
-121
+122
 
 Error
 blocks
 LeftCurly
 '{' at column 3 should be on the previous line.
-156
+157
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-188
+189
 
 Error
 indentation
 Indentation
 'throws' has incorrect indentation level 2, expected level should be 
4.
-192
+193
 
 Error
 javadoc
 JavadocTagContinuationIndentation
 Line continuation have incorrect indentation level, expected level should 
be 2.
-209
+210
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-213
+214
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-214
+215
 
 org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
 
@@ -37763,73 +37763,73 @@
 imports
 ImportOrder
 Wrong order for 
'org.apache.hadoop.hbase.exceptions.DeserializationException' import.
-29
+30
 
 Error
 imports
 ImportOrder
 Wrong order for 'org.apache.hadoop.hbase.util.ByteBufferUtils' import.
-31
+32
 
 Error
 annotation
 MissingDeprecated
 Must include both @java.lang.Deprecated annotation and @deprecated Javadoc 
tag with description.
-61
+62
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-120
+121
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-127
+128
 
 Error
 indentation
 Indentation
 'throws' has incorrect indentation level 2, expected level should be 
4.
-131
+132
 
 Error
 javadoc
 JavadocTagContinuationIndentation
 Line continuation have incorrect indentation level, expected level should 
be 2.
-144
+145
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-148
+149
 
 Error
 indentation
 Indentation
 'if' has incorrect indentation level 3, expected level should be 4.
-148
+149
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-149
+150
 
 Error
 indentation
 Indentation
 'if' has incorrect indentation level 3, expected level should be 4.
-149
+150
 
 Error
 indentation
 Indentation
 'method def' child has incorrect indentation level 3, expected level 
should be 4.
-151
+152
 
 

[12/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/filter/RandomRowFilter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/RandomRowFilter.html 
b/devapidocs/org/apache/hadoop/hbase/filter/RandomRowFilter.html
index ee6b5ad..919255b 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/RandomRowFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/RandomRowFilter.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":42,"i4":10,"i5":10,"i6":10,"i7":10,"i8":9,"i9":10,"i10":10,"i11":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":42,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":10,"i12":10,"i13":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -120,7 +120,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class RandomRowFilter
+public class RandomRowFilter
 extends FilterBase
 A filter that includes rows based on a chance.
 
@@ -217,24 +217,28 @@ extends 
 
 boolean
+equals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
+
+
+boolean
 filterAllRemaining()
 Filters that never filter all remaining can inherit this 
implementation that
  never stops the filter early.
 
 
-
+
 Filter.ReturnCode
 filterCell(Cellc)
 A way to filter based on the column family, column 
qualifier and/or the column value.
 
 
-
+
 Filter.ReturnCode
 filterKeyValue(Cellc)
 Deprecated.
 
 
-
+
 boolean
 filterRow()
 Filters that never filter by rows based on previously 
gathered state from
@@ -242,41 +246,45 @@ extends 
 
 
-
+
 boolean
 filterRowKey(CellfirstRowCell)
 Filters a row based on the row key.
 
 
-
+
 float
 getChance()
 
-
+
 boolean
 hasFilterRow()
Filters that never filter by modifying the returned List of 
Cells can
  inherit this implementation that does nothing.
 
 
-
+
+int
+hashCode()
+
+
 static RandomRowFilter
 parseFrom(byte[]pbBytes)
 
-
+
 void
 reset()
 Filters that are purely stateless and do nothing in their 
reset() methods can inherit
  this null/empty implementation.
 
 
-
+
 void
 setChance(floatchance)
 Set the chance that a row is included.
 
 
-
+
 byte[]
 toByteArray()
 Return length 0 byte array for Filters that don't require 
special serialization
@@ -302,7 +310,7 @@ extends 
 
 Methods inherited from classjava.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or interface in java.lang">hashCode, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, https://docs.oracle.com/javase/8/docs/api/ja
 va/lang/Object.html?is-external=true#notifyAll--" title="class or interface in 
java.lang">notifyAll, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--;
 title="class or interface in java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-;
 title="class or interface in java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-;
 title="class or interface in java.lang">wait
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notifyAll--;
 title="class or interface in java.lang">notifyAll, 

hbase-site git commit: INFRA-10751 Empty commit

2018-08-24 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 0cf79db0e -> b8bc22fd3


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/b8bc22fd
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/b8bc22fd
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/b8bc22fd

Branch: refs/heads/asf-site
Commit: b8bc22fd30e26fef9222f643b370e623406579a9
Parents: 0cf79db
Author: jenkins 
Authored: Fri Aug 24 14:47:34 2018 +
Committer: jenkins 
Committed: Fri Aug 24 14:47:34 2018 +

--

--




[03/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
index ad7c82a..1dfa7b8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
@@ -29,307 +29,322 @@
 021
 022import java.io.IOException;
 023import java.util.ArrayList;
-024
-025import org.apache.hadoop.hbase.Cell;
-026import 
org.apache.hadoop.hbase.CompareOperator;
-027import 
org.apache.hadoop.hbase.PrivateCellUtil;
-028import 
org.apache.hadoop.hbase.util.Bytes;
-029import 
org.apache.yetus.audience.InterfaceAudience;
-030
-031import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-032
-033import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-034import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-035import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-036import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
-037
-038/**
-039 * This is a generic filter to be used to filter by comparison.  It takes an
-040 * operator (equal, greater, not equal, etc) and a byte [] comparator.
-041 * <p>
-042 * To filter by row key, use {@link RowFilter}.
-043 * <p>
-044 * To filter by column family, use {@link FamilyFilter}.
-045 * <p>
-046 * To filter by column qualifier, use {@link QualifierFilter}.
-047 * <p>
-048 * To filter by value, use {@link ValueFilter}.
-049 * <p>
-050 * These filters can be wrapped with {@link SkipFilter} and {@link WhileMatchFilter}
-051 * to add more control.
-052 * <p>
-053 * Multiple filters can be combined using {@link FilterList}.
-054 */
-055@InterfaceAudience.Public
-056public abstract class CompareFilter 
extends FilterBase {
-057  /**
-058   * Comparison operators. For filters 
only!
-059   * Use {@link CompareOperator} 
otherwise.
-060   * It (intentionally) has at least the 
below enums with same names.
-061   * @deprecated  since 2.0.0. Will be 
removed in 3.0.0. Use {@link CompareOperator} instead.
-062   */
-063  @Deprecated
-064  @InterfaceAudience.Public
-065  public enum CompareOp {
-066/** less than */
-067LESS,
-068/** less than or equal to */
-069LESS_OR_EQUAL,
-070/** equals */
-071EQUAL,
-072/** not equal */
-073NOT_EQUAL,
-074/** greater than or equal to */
-075GREATER_OR_EQUAL,
-076/** greater than */
-077GREATER,
-078/** no operation */
-079NO_OP,
-080  }
-081
-082  protected CompareOperator op;
-083  protected ByteArrayComparable 
comparator;
-084
-085  /**
-086   * Constructor.
-087   * @param compareOp the compare op for 
row matching
-088   * @param comparator the comparator for 
row matching
-089   * @deprecated Since 2.0.0. Will be 
removed in 3.0.0. Use other constructor.
-090   */
-091  @Deprecated
-092  public CompareFilter(final CompareOp 
compareOp,
-093  final ByteArrayComparable 
comparator) {
-094
this(CompareOperator.valueOf(compareOp.name()), comparator);
-095  }
-096
-097  /**
-098   * Constructor.
-099   * @param op the compare op for row 
matching
-100   * @param comparator the comparator for 
row matching
-101   */
-102  public CompareFilter(final 
CompareOperator op,
-103   final 
ByteArrayComparable comparator) {
-104this.op = op;
-105this.comparator = comparator;
-106  }
-107
-108  /**
-109   * @return operator
-110   * @deprecated  since 2.0.0. Will be 
removed in 3.0.0. Use {@link #getCompareOperator()} instead.
-111   */
-112  @Deprecated
-113  public CompareOp getOperator() {
-114return 
CompareOp.valueOf(op.name());
-115  }
-116
-117  public CompareOperator 
getCompareOperator() {
-118return op;
-119  }
-120
-121  /**
-122   * @return the comparator
-123   */
-124  public ByteArrayComparable 
getComparator() {
-125return comparator;
-126  }
-127
-128  @Override
-129  public boolean filterRowKey(Cell cell) 
throws IOException {
-130// Impl in FilterBase might do 
unnecessary copy for Off heap backed Cells.
-131return false;
-132  }
-133
-134  /**
-135   * @deprecated Since 2.0.0. Will be 
removed in 3.0.0.
-136   * Use {@link 
#compareRow(CompareOperator, ByteArrayComparable, Cell)}
-137   */
-138  @Deprecated
-139  protected boolean compareRow(final 
CompareOp compareOp, final ByteArrayComparable comparator,
-140  final Cell cell) {
-141if (compareOp == CompareOp.NO_OP) {
-142  return true;
-143}
-144int compareResult = 
PrivateCellUtil.compareRow(cell, comparator);
-145return compare(compareOp, 
compareResult);
-146  }
-147
-148  protected boolean compareRow(final 
CompareOperator op, final 

[02/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
index ad7c82a..1dfa7b8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
@@ -29,307 +29,322 @@
 021
 022import java.io.IOException;
 023import java.util.ArrayList;
-024
-025import org.apache.hadoop.hbase.Cell;
-026import 
org.apache.hadoop.hbase.CompareOperator;
-027import 
org.apache.hadoop.hbase.PrivateCellUtil;
-028import 
org.apache.hadoop.hbase.util.Bytes;
-029import 
org.apache.yetus.audience.InterfaceAudience;
-030
-031import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-032
-033import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-034import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-035import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-036import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
-037
-038/**
-039 * This is a generic filter to be used to filter by comparison.  It takes an
-040 * operator (equal, greater, not equal, etc) and a byte [] comparator.
-041 * <p>
-042 * To filter by row key, use {@link RowFilter}.
-043 * <p>
-044 * To filter by column family, use {@link FamilyFilter}.
-045 * <p>
-046 * To filter by column qualifier, use {@link QualifierFilter}.
-047 * <p>
-048 * To filter by value, use {@link ValueFilter}.
-049 * <p>
-050 * These filters can be wrapped with {@link SkipFilter} and {@link WhileMatchFilter}
-051 * to add more control.
-052 * <p>
-053 * Multiple filters can be combined using {@link FilterList}.
-054 */
-055@InterfaceAudience.Public
-056public abstract class CompareFilter 
extends FilterBase {
-057  /**
-058   * Comparison operators. For filters 
only!
-059   * Use {@link CompareOperator} 
otherwise.
-060   * It (intentionally) has at least the 
below enums with same names.
-061   * @deprecated  since 2.0.0. Will be 
removed in 3.0.0. Use {@link CompareOperator} instead.
-062   */
-063  @Deprecated
-064  @InterfaceAudience.Public
-065  public enum CompareOp {
-066/** less than */
-067LESS,
-068/** less than or equal to */
-069LESS_OR_EQUAL,
-070/** equals */
-071EQUAL,
-072/** not equal */
-073NOT_EQUAL,
-074/** greater than or equal to */
-075GREATER_OR_EQUAL,
-076/** greater than */
-077GREATER,
-078/** no operation */
-079NO_OP,
-080  }
-081
-082  protected CompareOperator op;
-083  protected ByteArrayComparable 
comparator;
-084
-085  /**
-086   * Constructor.
-087   * @param compareOp the compare op for 
row matching
-088   * @param comparator the comparator for 
row matching
-089   * @deprecated Since 2.0.0. Will be 
removed in 3.0.0. Use other constructor.
-090   */
-091  @Deprecated
-092  public CompareFilter(final CompareOp 
compareOp,
-093  final ByteArrayComparable 
comparator) {
-094
this(CompareOperator.valueOf(compareOp.name()), comparator);
-095  }
-096
-097  /**
-098   * Constructor.
-099   * @param op the compare op for row 
matching
-100   * @param comparator the comparator for 
row matching
-101   */
-102  public CompareFilter(final 
CompareOperator op,
-103   final 
ByteArrayComparable comparator) {
-104this.op = op;
-105this.comparator = comparator;
-106  }
-107
-108  /**
-109   * @return operator
-110   * @deprecated  since 2.0.0. Will be 
removed in 3.0.0. Use {@link #getCompareOperator()} instead.
-111   */
-112  @Deprecated
-113  public CompareOp getOperator() {
-114return 
CompareOp.valueOf(op.name());
-115  }
-116
-117  public CompareOperator 
getCompareOperator() {
-118return op;
-119  }
-120
-121  /**
-122   * @return the comparator
-123   */
-124  public ByteArrayComparable 
getComparator() {
-125return comparator;
-126  }
-127
-128  @Override
-129  public boolean filterRowKey(Cell cell) 
throws IOException {
-130// Impl in FilterBase might do 
unnecessary copy for Off heap backed Cells.
-131return false;
-132  }
-133
-134  /**
-135   * @deprecated Since 2.0.0. Will be 
removed in 3.0.0.
-136   * Use {@link 
#compareRow(CompareOperator, ByteArrayComparable, Cell)}
-137   */
-138  @Deprecated
-139  protected boolean compareRow(final 
CompareOp compareOp, final ByteArrayComparable comparator,
-140  final Cell cell) {
-141if (compareOp == CompareOp.NO_OP) {
-142  return true;
-143}
-144int compareResult = 
PrivateCellUtil.compareRow(cell, comparator);
-145return compare(compareOp, 
compareResult);
-146  }
-147
-148  protected boolean compareRow(final 
CompareOperator op, final ByteArrayComparable comparator,
-149   

[51/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/0cf79db0
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/0cf79db0
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/0cf79db0

Branch: refs/heads/asf-site
Commit: 0cf79db0e22536afb29d872c4829058645b6b6c2
Parents: 455e329
Author: jenkins 
Authored: Fri Aug 24 14:46:57 2018 +
Committer: jenkins 
Committed: Fri Aug 24 14:46:57 2018 +

--
 acid-semantics.html |4 +-
 apache_hbase_reference_guide.pdf|4 +-
 apidocs/index-all.html  |  108 +
 .../hbase/filter/ColumnCountGetFilter.html  |   80 +-
 .../hbase/filter/ColumnPaginationFilter.html|   90 +-
 .../hadoop/hbase/filter/ColumnPrefixFilter.html |   82 +-
 .../hadoop/hbase/filter/ColumnRangeFilter.html  |  104 +-
 .../hadoop/hbase/filter/ColumnValueFilter.html  |   90 +-
 .../hbase/filter/CompareFilter.CompareOp.html   |   16 +-
 .../hadoop/hbase/filter/CompareFilter.html  |   88 +-
 .../hbase/filter/DependentColumnFilter.html |  116 +-
 .../hadoop/hbase/filter/FamilyFilter.html   |   62 +-
 .../hbase/filter/FilterList.Operator.html   |   10 +-
 .../apache/hadoop/hbase/filter/FilterList.html  |  134 +-
 .../FirstKeyValueMatchingQualifiersFilter.html  |   64 +-
 .../hadoop/hbase/filter/FuzzyRowFilter.html |   70 +-
 .../hbase/filter/InclusiveStopFilter.html   |   76 +-
 .../hadoop/hbase/filter/KeyOnlyFilter.html  |   70 +-
 .../filter/MultiRowRangeFilter.RowRange.html|   72 +-
 .../hbase/filter/MultiRowRangeFilter.html   |   78 +-
 .../filter/MultipleColumnPrefixFilter.html  |   90 +-
 .../apache/hadoop/hbase/filter/PageFilter.html  |   84 +-
 .../hadoop/hbase/filter/PrefixFilter.html   |   90 +-
 .../hadoop/hbase/filter/QualifierFilter.html|   62 +-
 .../hadoop/hbase/filter/RandomRowFilter.html|   92 +-
 .../apache/hadoop/hbase/filter/RowFilter.html   |   74 +-
 .../filter/SingleColumnValueExcludeFilter.html  |4 +-
 .../hbase/filter/SingleColumnValueFilter.html   |  146 +-
 .../apache/hadoop/hbase/filter/SkipFilter.html  |   90 +-
 .../hadoop/hbase/filter/TimestampsFilter.html   |   86 +-
 .../apache/hadoop/hbase/filter/ValueFilter.html |   62 +-
 .../hadoop/hbase/filter/WhileMatchFilter.html   |   98 +-
 .../hbase/filter/ColumnCountGetFilter.html  |  239 +-
 .../hbase/filter/ColumnPaginationFilter.html|  437 +-
 .../hadoop/hbase/filter/ColumnPrefixFilter.html |  297 +-
 .../hadoop/hbase/filter/ColumnRangeFilter.html  |  450 +-
 .../hadoop/hbase/filter/ColumnValueFilter.html  |  452 +-
 .../hbase/filter/CompareFilter.CompareOp.html   |  617 +--
 .../hadoop/hbase/filter/CompareFilter.html  |  617 +--
 .../hbase/filter/DependentColumnFilter.html |  596 +--
 .../hadoop/hbase/filter/FamilyFilter.html   |  267 +-
 .../hbase/filter/FilterList.Operator.html   |  522 +-
 .../apache/hadoop/hbase/filter/FilterList.html  |  522 +-
 .../FirstKeyValueMatchingQualifiersFilter.html  |  247 +-
 .../hadoop/hbase/filter/FuzzyRowFilter.html | 1265 ++---
 .../hbase/filter/InclusiveStopFilter.html   |  239 +-
 .../hadoop/hbase/filter/KeyOnlyFilter.html  |  848 ++--
 .../filter/MultiRowRangeFilter.RowRange.html| 1026 ++--
 .../hbase/filter/MultiRowRangeFilter.html   | 1026 ++--
 .../filter/MultipleColumnPrefixFilter.html  |  394 +-
 .../apache/hadoop/hbase/filter/PageFilter.html  |  277 +-
 .../hadoop/hbase/filter/PrefixFilter.html   |  289 +-
 .../hadoop/hbase/filter/QualifierFilter.html|  261 +-
 .../hadoop/hbase/filter/RandomRowFilter.html|  287 +-
 .../apache/hadoop/hbase/filter/RowFilter.html   |  295 +-
 .../hbase/filter/SingleColumnValueFilter.html   |  896 ++--
 .../apache/hadoop/hbase/filter/SkipFilter.html  |  299 +-
 .../hadoop/hbase/filter/TimestampsFilter.html   |  451 +-
 .../apache/hadoop/hbase/filter/ValueFilter.html |  253 +-
 .../hadoop/hbase/filter/WhileMatchFilter.html   |  303 +-
 book.html   |2 +-
 bulk-loads.html |4 +-
 checkstyle-aggregate.html   | 1552 +++---
 coc.html|4 +-
 dependencies.html   |4 +-
 dependency-convergence.html |4 +-
 dependency-info.html|4 +-
 dependency-management.html  |4 +-
 devapidocs/constant-values.html |6 +-
 devapidocs/index-all.html   |  130 +
 .../hadoop/hbase/backup/package-tree.html   |2 +-
 .../hadoop/hbase/client/package-tree.html   |   26 +-
 .../hbase/filter/ColumnCountGetFilter.html  |   86 +-
 .../hbase/filter/ColumnPaginationFilter.html

[50/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.html 
b/apidocs/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.html
index d326120..3641be1 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":9,"i1":10,"i2":10,"i3":42,"i4":10,"i5":10,"i6":10,"i7":9,"i8":10,"i9":10};
+var methods = 
{"i0":9,"i1":10,"i2":10,"i3":10,"i4":42,"i5":10,"i6":10,"i7":10,"i8":10,"i9":9,"i10":10,"i11":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -120,7 +120,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class ColumnPrefixFilter
+public class ColumnPrefixFilter
 extends org.apache.hadoop.hbase.filter.FilterBase
 This filter is used for selecting only those keys with 
columns that match
  a particular prefix. For example, if prefix is 'an', it will pass keys with
@@ -206,49 +206,57 @@ extends org.apache.hadoop.hbase.filter.FilterBase
 createFilterFromArguments(https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
+boolean
+equals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
+
+
 Filter.ReturnCode
 filterCell(Cellcell)
 A way to filter based on the column family, column 
qualifier and/or the column value.
 
 
-
+
 Filter.ReturnCode
 filterColumn(Cellcell)
 
-
+
 Filter.ReturnCode
 filterKeyValue(Cellc)
 Deprecated.
 
 
-
+
 boolean
 filterRowKey(Cellcell)
 Filters a row based on the row key.
 
 
-
+
 Cell
 getNextCellHint(Cellcell)
 Filters that are not sure which key must be next seeked to, 
can inherit
  this implementation that, by default, returns a null Cell.
 
 
-
+
 byte[]
 getPrefix()
 
-
+
+int
+hashCode()
+
+
 static ColumnPrefixFilter
 parseFrom(byte[]pbBytes)
 
-
+
 byte[]
 toByteArray()
 Return length 0 byte array for Filters that don't require 
special serialization
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString()
 Return filter's info for debugging and logging 
purpose.
@@ -274,7 +282,7 @@ extends org.apache.hadoop.hbase.filter.FilterBase
 
 
 Methods inherited from classjava.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or interface in java.lang">hashCode, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, https://docs.oracle.com/javase/8/docs/api/ja
 va/lang/Object.html?is-external=true#notifyAll--" title="class or interface in 
java.lang">notifyAll, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--;
 title="class or interface in java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-;
 title="class or interface in java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-;
 title="class or interface in java.lang">wait
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, 

[36/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.Operator.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.Operator.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.Operator.html
index 8dfb5f1..b025b7e 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.Operator.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.Operator.html
@@ -31,260 +31,274 @@
 023import java.util.Arrays;
 024import java.util.Collections;
 025import java.util.List;
-026
-027import org.apache.hadoop.hbase.Cell;
-028import 
org.apache.yetus.audience.InterfaceAudience;
-029import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-030
-031import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-032import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-033import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-034
-035/**
-036 * Implementation of {@link Filter} that represents an ordered List of Filters which will be
-037 * evaluated with a specified boolean operator {@link Operator#MUST_PASS_ALL} (<code>AND</code>) or
-038 * {@link Operator#MUST_PASS_ONE} (<code>OR</code>). Since you can use Filter Lists as children of
-039 * Filter Lists, you can create a hierarchy of filters to be evaluated. <br>
-040 * {@link Operator#MUST_PASS_ALL} evaluates lazily: evaluation stops as soon as one filter does not
-041 * include the Cell. <br>
-042 * {@link Operator#MUST_PASS_ONE} evaluates non-lazily: all filters are always evaluated. <br>
-043 * Defaults to {@link Operator#MUST_PASS_ALL}.
-044 */
-045@InterfaceAudience.Public
-046final public class FilterList extends 
FilterBase {
-047
-048  /** set operator */
-049  @InterfaceAudience.Public
-050  public enum Operator {
-051/** !AND */
-052MUST_PASS_ALL,
-053/** !OR */
-054MUST_PASS_ONE
-055  }
-056
-057  private Operator operator;
-058  private FilterListBase 
filterListBase;
-059
-060  /**
-061   * Constructor that takes a set of 
{@link Filter}s and an operator.
-062   * @param operator Operator to process 
filter set with.
-063   * @param filters Set of row filters.
-064   */
-065  public FilterList(final Operator 
operator, final List<Filter> filters) {
-066if (operator == 
Operator.MUST_PASS_ALL) {
-067  filterListBase = new 
FilterListWithAND(filters);
-068} else if (operator == 
Operator.MUST_PASS_ONE) {
-069  filterListBase = new 
FilterListWithOR(filters);
-070} else {
-071  throw new 
IllegalArgumentException("Invalid operator: " + operator);
-072}
-073this.operator = operator;
-074  }
-075
-076  /**
-077   * Constructor that takes a set of 
{@link Filter}s. The default operator MUST_PASS_ALL is assumed.
-078   * All filters are cloned to internal 
list.
-079   * @param filters list of filters
-080   */
-081  public FilterList(final 
ListFilter filters) {
-082this(Operator.MUST_PASS_ALL, 
filters);
-083  }
-084
-085  /**
-086   * Constructor that takes a var arg 
number of {@link Filter}s. The default operator MUST_PASS_ALL
-087   * is assumed.
-088   * @param filters
-089   */
-090  public FilterList(final Filter... 
filters) {
-091this(Operator.MUST_PASS_ALL, 
Arrays.asList(filters));
-092  }
-093
-094  /**
-095   * Constructor that takes an 
operator.
-096   * @param operator Operator to process 
filter set with.
-097   */
-098  public FilterList(final Operator 
operator) {
-099this(operator, new 
ArrayList());
-100  }
-101
-102  /**
-103   * Constructor that takes a var arg 
number of {@link Filter}s and an operator.
-104   * @param operator Operator to process 
filter set with.
-105   * @param filters Filters to use
-106   */
-107  public FilterList(final Operator 
operator, final Filter... filters) {
-108this(operator, 
Arrays.asList(filters));
-109  }
-110
-111  /**
-112   * Get the operator.
-113   * @return operator
-114   */
-115  public Operator getOperator() {
-116return operator;
-117  }
-118
-119  /**
-120   * Get the filters.
-121   * @return filters
-122   */
-123  public ListFilter getFilters() 
{
-124return filterListBase.getFilters();
-125  }
-126
-127  public int size() {
-128return filterListBase.size();
-129  }
-130
-131  public void 
addFilter(ListFilter filters) {
-132
filterListBase.addFilterLists(filters);
-133  }
-134
-135  /**
-136   * Add a filter.
-137   * @param filter another filter
-138   */
-139  public void addFilter(Filter filter) 
{
-140
addFilter(Collections.singletonList(filter));
-141  }
-142
-143  @Override
-144  public void reset() throws IOException 
{
-145filterListBase.reset();
-146  }
-147
-148  @Override
-149  public boolean filterRowKey(byte[] 
rowKey, int offset, int length) throws IOException {
-150return 
filterListBase.filterRowKey(rowKey, offset, length);
-151  
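
The FilterList source above combines filters under MUST_PASS_ALL (AND, lazy) or
MUST_PASS_ONE (OR, non-lazy). A minimal sketch of both under the HBase 2.x API;
the row-key prefix, family and qualifiers are hypothetical:

    import org.apache.hadoop.hbase.CompareOperator;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
    import org.apache.hadoop.hbase.filter.FilterList;
    import org.apache.hadoop.hbase.filter.PrefixFilter;
    import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    public class FilterListExample {
      /** AND semantics: both conditions must pass (MUST_PASS_ALL is also the default). */
      public static Scan andScan() {
        FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ALL,
            new PrefixFilter(Bytes.toBytes("user-")),
            new SingleColumnValueFilter(Bytes.toBytes("cf"), Bytes.toBytes("status"),
                CompareOperator.EQUAL, Bytes.toBytes("active")));
        return new Scan().setFilter(list);
      }

      /** OR semantics: a cell passes if either column prefix matches. */
      public static Scan orScan() {
        FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ONE,
            new ColumnPrefixFilter(Bytes.toBytes("a")),
            new ColumnPrefixFilter(Bytes.toBytes("b")));
        return new Scan().setFilter(list);
      }
    }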

[34/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
index d66ef6c..d9cda57 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
@@ -29,632 +29,647 @@
 021import java.util.Arrays;
 022import java.util.Comparator;
 023import java.util.List;
-024import java.util.PriorityQueue;
-025
-026import org.apache.hadoop.hbase.Cell;
-027import 
org.apache.hadoop.hbase.CellComparator;
-028import 
org.apache.hadoop.hbase.PrivateCellUtil;
-029import 
org.apache.yetus.audience.InterfaceAudience;
-030import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-031import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-032import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
-033import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-034import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair;
-035import 
org.apache.hadoop.hbase.util.Bytes;
-036import 
org.apache.hadoop.hbase.util.Pair;
-037import 
org.apache.hadoop.hbase.util.UnsafeAccess;
-038import 
org.apache.hadoop.hbase.util.UnsafeAvailChecker;
-039
-040import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-041
-042/**
-043 * This is an optimized version of a standard FuzzyRowFilter. Filters data based on fuzzy row key.
-044 * Performs fast-forwards during scanning. It takes pairs (row key, fuzzy info) to match row keys.
-045 * Where fuzzy info is a byte array with 0 or 1 as its values:
-046 * <ul>
-047 * <li>0 - means that this byte in provided row key is fixed, i.e. row key's byte at same position
-048 * must match</li>
-049 * <li>1 - means that this byte in provided row key is NOT fixed, i.e. row key's byte at this
-050 * position can be different from the one in provided row key</li>
-051 * </ul>
-052 * Example: Let's assume row key format is userId_actionId_year_month. Length of userId is fixed and
-053 * is 4, length of actionId is 2 and year and month are 4 and 2 bytes long respectively. Let's
-054 * assume that we need to fetch all users that performed certain action (encoded as "99") in Jan of
-055 * any year. Then the pair (row key, fuzzy info) would be the following: row key = "????_99_????_01"
-056 * (one can use any value instead of "?") fuzzy info =
-057 * "\x01\x01\x01\x01\x00\x00\x00\x00\x01\x01\x01\x01\x00\x00\x00" I.e. fuzzy info tells the matching
-058 * mask is "????_99_????_01", where at ? can be any value.
-059 */
-060@InterfaceAudience.Public
-061public class FuzzyRowFilter extends 
FilterBase {
-062  private static final boolean 
UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
-063  private List<Pair<byte[], byte[]>> fuzzyKeysData;
-064  private boolean done = false;
-065
-066  /**
-067   * The index of a last successfully 
found matching fuzzy string (in fuzzyKeysData). We will start
-068   * matching next KV with this one. If 
they do not match then we will return back to the one-by-one
-069   * iteration over fuzzyKeysData.
-070   */
-071  private int lastFoundIndex = -1;
-072
-073  /**
-074   * Row tracker (keeps all next rows 
after SEEK_NEXT_USING_HINT was returned)
-075   */
-076  private RowTracker tracker;
-077
-078  public FuzzyRowFilter(List<Pair<byte[], byte[]>> fuzzyKeysData) {
-079    List<Pair<byte[], byte[]>> fuzzyKeyDataCopy = new ArrayList<>(fuzzyKeysData.size());
-080
-081    for (Pair<byte[], byte[]> aFuzzyKeysData : fuzzyKeysData) {
-082  if 
(aFuzzyKeysData.getFirst().length != aFuzzyKeysData.getSecond().length) {
-083        Pair<String, String> readable =
-084          new Pair<>(Bytes.toStringBinary(aFuzzyKeysData.getFirst()), Bytes.toStringBinary(aFuzzyKeysData.getSecond()));
-085throw new 
IllegalArgumentException("Fuzzy pair lengths do not match: " + readable);
-086  }
-087
-088      Pair<byte[], byte[]> p = new Pair<>();
-089  // create a copy of pair bytes so 
that they are not modified by the filter.
-090  
p.setFirst(Arrays.copyOf(aFuzzyKeysData.getFirst(), 
aFuzzyKeysData.getFirst().length));
-091  
p.setSecond(Arrays.copyOf(aFuzzyKeysData.getSecond(), 
aFuzzyKeysData.getSecond().length));
-092
-093      // update mask ( 0 -> -1 (0xff), 1 -> 2)
-094  
p.setSecond(preprocessMask(p.getSecond()));
-095  preprocessSearchKey(p);
-096
-097  fuzzyKeyDataCopy.add(p);
-098}
-099this.fuzzyKeysData = 
fuzzyKeyDataCopy;
-100this.tracker = new RowTracker();
-101  }
-102
+024import java.util.Objects;
+025import java.util.PriorityQueue;
+026
+027import org.apache.hadoop.hbase.Cell;
+028import 
org.apache.hadoop.hbase.CellComparator;
+029import 
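For readers of this excerpt, a minimal usage sketch of the (row key, fuzzy info) pairs described in the FuzzyRowFilter javadoc above. The Table handle and the userId_actionId_year_month key layout are assumptions for illustration, not taken from the commit:

import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.FuzzyRowFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;

static void scanAction99InJanuary(Table table) throws IOException {
  // Fix actionId "99" and month "01"; leave userId (4 bytes) and year (4 bytes) fuzzy.
  byte[] rowKeyMask = Bytes.toBytesBinary("????_99_????_01");
  byte[] fuzzyInfo = {1,1,1,1, 0, 0,0, 0, 1,1,1,1, 0, 0,0}; // 0 = byte must match, 1 = any byte
  FuzzyRowFilter filter = new FuzzyRowFilter(Arrays.asList(new Pair<>(rowKeyMask, fuzzyInfo)));
  Scan scan = new Scan().setFilter(filter);
  try (ResultScanner scanner = table.getScanner(scan)) {
    scanner.forEach(r -> System.out.println(Bytes.toStringBinary(r.getRow())));
  }
}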

[18/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/filter/FilterList.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/FilterList.html 
b/devapidocs/org/apache/hadoop/hbase/filter/FilterList.html
index 04bffdc..021000b 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/FilterList.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/FilterList.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":42,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":9,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":42,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":9,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -120,7 +120,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public final class FilterList
+public final class FilterList
 extends FilterBase
 Implementation of Filter that represents an ordered List of Filters which will be
 evaluated with a specified boolean operator FilterList.Operator.MUST_PASS_ALL (AND) or
 FilterList.Operator.MUST_PASS_ONE (OR).
@@ -265,24 +265,28 @@ extends 
 
 boolean
+equals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
+
+
+boolean
 filterAllRemaining()
 Filters that never filter all remaining can inherit this 
implementation that
  never stops the filter early.
 
 
-
+
 Filter.ReturnCode
 filterCell(Cellc)
 A way to filter based on the column family, column 
qualifier and/or the column value.
 
 
-
+
 Filter.ReturnCode
 filterKeyValue(Cellc)
 Deprecated.
 
 
-
+
 boolean
 filterRow()
 Filters that never filter by rows based on previously 
gathered state from
@@ -290,14 +294,14 @@ extends 
 
 
-
+
 void
 filterRowCells(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellcells)
 Filters that never filter by modifying the returned List of 
Cells can inherit this
  implementation that does nothing.
 
 
-
+
 boolean
 filterRowKey(byte[]rowKey,
 intoffset,
@@ -306,82 +310,86 @@ extends 
 
 
-
+
 boolean
 filterRowKey(CellfirstRowCell)
 Filters a row based on the row key.
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListFilter
 getFilters()
 Get the filters.
 
 
-
+
 Cell
 getNextCellHint(CellcurrentCell)
 Filters that are not sure which key must be next seeked to, 
can inherit
  this implementation that, by default, returns a null Cell.
 
 
-
+
 FilterList.Operator
 getOperator()
 Get the operator.
 
 
-
+
 boolean
 hasFilterRow()
 Filters that never filter by modifying the returned List of 
Cells can
  inherit this implementation that does nothing.
 
 
-
+
+int
+hashCode()
+
+
 boolean
 isFamilyEssential(byte[]name)
 By default, we require all scan's column families to be 
present.
 
 
-
+
 boolean
 isReversed()
 
-
+
 static FilterList
 parseFrom(byte[]pbBytes)
 
-
+
 void
 reset()
 Filters that are purely stateless and do nothing in their 
reset() methods can inherit
  this null/empty implementation.
 
 
-
+
 void
 setReversed(booleanreversed)
 alter the reversed scan flag
 
 
-
+
 int
 size()
 
-
+
 byte[]
 toByteArray()
 Return length 0 byte array for Filters that don't require 
special serialization
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString()
 Return filter's info for debugging and logging 
purpose.
 
 
-
+
 Cell
 transformCell(Cellc)
 By default no transformation takes place
@@ -402,7 +410,7 @@ extends 
 
 Methods inherited from classjava.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or 

[05/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/src-html/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.html
index d206019..9475950 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.html
@@ -29,147 +29,162 @@
 021
 022import java.io.IOException;
 023import java.util.ArrayList;
-024
-025import 
org.apache.hadoop.hbase.ByteBufferExtendedCell;
-026import org.apache.hadoop.hbase.Cell;
-027import 
org.apache.hadoop.hbase.PrivateCellUtil;
-028import 
org.apache.yetus.audience.InterfaceAudience;
-029import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-030import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-031import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-032import 
org.apache.hadoop.hbase.util.Bytes;
-033
-034import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-035import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-036import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
-037
-038/**
-039 * This filter is used for selecting only 
those keys with columns that matches
-040 * a particular prefix. For example, if 
prefix is 'an', it will pass keys with
-041 * columns like 'and', 'anti' but not 
keys with columns like 'ball', 'act'.
-042 */
-043@InterfaceAudience.Public
-044public class ColumnPrefixFilter extends 
FilterBase {
-045  protected byte [] prefix = null;
-046
-047  public ColumnPrefixFilter(final byte [] 
prefix) {
-048this.prefix = prefix;
-049  }
-050
-051  public byte[] getPrefix() {
-052return prefix;
-053  }
-054
-055  @Override
-056  public boolean filterRowKey(Cell cell) 
throws IOException {
-057// Impl in FilterBase might do 
unnecessary copy for Off heap backed Cells.
-058return false;
-059  }
-060
-061  @Deprecated
-062  @Override
-063  public ReturnCode filterKeyValue(final 
Cell c) {
-064return filterCell(c);
-065  }
-066
-067  @Override
-068  public ReturnCode filterCell(final Cell 
cell) {
-069if (this.prefix == null) {
-070  return ReturnCode.INCLUDE;
-071} else {
-072  return filterColumn(cell);
-073}
-074  }
-075
-076  public ReturnCode filterColumn(Cell 
cell) {
-077int qualifierLength = 
cell.getQualifierLength();
-078    if (qualifierLength < prefix.length) {
-079      int cmp = compareQualifierPart(cell, qualifierLength, this.prefix);
-080      if (cmp <= 0) {
-081        return ReturnCode.SEEK_NEXT_USING_HINT;
-082      } else {
-083        return ReturnCode.NEXT_ROW;
-084      }
-085    } else {
-086      int cmp = compareQualifierPart(cell, this.prefix.length, this.prefix);
-087      if (cmp < 0) {
-088        return ReturnCode.SEEK_NEXT_USING_HINT;
-089      } else if (cmp > 0) {
-090        return ReturnCode.NEXT_ROW;
-091      } else {
-092        return ReturnCode.INCLUDE;
-093      }
-094}
-095  }
-096
-097  private static int 
compareQualifierPart(Cell cell, int length, byte[] prefix) {
-098if (cell instanceof 
ByteBufferExtendedCell) {
-099  return 
ByteBufferUtils.compareTo(((ByteBufferExtendedCell) 
cell).getQualifierByteBuffer(),
-100  ((ByteBufferExtendedCell) 
cell).getQualifierPosition(), length, prefix, 0, length);
-101}
-102return 
Bytes.compareTo(cell.getQualifierArray(), cell.getQualifierOffset(), length, 
prefix, 0,
-103length);
-104  }
-105
-106  public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) {
-107
Preconditions.checkArgument(filterArguments.size() == 1,
-108"Expected 
1 but got: %s", filterArguments.size());
-109byte [] columnPrefix = 
ParseFilter.removeQuotesFromByteArray(filterArguments.get(0));
-110return new 
ColumnPrefixFilter(columnPrefix);
-111  }
-112
-113  /**
-114   * @return The filter serialized using 
pb
-115   */
-116  @Override
-117  public byte [] toByteArray() {
-118
FilterProtos.ColumnPrefixFilter.Builder builder =
-119  
FilterProtos.ColumnPrefixFilter.newBuilder();
-120if (this.prefix != null) 
builder.setPrefix(UnsafeByteOperations.unsafeWrap(this.prefix));
-121return 
builder.build().toByteArray();
-122  }
-123
-124  /**
-125   * @param pbBytes A pb serialized 
{@link ColumnPrefixFilter} instance
-126   * @return An instance of {@link ColumnPrefixFilter} made from <code>bytes</code>
-127   * @throws 
org.apache.hadoop.hbase.exceptions.DeserializationException
-128   * @see #toByteArray
-129   */
-130  public static ColumnPrefixFilter 
parseFrom(final byte [] pbBytes)
-131  throws DeserializationException {
-132FilterProtos.ColumnPrefixFilter 
proto;
-133try {
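A usage sketch to accompany the ColumnPrefixFilter source above; the column family name "d" is invented and the prefix 'an' is the javadoc's own example:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

static Scan columnsStartingWithAn() {
  // Passes qualifiers such as "and" or "anti", skips "ball" or "act".
  return new Scan()
      .addFamily(Bytes.toBytes("d"))                           // hypothetical column family
      .setFilter(new ColumnPrefixFilter(Bytes.toBytes("an")));
}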

[15/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/filter/KeyOnlyFilter.KeyOnlyByteBufferExtendedCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/KeyOnlyFilter.KeyOnlyByteBufferExtendedCell.html
 
b/devapidocs/org/apache/hadoop/hbase/filter/KeyOnlyFilter.KeyOnlyByteBufferExtendedCell.html
index b91abdd..96c4516 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/filter/KeyOnlyFilter.KeyOnlyByteBufferExtendedCell.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/filter/KeyOnlyFilter.KeyOnlyByteBufferExtendedCell.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class KeyOnlyFilter.KeyOnlyByteBufferExtendedCell
+static class KeyOnlyFilter.KeyOnlyByteBufferExtendedCell
 extends ByteBufferExtendedCell
 
 
@@ -425,7 +425,7 @@ extends 
 
 FIXED_OVERHEAD
-public static finalint FIXED_OVERHEAD
+public static finalint FIXED_OVERHEAD
 
 
 
@@ -434,7 +434,7 @@ extends 
 
 cell
-privateByteBufferExtendedCell cell
+privateByteBufferExtendedCell cell
 
 
 
@@ -443,7 +443,7 @@ extends 
 
 lenAsVal
-privateboolean lenAsVal
+privateboolean lenAsVal
 
 
 
@@ -460,7 +460,7 @@ extends 
 
 KeyOnlyByteBufferExtendedCell
-publicKeyOnlyByteBufferExtendedCell(ByteBufferExtendedCellc,
+publicKeyOnlyByteBufferExtendedCell(ByteBufferExtendedCellc,
  booleanlenAsVal)
 
 
@@ -478,7 +478,7 @@ extends 
 
 getRowArray
-publicbyte[]getRowArray()
+publicbyte[]getRowArray()
 Description copied from 
interface:Cell
 Contiguous raw bytes that may start at any index in the 
containing array. Max length is
  Short.MAX_VALUE which is 32,767 bytes.
@@ -494,7 +494,7 @@ extends 
 
 getRowOffset
-publicintgetRowOffset()
+publicintgetRowOffset()
 
 Returns:
 Array index of first row byte
@@ -507,7 +507,7 @@ extends 
 
 getRowLength
-publicshortgetRowLength()
+publicshortgetRowLength()
 
 Returns:
 Number of row bytes. Must be < rowArray.length - offset.
@@ -520,7 +520,7 @@ extends 
 
 getFamilyArray
-publicbyte[]getFamilyArray()
+publicbyte[]getFamilyArray()
 Description copied from 
interface:Cell
 Contiguous bytes composed of legal HDFS filename characters 
which may start at any index in the
  containing array. Max length is Byte.MAX_VALUE, which is 127 bytes.
@@ -536,7 +536,7 @@ extends 
 
 getFamilyOffset
-publicintgetFamilyOffset()
+publicintgetFamilyOffset()
 
 Returns:
 Array index of first family byte
@@ -549,7 +549,7 @@ extends 
 
 getFamilyLength
-publicbytegetFamilyLength()
+publicbytegetFamilyLength()
 
 Returns:
 Number of family bytes.  Must be < familyArray.length - offset.
@@ -562,7 +562,7 @@ extends 
 
 getQualifierArray
-publicbyte[]getQualifierArray()
+publicbyte[]getQualifierArray()
 Description copied from 
interface:Cell
 Contiguous raw bytes that may start at any index in the 
containing array.
 
@@ -577,7 +577,7 @@ extends 
 
 getQualifierOffset
-publicintgetQualifierOffset()
+publicintgetQualifierOffset()
 
 Returns:
 Array index of first qualifier byte
@@ -590,7 +590,7 @@ extends 
 
 getQualifierLength
-publicintgetQualifierLength()
+publicintgetQualifierLength()
 
 Returns:
 Number of qualifier bytes.  Must be < qualifierArray.length - 
offset.
@@ -603,7 +603,7 @@ extends 
 
 getTimestamp
-publiclonggetTimestamp()
+publiclonggetTimestamp()
 
 Returns:
 Long value representing time at which this cell was "Put" into the row.  
Typically
@@ -617,7 +617,7 @@ extends 
 
 getTypeByte
-publicbytegetTypeByte()
+publicbytegetTypeByte()
 
 Returns:
 The byte representation of the KeyValue.TYPE of this cell: one of Put, 
Delete, etc
@@ -630,7 +630,7 @@ extends 
 
 setSequenceId
-publicvoidsetSequenceId(longseqId)
+publicvoidsetSequenceId(longseqId)
throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
interface:ExtendedCell
 Sets with the given seqId.
@@ -648,7 +648,7 @@ extends 
 
 setTimestamp
-publicvoidsetTimestamp(longts)
+publicvoidsetTimestamp(longts)
   throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
interface:ExtendedCell
 Sets with the given timestamp.
@@ -666,7 +666,7 @@ extends 
 
 setTimestamp
-publicvoidsetTimestamp(byte[]ts)
+publicvoidsetTimestamp(byte[]ts)
   throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
interface:ExtendedCell
 Sets with the given timestamp.
@@ -684,7 +684,7 @@ extends 
 
 getSequenceId
-publiclonggetSequenceId()
+publiclonggetSequenceId()
 Description copied from 
interface:ExtendedCell
 A region-specific unique monotonically increasing sequence 
ID given to each Cell. It always
  exists for cells in the 
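KeyOnlyFilter.KeyOnlyByteBufferExtendedCell above is the value-stripped cell wrapper that KeyOnlyFilter hands back. A sketch of the usual client-side use, under the assumption that only row keys are needed:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;

static Scan keysOnly() {
  // Values are replaced by empty payloads (or by their length, with new KeyOnlyFilter(true)),
  // so far less data crosses the wire when only keys matter.
  return new Scan().setFilter(new KeyOnlyFilter());
}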

[07/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/security/access/AccessControlFilter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/AccessControlFilter.html 
b/devapidocs/org/apache/hadoop/hbase/security/access/AccessControlFilter.html
index 37027c0..4e28610 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/security/access/AccessControlFilter.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/access/AccessControlFilter.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":9,"i3":10,"i4":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":9,"i5":10,"i6":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -120,7 +120,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-class AccessControlFilter
+class AccessControlFilter
 extends FilterBase
 NOTE: for internal use only by AccessController 
implementation
 
@@ -268,29 +268,37 @@ extends Method and Description
 
 
+boolean
+equals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
+
+
 Filter.ReturnCode
 filterCell(Cellcell)
 A way to filter based on the column family, column 
qualifier and/or the column value.
 
 
-
+
 boolean
 filterRowKey(Cellcell)
 Filters a row based on the row key.
 
 
-
+
+int
+hashCode()
+
+
 static AccessControlFilter
 parseFrom(byte[]pbBytes)
 
-
+
 void
 reset()
 Filters that are purely stateless and do nothing in their 
reset() methods can inherit
  this null/empty implementation.
 
 
-
+
 byte[]
 toByteArray()
 Return length 0 byte array for Filters that don't require 
special serialization
@@ -316,7 +324,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or interface in java.lang">hashCode, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, https://docs.oracle.com/javase/8/docs/api/ja
 va/lang/Object.html?is-external=true#notifyAll--" title="class or interface in 
java.lang">notifyAll, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--;
 title="class or interface in java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-;
 title="class or interface in java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-;
 title="class or interface in java.lang">wait
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notifyAll--;
 title="class or interface in java.lang">notifyAll, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--;
 title="class or interface in java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.htm
 l?is-external=true#wait-long-" title="class or interface in 
java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-;
 title="class or interface in java.lang">wait
 
 
 
@@ -338,7 +346,7 @@ extends 
 
 authManager
-privateTableAuthManager 
authManager
+privateTableAuthManager 
authManager
 
 
 
@@ -347,7 +355,7 @@ extends 
 
 table
-privateTableName table
+privateTableName table
 
 
 
@@ -356,7 +364,7 @@ extends 
 
 user
-privateUser user
+privateUser user
 
 
 
@@ -365,7 +373,7 @@ extends 
 
 

[42/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.html
index c6137d0..4ca69da 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.html
@@ -29,118 +29,133 @@
 021
 022import java.io.IOException;
 023import java.util.ArrayList;
-024
-025import org.apache.hadoop.hbase.Cell;
-026import 
org.apache.yetus.audience.InterfaceAudience;
-027import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-028import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-029
-030import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-031import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-032
-033/**
-034 * Simple filter that returns first N 
columns on row only.
-035 * This filter was written to test 
filters in Get and as soon as it gets
-036 * its quota of columns, {@link 
#filterAllRemaining()} returns true.  This
-037 * makes this filter unsuitable as a Scan 
filter.
-038 */
-039@InterfaceAudience.Public
-040public class ColumnCountGetFilter extends 
FilterBase {
-041  private int limit = 0;
-042  private int count = 0;
-043
-044  public ColumnCountGetFilter(final int 
n) {
-045    Preconditions.checkArgument(n >= 0, "limit be positive %s", n);
-046this.limit = n;
-047  }
-048
-049  public int getLimit() {
-050return limit;
-051  }
-052
-053  @Override
-054  public boolean filterRowKey(Cell cell) 
throws IOException {
-055// Impl in FilterBase might do 
unnecessary copy for Off heap backed Cells.
-056if (filterAllRemaining()) return 
true;
-057return false;
-058  }
-059
-060  @Override
-061  public boolean filterAllRemaining() {
-062    return this.count > this.limit;
-063  }
-064
-065  @Deprecated
-066  @Override
-067  public ReturnCode filterKeyValue(final 
Cell c) {
-068return filterCell(c);
-069  }
-070
-071  @Override
-072  public ReturnCode filterCell(final Cell 
c) {
-073this.count++;
-074return filterAllRemaining() ? 
ReturnCode.NEXT_COL : ReturnCode.INCLUDE_AND_NEXT_COL;
-075  }
-076
-077  @Override
-078  public void reset() {
-079this.count = 0;
-080  }
-081
-082  public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) {
-083
Preconditions.checkArgument(filterArguments.size() == 1,
-084"Expected 
1 but got: %s", filterArguments.size());
-085int limit = 
ParseFilter.convertByteArrayToInt(filterArguments.get(0));
-086return new 
ColumnCountGetFilter(limit);
-087  }
-088
-089  /**
-090   * @return The filter serialized using 
pb
-091   */
-092  @Override
-093  public byte [] toByteArray() {
-094
FilterProtos.ColumnCountGetFilter.Builder builder =
-095  
FilterProtos.ColumnCountGetFilter.newBuilder();
-096builder.setLimit(this.limit);
-097return 
builder.build().toByteArray();
-098  }
-099
-100  /**
-101   * @param pbBytes A pb serialized 
{@link ColumnCountGetFilter} instance
-102   * @return An instance of {@link ColumnCountGetFilter} made from <code>bytes</code>
-103   * @throws 
org.apache.hadoop.hbase.exceptions.DeserializationException
-104   * @see #toByteArray
-105   */
-106  public static ColumnCountGetFilter 
parseFrom(final byte [] pbBytes)
-107  throws DeserializationException {
-108FilterProtos.ColumnCountGetFilter 
proto;
-109try {
-110  proto = 
FilterProtos.ColumnCountGetFilter.parseFrom(pbBytes);
-111} catch 
(InvalidProtocolBufferException e) {
-112  throw new 
DeserializationException(e);
-113}
-114return new 
ColumnCountGetFilter(proto.getLimit());
-115  }
-116
-117  /**
-118   * @param o the other filter to compare 
with
-119   * @return true if and only if the 
fields of the filter that are serialized
-120   * are equal to the corresponding 
fields in other.  Used for testing.
-121   */
-122  @Override
-123  boolean areSerializedFieldsEqual(Filter 
o) {
-124if (o == this) return true;
-125if (!(o instanceof 
ColumnCountGetFilter)) return false;
-126
-127ColumnCountGetFilter other = 
(ColumnCountGetFilter)o;
-128return this.getLimit() == 
other.getLimit();
-129  }
-130
-131  @Override
-132  public String toString() {
-133return 
this.getClass().getSimpleName() + " " + this.limit;
-134  }
-135}
+024import java.util.Objects;
+025
+026import org.apache.hadoop.hbase.Cell;
+027import 
org.apache.yetus.audience.InterfaceAudience;
+028import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
+029import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
+030
+031import 
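To illustrate the ColumnCountGetFilter source above (a Get-oriented filter, per its javadoc), a short sketch; the Table handle and row key are assumed to come from elsewhere:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;

static Result firstTenColumns(Table table, byte[] row) throws IOException {
  Get get = new Get(row);
  // Once 10 columns have been returned, filterAllRemaining() flips to true and the Get stops.
  get.setFilter(new ColumnCountGetFilter(10));
  return table.get(get);
}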

[28/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/PrefixFilter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/PrefixFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/PrefixFilter.html
index 96bdd09..2553d09 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/PrefixFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/PrefixFilter.html
@@ -28,143 +28,158 @@
 020package org.apache.hadoop.hbase.filter;
 021
 022import java.util.ArrayList;
-023
-024import 
org.apache.hadoop.hbase.ByteBufferExtendedCell;
-025import org.apache.hadoop.hbase.Cell;
-026import 
org.apache.yetus.audience.InterfaceAudience;
-027import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-028import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-029import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-030import 
org.apache.hadoop.hbase.util.Bytes;
-031
-032import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-033import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-034import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
-035
-036/**
-037 * Pass results that have same row 
prefix.
-038 */
-039@InterfaceAudience.Public
-040public class PrefixFilter extends 
FilterBase {
-041  protected byte [] prefix = null;
-042  protected boolean passedPrefix = 
false;
-043  protected boolean filterRow = true;
-044
-045  public PrefixFilter(final byte [] 
prefix) {
-046this.prefix = prefix;
-047  }
-048
-049  public byte[] getPrefix() {
-050return prefix;
-051  }
-052
-053  @Override
-054  public boolean filterRowKey(Cell 
firstRowCell) {
-055if (firstRowCell == null || 
this.prefix == null)
-056  return true;
-057if (filterAllRemaining()) return 
true;
-058int length = 
firstRowCell.getRowLength();
-059    if (length < prefix.length) return true;
-060    // if they are equal, return false => pass row
-061// else return true, filter row
-062// if we are passed the prefix, set 
flag
-063int cmp;
-064if (firstRowCell instanceof 
ByteBufferExtendedCell) {
-065  cmp = 
ByteBufferUtils.compareTo(((ByteBufferExtendedCell) 
firstRowCell).getRowByteBuffer(),
-066  ((ByteBufferExtendedCell) 
firstRowCell).getRowPosition(), this.prefix.length,
-067  this.prefix, 0, 
this.prefix.length);
-068} else {
-069  cmp = 
Bytes.compareTo(firstRowCell.getRowArray(), firstRowCell.getRowOffset(),
-070  this.prefix.length, 
this.prefix, 0, this.prefix.length);
-071}
-072    if ((!isReversed() && cmp > 0) || (isReversed() && cmp < 0)) {
-073  passedPrefix = true;
-074}
-075filterRow = (cmp != 0);
-076return filterRow;
-077  }
-078
-079  @Deprecated
-080  @Override
-081  public ReturnCode filterKeyValue(final 
Cell c) {
-082return filterCell(c);
-083  }
-084
-085  @Override
-086  public ReturnCode filterCell(final Cell 
c) {
-087if (filterRow) return 
ReturnCode.NEXT_ROW;
-088return ReturnCode.INCLUDE;
-089  }
-090
-091  @Override
-092  public boolean filterRow() {
-093return filterRow;
-094  }
-095
-096  @Override
-097  public void reset() {
-098filterRow = true;
-099  }
-100
-101  @Override
-102  public boolean filterAllRemaining() {
-103return passedPrefix;
-104  }
-105
-106  public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) {
-107
Preconditions.checkArgument(filterArguments.size() == 1,
-108"Expected 
1 but got: %s", filterArguments.size());
-109byte [] prefix = 
ParseFilter.removeQuotesFromByteArray(filterArguments.get(0));
-110return new PrefixFilter(prefix);
-111  }
-112
-113  /**
-114   * @return The filter serialized using 
pb
-115   */
-116  @Override
-117  public byte [] toByteArray() {
-118FilterProtos.PrefixFilter.Builder 
builder =
-119  
FilterProtos.PrefixFilter.newBuilder();
-120if (this.prefix != null) 
builder.setPrefix(UnsafeByteOperations.unsafeWrap(this.prefix));
-121return 
builder.build().toByteArray();
-122  }
-123
-124  /**
-125   * @param pbBytes A pb serialized 
{@link PrefixFilter} instance
-126   * @return An instance of {@link PrefixFilter} made from <code>bytes</code>
-127   * @throws 
org.apache.hadoop.hbase.exceptions.DeserializationException
-128   * @see #toByteArray
-129   */
-130  public static PrefixFilter 
parseFrom(final byte [] pbBytes)
-131  throws DeserializationException {
-132FilterProtos.PrefixFilter proto;
-133try {
-134  proto = 
FilterProtos.PrefixFilter.parseFrom(pbBytes);
-135} catch 
(InvalidProtocolBufferException e) {
-136  throw new 
DeserializationException(e);
-137}
-138return new 
PrefixFilter(proto.hasPrefix()?proto.getPrefix().toByteArray():null);
-139  }
-140
-141  /**
-142   * @param o the other 
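A usage sketch for the PrefixFilter source above; the "user42#" row-key prefix is invented and an open Table handle is assumed:

import java.io.IOException;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

static void printRowsOfUser42(Table table) throws IOException {
  // The filter alone still starts reading at the beginning of the table;
  // Scan.setRowPrefixFilter(...) additionally narrows the start/stop rows.
  Scan scan = new Scan().setFilter(new PrefixFilter(Bytes.toBytes("user42#")));
  try (ResultScanner rs = table.getScanner(scan)) {
    rs.forEach(r -> System.out.println(Bytes.toStringBinary(r.getRow())));
  }
}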

[47/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html 
b/apidocs/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html
index 5d794f4..b37a5d1 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":9,"i1":10,"i2":10,"i3":42,"i4":10,"i5":10,"i6":9,"i7":10,"i8":10};
+var methods = 
{"i0":9,"i1":10,"i2":10,"i3":10,"i4":42,"i5":10,"i6":10,"i7":10,"i8":9,"i9":10,"i10":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -120,7 +120,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class InclusiveStopFilter
+public class InclusiveStopFilter
 extends org.apache.hadoop.hbase.filter.FilterBase
 A Filter that stops after the given row.  There is no 
"RowStopFilter" because
  the Scan spec allows you to specify a stop row.
@@ -197,44 +197,52 @@ extends org.apache.hadoop.hbase.filter.FilterBase
 
 
 boolean
+equals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
+
+
+boolean
 filterAllRemaining()
 Filters that never filter all remaining can inherit this 
implementation that
  never stops the filter early.
 
 
-
+
 Filter.ReturnCode
 filterCell(Cellc)
 A way to filter based on the column family, column 
qualifier and/or the column value.
 
 
-
+
 Filter.ReturnCode
 filterKeyValue(Cellc)
 Deprecated.
 
 
-
+
 boolean
 filterRowKey(CellfirstRowCell)
 Filters a row based on the row key.
 
 
-
+
 byte[]
 getStopRowKey()
 
-
+
+int
+hashCode()
+
+
 static InclusiveStopFilter
 parseFrom(byte[]pbBytes)
 
-
+
 byte[]
 toByteArray()
 Return length 0 byte array for Filters that don't require 
special serialization
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString()
 Return filter's info for debugging and logging 
purpose.
@@ -260,7 +268,7 @@ extends org.apache.hadoop.hbase.filter.FilterBase
 
 
 Methods inherited from classjava.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or interface in java.lang">hashCode, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, https://docs.oracle.com/javase/8/docs/api/ja
 va/lang/Object.html?is-external=true#notifyAll--" title="class or interface in 
java.lang">notifyAll, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--;
 title="class or interface in java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-;
 title="class or interface in java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-;
 title="class or interface in java.lang">wait
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notifyAll--;
 title="class or interface in java.lang">notifyAll, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--;
 title="class or interface in java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.htm
 

[46/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.html 
b/apidocs/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.html
index 8b1ecf3..f1e0686 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":9,"i1":10,"i2":10,"i3":10,"i4":42,"i5":10,"i6":10,"i7":10,"i8":9,"i9":10,"i10":10,"i11":10};
+var methods = 
{"i0":9,"i1":10,"i2":10,"i3":10,"i4":10,"i5":42,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":10,"i12":10,"i13":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -120,7 +120,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class MultipleColumnPrefixFilter
+public class MultipleColumnPrefixFilter
 extends org.apache.hadoop.hbase.filter.FilterBase
 This filter is used for selecting only those keys with columns that matches
 a particular prefix. For example, if prefix is 'an', it will pass keys with
 columns like 'and', 'anti' but not keys with columns like 'ball', 'act'.
@@ -214,55 +214,63 @@ extends org.apache.hadoop.hbase.filter.FilterBase
 createTreeSet()
 
 
+boolean
+equals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
+
+
 Filter.ReturnCode
 filterCell(Cellc)
 A way to filter based on the column family, column 
qualifier and/or the column value.
 
 
-
+
 Filter.ReturnCode
 filterColumn(Cellcell)
 
-
+
 Filter.ReturnCode
 filterKeyValue(Cellc)
 Deprecated.
 
 
-
+
 boolean
 filterRowKey(Cellcell)
 Filters a row based on the row key.
 
 
-
+
 Cell
 getNextCellHint(Cellcell)
 Filters that are not sure which key must be next seeked to, 
can inherit
  this implementation that, by default, returns a null Cell.
 
 
-
+
 byte[][]
 getPrefix()
 
-
+
+int
+hashCode()
+
+
 static MultipleColumnPrefixFilter
 parseFrom(byte[]pbBytes)
 
-
+
 byte[]
 toByteArray()
 Return length 0 byte array for Filters that don't require 
special serialization
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString()
 Return filter's info for debugging and logging 
purpose.
 
 
-
+
 protected https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString(intmaxPrefixes)
 
@@ -286,7 +294,7 @@ extends org.apache.hadoop.hbase.filter.FilterBase
 
 
 Methods inherited from classjava.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or interface in java.lang">hashCode, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, https://docs.oracle.com/javase/8/docs/api/ja
 va/lang/Object.html?is-external=true#notifyAll--" title="class or interface in 
java.lang">notifyAll, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--;
 title="class or interface in java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-;
 title="class or interface in java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-;
 title="class or interface in java.lang">wait
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 

[10/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/filter/TimestampsFilter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/TimestampsFilter.html 
b/devapidocs/org/apache/hadoop/hbase/filter/TimestampsFilter.html
index dae8455..f103137 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/TimestampsFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/TimestampsFilter.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":9,"i2":10,"i3":42,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":9,"i10":10,"i11":10,"i12":10};
+var methods = 
{"i0":10,"i1":9,"i2":10,"i3":10,"i4":42,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":9,"i12":10,"i13":10,"i14":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -120,7 +120,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class TimestampsFilter
+public class TimestampsFilter
 extends FilterBase
 Filter that returns only cells whose timestamp (version) is
  in the specified list of timestamps (versions).
@@ -239,60 +239,68 @@ extends createFilterFromArguments(https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
+boolean
+equals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
+
+
 Filter.ReturnCode
 filterCell(Cellc)
 A way to filter based on the column family, column 
qualifier and/or the column value.
 
 
-
+
 Filter.ReturnCode
 filterKeyValue(Cellc)
 Deprecated.
 
 
-
+
 boolean
 filterRowKey(Cellcell)
 Filters a row based on the row key.
 
 
-
+
 long
 getMin()
 Gets the minimum timestamp requested by filter.
 
 
-
+
 Cell
 getNextCellHint(CellcurrentCell)
 Pick the next cell that the scanner should seek to.
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttps://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
 getTimestamps()
 
-
+
+int
+hashCode()
+
+
 private void
 init()
 
-
+
 static TimestampsFilter
 parseFrom(byte[]pbBytes)
 
-
+
 byte[]
 toByteArray()
 Return length 0 byte array for Filters that don't require 
special serialization
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString()
 Return filter's info for debugging and logging 
purpose.
 
 
-
+
 protected https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString(intmaxTimestamps)
 
@@ -316,7 +324,7 @@ extends 
 
 Methods inherited from classjava.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or interface in java.lang">hashCode, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, https://docs.oracle.com/javase/8/docs/api/ja
 va/lang/Object.html?is-external=true#notifyAll--" title="class or interface in 
java.lang">notifyAll, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--;
 title="class or interface in java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-;
 title="class or interface in java.lang">wait, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-;
 title="class or interface in java.lang">wait
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, 

[11/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html 
b/devapidocs/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html
index dec3733..037ad83 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":9,"i3":10,"i4":10,"i5":42,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":42,"i14":10,"i15":10,"i16":10,"i17":9,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10};
+var methods = 
{"i0":10,"i1":10,"i2":9,"i3":10,"i4":10,"i5":10,"i6":42,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":42,"i15":10,"i16":10,"i17":10,"i18":10,"i19":9,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -124,7 +124,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class SingleColumnValueFilter
+public class SingleColumnValueFilter
 extends FilterBase
 This filter is used to filter cells based on value. It takes a CompareFilter.CompareOp
 operator (equal, greater, not equal, etc), and either a byte [] value or
 a ByteArrayComparable.
@@ -337,22 +337,26 @@ extends createFilterFromArguments(https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
+boolean
+equals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
+
+
 Filter.ReturnCode
 filterCell(Cellc)
 A way to filter based on the column family, column 
qualifier and/or the column value.
 
 
-
+
 private boolean
 filterColumnValue(Cellcell)
 
-
+
 Filter.ReturnCode
 filterKeyValue(Cellc)
 Deprecated.
 
 
-
+
 boolean
 filterRow()
 Filters that never filter by rows based on previously 
gathered state from
@@ -360,37 +364,37 @@ extends 
 
 
-
+
 boolean
 filterRowKey(Cellcell)
 Filters a row based on the row key.
 
 
-
+
 ByteArrayComparable
 getComparator()
 
-
+
 CompareOperator
 getCompareOperator()
 
-
+
 byte[]
 getFamily()
 
-
+
 boolean
 getFilterIfMissing()
 Get whether entire row should be filtered if column is not 
found.
 
 
-
+
 boolean
 getLatestVersionOnly()
 Get whether only the latest version of the column value 
should be compared.
 
 
-
+
 CompareFilter.CompareOp
 getOperator()
 Deprecated.
@@ -398,53 +402,57 @@ extends 
 
 
-
+
 byte[]
 getQualifier()
 
-
+
 boolean
 hasFilterRow()
 Filters that never filter by modifying the returned List of 
Cells can
  inherit this implementation that does nothing.
 
 
-
+
+int
+hashCode()
+
+
 boolean
 isFamilyEssential(byte[]name)
 The only CF this filter needs is given column family.
 
 
-
+
 static SingleColumnValueFilter
 parseFrom(byte[]pbBytes)
 
-
+
 void
 reset()
 Filters that are purely stateless and do nothing in their 
reset() methods can inherit
  this null/empty implementation.
 
 
-
+
 void
 setFilterIfMissing(booleanfilterIfMissing)
 Set whether entire row should be filtered if column is not 
found.
 
 
-
+
 void
 setLatestVersionOnly(booleanlatestVersionOnly)
 Set whether only the latest version of the column value 
should be compared.
 
 
-
+
 byte[]
 toByteArray()
 Return length 0 byte array for Filters that don't require 
special serialization
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString()
 Return filter's info for debugging and logging 
purpose.
@@ -470,7 +478,7 @@ extends 
 
 Methods inherited from classjava.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or interface in java.lang">hashCode, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 

[04/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/src-html/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/ColumnValueFilter.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
index ee5595f..961079a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
@@ -29,224 +29,240 @@
 021
 022import java.io.IOException;
 023import java.util.ArrayList;
-024
-025import org.apache.hadoop.hbase.Cell;
-026import 
org.apache.hadoop.hbase.CellUtil;
-027import 
org.apache.hadoop.hbase.CompareOperator;
-028import 
org.apache.hadoop.hbase.PrivateCellUtil;
-029import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-030import 
org.apache.hadoop.hbase.util.Bytes;
-031import 
org.apache.yetus.audience.InterfaceAudience;
-032
-033import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-034import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-035import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
-036
-037import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-038import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-039import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-040
-041/**
-042 * Different from {@link SingleColumnValueFilter} which returns an <b>entire</b> row
-043 * when specified condition is matched, {@link ColumnValueFilter} return the matched cell only.
-044 * <p>
-045 * This filter is used to filter cells based on column and value.
-046 * It takes a {@link org.apache.hadoop.hbase.CompareOperator} operator (<, <=, =, !=, >, >=), and
-047 * and a {@link ByteArrayComparable} comparator.
-048 */
-049@InterfaceAudience.Public
-050public class ColumnValueFilter extends 
FilterBase {
-051  private final byte[] family;
-052  private final byte[] qualifier;
-053  private final CompareOperator op;
-054  private final ByteArrayComparable 
comparator;
-055
-056  // This flag is used to speed up 
seeking cells when matched column is found, such that following
-057  // columns in the same row can be 
skipped faster by NEXT_ROW instead of NEXT_COL.
-058  private boolean columnFound = false;
-059
-060  public ColumnValueFilter(final byte[] 
family, final byte[] qualifier,
-061   final 
CompareOperator op, final byte[] value) {
-062this(family, qualifier, op, new 
BinaryComparator(value));
-063  }
-064
-065  public ColumnValueFilter(final byte[] 
family, final byte[] qualifier,
-066   final 
CompareOperator op,
-067   final 
ByteArrayComparable comparator) {
-068this.family = 
Preconditions.checkNotNull(family, "family should not be null.");
-069this.qualifier = qualifier == null ? 
new byte[0] : qualifier;
-070this.op = 
Preconditions.checkNotNull(op, "CompareOperator should not be null");
-071this.comparator = 
Preconditions.checkNotNull(comparator, "Comparator should not be null");
-072  }
-073
-074  /**
-075   * @return operator
-076   */
-077  public CompareOperator 
getCompareOperator() {
-078return op;
-079  }
-080
-081  /**
-082   * @return the comparator
-083   */
-084  public ByteArrayComparable 
getComparator() {
-085return comparator;
-086  }
-087
-088  /**
-089   * @return the column family
-090   */
-091  public byte[] getFamily() {
-092return family;
-093  }
-094
-095  /**
-096   * @return the qualifier
-097   */
-098  public byte[] getQualifier() {
-099return qualifier;
-100  }
-101
-102  @Override
-103  public void reset() throws IOException 
{
-104columnFound = false;
-105  }
-106
-107  @Override
-108  public boolean filterRowKey(Cell cell) 
throws IOException {
-109return false;
-110  }
-111
-112  @Override
-113  public ReturnCode filterCell(Cell c) 
throws IOException {
-114// 1. Check column match
-115if (!CellUtil.matchingColumn(c, 
this.family, this.qualifier)) {
-116  return columnFound ? 
ReturnCode.NEXT_ROW : ReturnCode.NEXT_COL;
-117}
-118// Column found
-119columnFound = true;
-120// 2. Check value match:
-121// True means filter out, just skip 
this cell, else include it.
-122return 
compareValue(getCompareOperator(), getComparator(), c) ?
-123  ReturnCode.SKIP : 
ReturnCode.INCLUDE;
-124  }
-125
-126  /**
-127   * This method is used to determine a 
cell should be included or filtered out.
-128   * @param op one of operators {@link 
CompareOperator}
-129   * @param comparator comparator used to 
compare cells.
-130   * @param cell cell to be compared.
-131   * @return true means cell should be 
filtered out, included otherwise.
-132   */
-133  private boolean compareValue(final 
CompareOperator op, final 
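To go with the ColumnValueFilter source above, a sketch that contrasts it with SingleColumnValueFilter; the column coordinates and value are invented:

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.ColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

static Scan matchingStatusCellsOnly() {
  // Unlike SingleColumnValueFilter, only the matching cell itself comes back,
  // not the entire row that contains it.
  return new Scan().setFilter(new ColumnValueFilter(
      Bytes.toBytes("d"), Bytes.toBytes("status"),       // hypothetical family / qualifier
      CompareOperator.EQUAL, Bytes.toBytes("active")));
}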

[31/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RowRange.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RowRange.html b/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RowRange.html
index 4804996..c3cf972 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RowRange.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RowRange.html
@@ -28,500 +28,538 @@
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Filter to support scanning multiple row key ranges. It can construct the row key ranges from the
 * passed list, which can be accessed by each region server.
 *
 * HBase is quite efficient when scanning only one small row key range. If a user needs to specify
 * multiple row key ranges in one scan, the typical solutions are: 1. through FilterList, which is a
 * list of row key Filters, 2. using the SQL layer over HBase to join two tables, such as Hive or
 * Phoenix. However, both solutions are inefficient: neither can use the range info to perform
 * fast forwarding during the scan, which is quite time consuming. If the number of ranges is
 * quite big (e.g. millions), a join is the proper solution, though it is slow. However, there are
 * cases where the user wants to specify a small number of ranges to scan (e.g. <1000 ranges).
 * Neither solution provides satisfactory performance in that case. MultiRowRangeFilter supports
 * such a use case (scanning multiple row key ranges): it constructs the row key ranges from a
 * user-specified list and performs fast-forwarding during the scan. Thus, the scan will be quite
 * efficient.
 */
@InterfaceAudience.Public
public class MultiRowRangeFilter extends FilterBase {

  private List<RowRange> rangeList;

  private static final int ROW_BEFORE_FIRST_RANGE = -1;
  private boolean EXCLUSIVE = false;
  private boolean done = false;
  private boolean initialized = false;
  private int index;
  private RowRange range;
  private ReturnCode currentReturnCode;

  /**
   * @param list A list of <code>RowRange</code>
   */
  public MultiRowRangeFilter(List<RowRange> list) {
    this.rangeList = sortAndMerge(list);
  }

  @Override
  public boolean filterAllRemaining() {
    return done;
  }

  public List<RowRange> getRowRanges() {
    return this.rangeList;
  }

  @Override
  public boolean filterRowKey(Cell firstRowCell) {
    if (filterAllRemaining()) return true;
    // If it is the first time of running, calculate the current range index for
    // the row key. If index is out of bound which happens when the start row
    // user sets is after the largest stop row of the ranges, stop the scan.
    // If row key is after the current range, find the next range and update index.
    byte[] rowArr = firstRowCell.getRowArray();
    int length = firstRowCell.getRowLength();
    int offset = firstRowCell.getRowOffset();
    if (!initialized
        || !range.contains(rowArr, offset, length)) {
      byte[] rowkey = CellUtil.cloneRow(firstRowCell);
      index = getNextRangeIndex(rowkey);
      if (index >= rangeList.size()) {
        done = true;
        currentReturnCode = ReturnCode.NEXT_ROW;
        return false;
      }
      if (index != ROW_BEFORE_FIRST_RANGE) {
        range = rangeList.get(index);
      } else {
        range = rangeList.get(0);
      }
      if (EXCLUSIVE) {
        EXCLUSIVE = false;
        currentReturnCode = ReturnCode.NEXT_ROW;
        return false;
      }
      if (!initialized) {
        if (index != ROW_BEFORE_FIRST_RANGE) {
          currentReturnCode = ReturnCode.INCLUDE;
        } else {
          currentReturnCode = ReturnCode.SEEK_NEXT_USING_HINT;
        }
        initialized = true;
      } else {
        if (range.contains(rowArr, offset, length)) {
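The excerpt cuts off here in the partial mail. For context, a short sketch of how MultiRowRangeFilter is usually built from RowRange objects on the client side; the row key values are hypothetical, not from the commit.

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter;
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange;
import org.apache.hadoop.hbase.util.Bytes;

public class MultiRowRangeExample {
  public static Scan buildScan() {
    // Two disjoint key ranges; the filter sorts and merges them, then fast-forwards
    // between them instead of visiting every row in between.
    List<RowRange> ranges = Arrays.asList(
        new RowRange(Bytes.toBytes("user-0100"), true, Bytes.toBytes("user-0200"), false),
        new RowRange(Bytes.toBytes("user-0500"), true, Bytes.toBytes("user-0600"), false));
    return new Scan().setFilter(new MultiRowRangeFilter(ranges));
  }
}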

[35/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.html b/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.html
index 8dfb5f1..b025b7e 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.html
@@ -31,260 +31,274 @@
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import org.apache.hadoop.hbase.Cell;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;

import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;

/**
 * Implementation of {@link Filter} that represents an ordered List of Filters which will be
 * evaluated with a specified boolean operator {@link Operator#MUST_PASS_ALL} (<code>AND</code>) or
 * {@link Operator#MUST_PASS_ONE} (<code>OR</code>). Since you can use Filter Lists as children of
 * Filter Lists, you can create a hierarchy of filters to be evaluated. <br>
 * {@link Operator#MUST_PASS_ALL} evaluates lazily: evaluation stops as soon as one filter does not
 * include the Cell. <br>
 * {@link Operator#MUST_PASS_ONE} evaluates non-lazily: all filters are always evaluated. <br>
 * Defaults to {@link Operator#MUST_PASS_ALL}.
 */
@InterfaceAudience.Public
final public class FilterList extends FilterBase {

  /** set operator */
  @InterfaceAudience.Public
  public enum Operator {
    /** !AND */
    MUST_PASS_ALL,
    /** !OR */
    MUST_PASS_ONE
  }

  private Operator operator;
  private FilterListBase filterListBase;

  /**
   * Constructor that takes a set of {@link Filter}s and an operator.
   * @param operator Operator to process filter set with.
   * @param filters Set of row filters.
   */
  public FilterList(final Operator operator, final List<Filter> filters) {
    if (operator == Operator.MUST_PASS_ALL) {
      filterListBase = new FilterListWithAND(filters);
    } else if (operator == Operator.MUST_PASS_ONE) {
      filterListBase = new FilterListWithOR(filters);
    } else {
      throw new IllegalArgumentException("Invalid operator: " + operator);
    }
    this.operator = operator;
  }

  /**
   * Constructor that takes a set of {@link Filter}s. The default operator MUST_PASS_ALL is assumed.
   * All filters are cloned to internal list.
   * @param filters list of filters
   */
  public FilterList(final List<Filter> filters) {
    this(Operator.MUST_PASS_ALL, filters);
  }

  /**
   * Constructor that takes a var arg number of {@link Filter}s. The default operator MUST_PASS_ALL
   * is assumed.
   * @param filters
   */
  public FilterList(final Filter... filters) {
    this(Operator.MUST_PASS_ALL, Arrays.asList(filters));
  }

  /**
   * Constructor that takes an operator.
   * @param operator Operator to process filter set with.
   */
  public FilterList(final Operator operator) {
    this(operator, new ArrayList<>());
  }

  /**
   * Constructor that takes a var arg number of {@link Filter}s and an operator.
   * @param operator Operator to process filter set with.
   * @param filters Filters to use
   */
  public FilterList(final Operator operator, final Filter... filters) {
    this(operator, Arrays.asList(filters));
  }

  /**
   * Get the operator.
   * @return operator
   */
  public Operator getOperator() {
    return operator;
  }

  /**
   * Get the filters.
   * @return filters
   */
  public List<Filter> getFilters() {
    return filterListBase.getFilters();
  }

  public int size() {
    return filterListBase.size();
  }

  public void addFilter(List<Filter> filters) {
    filterListBase.addFilterLists(filters);
  }

  /**
   * Add a filter.
   * @param filter another filter
   */
  public void addFilter(Filter filter) {
    addFilter(Collections.singletonList(filter));
  }

  @Override
  public void reset() throws IOException {
    filterListBase.reset();
  }

  @Override
  public boolean filterRowKey(byte[] rowKey, int offset, int length) throws IOException {
    return filterListBase.filterRowKey(rowKey, offset, length);
  }

  @Override
  public boolean
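The excerpt is truncated above. A minimal sketch of the MUST_PASS_ALL (AND) behaviour described in the class Javadoc, combining two child filters; the qualifier and value used are hypothetical.

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class FilterListAndExample {
  public static Scan buildScan() {
    // MUST_PASS_ALL (AND): a cell is returned only if every child filter includes it,
    // and evaluation stops at the first child that rejects the cell (lazy evaluation).
    FilterList all = new FilterList(FilterList.Operator.MUST_PASS_ALL,
        new QualifierFilter(CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("status"))),
        new ValueFilter(CompareOperator.EQUAL, new SubstringComparator("ACTIVE")));
    return new Scan().setFilter(all);
  }
}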

[16/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html b/devapidocs/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
index fa319cb..3ca642c 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
@@ -18,7 +18,7 @@ (generated Javadoc, devapidocs/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html)

@InterfaceAudience.Public
public class FuzzyRowFilter extends FilterBase

Class description: "This is optimized version of a standard FuzzyRowFilter. Filters data based on
fuzzy row key. Performs fast-forwards during scanning. It takes pairs (row key, fuzzy info) to
match row keys."

Change in this regeneration: FuzzyRowFilter now overrides equals(Object) and hashCode(), so both
are added to the method summary table (alongside filterAllRemaining, filterCell, the deprecated
filterKeyValue, getNextCellHint, the getNextForFuzzyRule overloads, isPreprocessedMask, parseFrom,
preprocessMask, preprocessSearchKey, the satisfies overloads, satisfiesNoUnsafe, toByteArray,
toString and trimTrailingZeroes). The method-index JavaScript variables and table row anchors are
renumbered accordingly, and equals and hashCode drop out of the "Methods inherited from class
java.lang.Object" list.
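Since the page above only describes FuzzyRowFilter in terms of "pairs (row key, fuzzy info)", here is a hedged sketch of what such a pair looks like in client code. It assumes fixed-width row keys of the form "<4-char user id>_login", which is an invented layout, and uses the conventional mask semantics of 0 for a fixed position and 1 for a wildcard position.

import java.util.Arrays;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FuzzyRowFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;

public class FuzzyRowFilterExample {
  public static Scan buildScan() {
    // Template "????_login": the first four bytes may be anything (mask 1),
    // the "_login" suffix must match exactly (mask 0).
    byte[] template = Bytes.toBytes("????_login");
    byte[] fuzzyInfo = new byte[] {1, 1, 1, 1, 0, 0, 0, 0, 0, 0};
    FuzzyRowFilter filter =
        new FuzzyRowFilter(Arrays.asList(new Pair<>(template, fuzzyInfo)));
    return new Scan().setFilter(filter);
  }
}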

[08/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionState.State.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionState.State.html b/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionState.State.html
index d799dd2..0412da9 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionState.State.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/RegionState.State.html
@@ -395,6 +395,18 @@ the order they are declared.
(generated Javadoc, class-use page for RegionState.State)
Two rows are added to the uses table for the new private method
AssignmentManager.transitStateAndUpdate(RegionStateNode regionNode, RegionState.State newState,
RegionState.State... expectedStates); the existing row for
RegionStateStore.updateMetaLocation(RegionInfo regionInfo, ServerName serverName,
RegionState.State state) follows unchanged.

diff --git a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
index 0445bee..2caa10b 100644
@@ -347,11 +347,11 @@
The enum constants listed under java.lang.Enum in the package tree are re-ordered:
MasterRpcServices.BalanceSwitchMode and SplitLogManager.ResubmitDirective now appear before
RegionState.State, MetricsMasterSourceFactoryImpl.FactoryStorage and
SplitLogManager.TerminationStatus.

diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.html
index a112762..fc6eafc 100644
Only the generated source-line anchors change for filterDefaultMetaRegions(),
isDefaultMetaRegion(RegionInfo), splitMetaLogs(MasterProcedureEnv), splitLogs(MasterProcedureEnv),
rollbackState(MasterProcedureEnv, MasterProcedureProtos.ServerCrashState) and getState(int); the
method signatures and the "Description copied from class: StateMachineProcedure ... Convert an
ordinal (or state id) to" text are otherwise unchanged (the excerpt cuts off there).

[39/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html b/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
index ad7c82a..1dfa7b8 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
@@ -29,307 +29,322 @@
import java.io.IOException;
import java.util.ArrayList;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;

import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;

/**
 * This is a generic filter to be used to filter by comparison.  It takes an
 * operator (equal, greater, not equal, etc) and a byte [] comparator.
 * <p>
 * To filter by row key, use {@link RowFilter}.
 * <p>
 * To filter by column family, use {@link FamilyFilter}.
 * <p>
 * To filter by column qualifier, use {@link QualifierFilter}.
 * <p>
 * To filter by value, use {@link ValueFilter}.
 * <p>
 * These filters can be wrapped with {@link SkipFilter} and {@link WhileMatchFilter}
 * to add more control.
 * <p>
 * Multiple filters can be combined using {@link FilterList}.
 */
@InterfaceAudience.Public
public abstract class CompareFilter extends FilterBase {
  /**
   * Comparison operators. For filters only!
   * Use {@link CompareOperator} otherwise.
   * It (intentionally) has at least the below enums with same names.
   * @deprecated since 2.0.0. Will be removed in 3.0.0. Use {@link CompareOperator} instead.
   */
  @Deprecated
  @InterfaceAudience.Public
  public enum CompareOp {
    /** less than */
    LESS,
    /** less than or equal to */
    LESS_OR_EQUAL,
    /** equals */
    EQUAL,
    /** not equal */
    NOT_EQUAL,
    /** greater than or equal to */
    GREATER_OR_EQUAL,
    /** greater than */
    GREATER,
    /** no operation */
    NO_OP,
  }

  protected CompareOperator op;
  protected ByteArrayComparable comparator;

  /**
   * Constructor.
   * @param compareOp the compare op for row matching
   * @param comparator the comparator for row matching
   * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use other constructor.
   */
  @Deprecated
  public CompareFilter(final CompareOp compareOp,
      final ByteArrayComparable comparator) {
    this(CompareOperator.valueOf(compareOp.name()), comparator);
  }

  /**
   * Constructor.
   * @param op the compare op for row matching
   * @param comparator the comparator for row matching
   */
  public CompareFilter(final CompareOperator op,
                       final ByteArrayComparable comparator) {
    this.op = op;
    this.comparator = comparator;
  }

  /**
   * @return operator
   * @deprecated since 2.0.0. Will be removed in 3.0.0. Use {@link #getCompareOperator()} instead.
   */
  @Deprecated
  public CompareOp getOperator() {
    return CompareOp.valueOf(op.name());
  }

  public CompareOperator getCompareOperator() {
    return op;
  }

  /** @return the comparator */
  public ByteArrayComparable getComparator() {
    return comparator;
  }

  @Override
  public boolean filterRowKey(Cell cell) throws IOException {
    // Impl in FilterBase might do unnecessary copy for Off heap backed Cells.
    return false;
  }

  /**
   * @deprecated Since 2.0.0. Will be removed in 3.0.0.
   * Use {@link #compareRow(CompareOperator, ByteArrayComparable, Cell)}
   */
  @Deprecated
  protected boolean compareRow(final CompareOp compareOp, final ByteArrayComparable comparator,
      final Cell cell) {
    if (compareOp == CompareOp.NO_OP) {
      return true;
    }
    int compareResult = PrivateCellUtil.compareRow(cell, comparator);
    return compare(compareOp, compareResult);
  }

  protected boolean compareRow(final CompareOperator op, final ByteArrayComparable comparator,
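The excerpt is cut off above. Since the Javadoc points readers at the CompareFilter subclasses (RowFilter, FamilyFilter, QualifierFilter, ValueFilter) and the wrapping filters, here is a hedged sketch of one such combination; the row key prefix is a made-up example.

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.filter.WhileMatchFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class CompareFilterExample {
  public static Scan buildScan() {
    // RowFilter is one of the CompareFilter subclasses named above: keep rows whose key
    // starts with "order-2018", using CompareOperator rather than the deprecated CompareOp enum.
    RowFilter rows = new RowFilter(CompareOperator.EQUAL,
        new BinaryPrefixComparator(Bytes.toBytes("order-2018")));
    // WhileMatchFilter, mentioned in the Javadoc, stops the whole scan at the first non-match.
    return new Scan().setFilter(new WhileMatchFilter(rows));
  }
}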

[29/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.html b/apidocs/src-html/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.html
index c8e02c8..309387e 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.html
@@ -27,195 +27,211 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.TreeSet;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * This filter is used for selecting only those keys with columns that match
 * a particular prefix. For example, if prefix is 'an', it will pass keys with
 * columns like 'and', 'anti' but not keys with columns like 'ball', 'act'.
 */
@InterfaceAudience.Public
public class MultipleColumnPrefixFilter extends FilterBase {
  protected byte [] hint = null;
  protected TreeSet<byte []> sortedPrefixes = createTreeSet();
  private final static int MAX_LOG_PREFIXES = 5;

  public MultipleColumnPrefixFilter(final byte [][] prefixes) {
    if (prefixes != null) {
      for (int i = 0; i < prefixes.length; i++) {
        if (!sortedPrefixes.add(prefixes[i]))
          throw new IllegalArgumentException ("prefixes must be distinct");
      }
    }
  }

  public byte [][] getPrefix() {
    int count = 0;
    byte [][] temp = new byte [sortedPrefixes.size()][];
    for (byte [] prefixes : sortedPrefixes) {
      temp [count++] = prefixes;
    }
    return temp;
  }

  @Override
  public boolean filterRowKey(Cell cell) throws IOException {
    // Impl in FilterBase might do unnecessary copy for Off heap backed Cells.
    return false;
  }

  @Deprecated
  @Override
  public ReturnCode filterKeyValue(final Cell c) {
    return filterCell(c);
  }

  @Override
  public ReturnCode filterCell(final Cell c) {
    if (sortedPrefixes.isEmpty()) {
      return ReturnCode.INCLUDE;
    } else {
      return filterColumn(c);
    }
  }

  public ReturnCode filterColumn(Cell cell) {
    byte [] qualifier = CellUtil.cloneQualifier(cell);
    TreeSet<byte []> lesserOrEqualPrefixes =
      (TreeSet<byte []>) sortedPrefixes.headSet(qualifier, true);

    if (lesserOrEqualPrefixes.size() != 0) {
      byte [] largestPrefixSmallerThanQualifier = lesserOrEqualPrefixes.last();

      if (Bytes.startsWith(qualifier, largestPrefixSmallerThanQualifier)) {
        return ReturnCode.INCLUDE;
      }

      if (lesserOrEqualPrefixes.size() == sortedPrefixes.size()) {
        return ReturnCode.NEXT_ROW;
      } else {
        hint = sortedPrefixes.higher(largestPrefixSmallerThanQualifier);
        return ReturnCode.SEEK_NEXT_USING_HINT;
      }
    } else {
      hint = sortedPrefixes.first();
      return ReturnCode.SEEK_NEXT_USING_HINT;
    }
  }

  public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) {
    byte [][] prefixes = new byte [filterArguments.size()][];
    for (int i = 0 ; i < filterArguments.size(); i++) {
      byte [] columnPrefix = ParseFilter.removeQuotesFromByteArray(filterArguments.get(i));
      prefixes[i] = columnPrefix;
    }
    return new MultipleColumnPrefixFilter(prefixes);
  }

  /**
   * @return The filter serialized using pb
   */
  @Override
  public byte [] toByteArray() {
    FilterProtos.MultipleColumnPrefixFilter.Builder builder =
      FilterProtos.MultipleColumnPrefixFilter.newBuilder();
    for (byte [] element : sortedPrefixes) {
      if (element != null) builder.addSortedPrefixes(UnsafeByteOperations.unsafeWrap(element));
    }
    return builder.build().toByteArray();
  }

  /**
   * @param pbBytes A pb serialized {@link MultipleColumnPrefixFilter} instance
   * @return An instance of {@link MultipleColumnPrefixFilter} made from
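The excerpt ends mid-Javadoc above. A minimal usage sketch matching the class description; the prefixes chosen are hypothetical.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class MultipleColumnPrefixExample {
  public static Scan buildScan() {
    // Only qualifiers starting with "an" or "st" are returned; for other qualifiers the
    // filter seeks directly to the next candidate prefix via SEEK_NEXT_USING_HINT.
    byte[][] prefixes = new byte[][] { Bytes.toBytes("an"), Bytes.toBytes("st") };
    return new Scan().setFilter(new MultipleColumnPrefixFilter(prefixes));
  }
}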

[49/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/org/apache/hadoop/hbase/filter/CompareFilter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/CompareFilter.html b/apidocs/org/apache/hadoop/hbase/filter/CompareFilter.html
index 3345090..a5c5e68 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/CompareFilter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/CompareFilter.html
@@ -18,7 +18,7 @@ (generated Javadoc, apidocs/org/apache/hadoop/hbase/filter/CompareFilter.html)

@InterfaceAudience.Public
public abstract class CompareFilter extends org.apache.hadoop.hbase.filter.FilterBase

Class description: "This is a generic filter to be used to filter by comparison. It takes an
operator (equal, greater, not equal, etc) and a byte [] comparator."

Change in this regeneration: equals(Object) and hashCode() are added to the method summary table
(joining the compare helpers, extractArguments, filterRowKey, getComparator, getCompareOperator,
the deprecated getOperator and toString), the method-index JavaScript variables and row anchors
are renumbered, and equals and hashCode are removed from the "Methods inherited from class
java.lang.Object" list.

[20/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/ColumnValueFilter.html b/devapidocs/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
index dab230a..2ccd245 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/ColumnValueFilter.html
@@ -18,7 +18,7 @@ (generated Javadoc, devapidocs/org/apache/hadoop/hbase/filter/ColumnValueFilter.html)

@InterfaceAudience.Public
public class ColumnValueFilter extends FilterBase

Class description: "Different from SingleColumnValueFilter which returns an entire row when the
specified condition is matched, ColumnValueFilter returns the matched cell only."

Change in this regeneration: equals(Object) and hashCode() are added to the method summary table
(joining filterCell, filterRowKey, getComparator, getCompareOperator, getFamily, getQualifier,
isFamilyEssential, parseFrom, reset, toByteArray and toString), the method-index JavaScript
variables and row anchors are renumbered, and equals and hashCode are removed from the "Methods
inherited from class java.lang.Object" list.
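For the comparison the description above draws, here is the SingleColumnValueFilter side of it: a hedged sketch that returns whole rows rather than single cells. The family, qualifier and value names are hypothetical.

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class SingleColumnValueExample {
  public static Scan buildScan() {
    // Unlike ColumnValueFilter (matched cell only), this returns every cell of any row
    // whose cf:status value equals "ACTIVE".
    SingleColumnValueFilter filter = new SingleColumnValueFilter(
        Bytes.toBytes("cf"), Bytes.toBytes("status"),
        CompareOperator.EQUAL, Bytes.toBytes("ACTIVE"));
    // Without this, rows that do not have cf:status at all would also be returned.
    filter.setFilterIfMissing(true);
    return new Scan().setFilter(filter);
  }
}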

[13/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/filter/PageFilter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/PageFilter.html b/devapidocs/org/apache/hadoop/hbase/filter/PageFilter.html
index fdd803c..f10b3da 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/PageFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/PageFilter.html
@@ -18,7 +18,7 @@ (generated Javadoc, devapidocs/org/apache/hadoop/hbase/filter/PageFilter.html)

@InterfaceAudience.Public
public class PageFilter extends FilterBase

Class description (truncated in the diff excerpt): "Implementation of Filter interface that limits
results to a specific page size. It terminates scanning once the number of filter-passed rows is"

Change in this regeneration: equals(Object) and hashCode() are added to the method summary table
(joining filterAllRemaining, filterCell, the deprecated filterKeyValue, filterRow, filterRowKey,
getPageSize, hasFilterRow, parseFrom, toByteArray and toString), the method-index JavaScript
variables and row anchors are renumbered, and equals and hashCode are removed from the "Methods
inherited from class java.lang.Object" list.
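A small hedged sketch of the page-size behaviour described above; the page size of 25 is arbitrary. Note that the limit is applied per region server, so the client may still see more rows than the page size and typically applies its own cut-off while iterating.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.PageFilter;

public class PageFilterExample {
  public static Scan buildScan() {
    // Ask each region server to stop after roughly 25 filter-passed rows; caching is set
    // to the same value so a single RPC can carry one page.
    return new Scan().setFilter(new PageFilter(25)).setCaching(25);
  }
}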

[48/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/org/apache/hadoop/hbase/filter/FilterList.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/FilterList.html b/apidocs/org/apache/hadoop/hbase/filter/FilterList.html
index 6f6ebff..b026e64 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/FilterList.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/FilterList.html
@@ -18,7 +18,7 @@ (generated Javadoc, apidocs/org/apache/hadoop/hbase/filter/FilterList.html)

@InterfaceAudience.Public
public final class FilterList extends org.apache.hadoop.hbase.filter.FilterBase

Class description (truncated in the diff excerpt): "Implementation of Filter that represents an
ordered List of Filters which will be evaluated with a specified boolean operator
FilterList.Operator.MUST_PASS_ALL (AND) or"

Change in this regeneration: equals(Object) and hashCode() are added to the method summary table
(joining filterAllRemaining, filterCell, the deprecated filterKeyValue, filterRow, filterRowCells,
both filterRowKey overloads, getFilters, getNextCellHint, getOperator, hasFilterRow,
isFamilyEssential, isReversed, parseFrom, reset, setReversed, size, toByteArray, toString and
transformCell), the method-index JavaScript variables and row anchors are renumbered, and equals
and hashCode are removed from the "Methods inherited from class java.lang.Object" list.
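To complement the MUST_PASS_ALL sketch earlier in this digest, here is the MUST_PASS_ONE (OR) side of the behaviour summarized above; the column prefixes are hypothetical.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.util.Bytes;

public class FilterListOrExample {
  public static Scan buildScan() {
    // MUST_PASS_ONE (OR): a cell is returned if any child filter includes it.
    // Per the description, all children are always evaluated (non-lazy).
    FilterList any = new FilterList(FilterList.Operator.MUST_PASS_ONE,
        new ColumnPrefixFilter(Bytes.toBytes("addr_")),
        new ColumnPrefixFilter(Bytes.toBytes("phone_")));
    return new Scan().setFilter(any);
  }
}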

[14/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RowRange.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RowRange.html b/devapidocs/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RowRange.html
index 745b957..c69ff75 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RowRange.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RowRange.html
@@ -18,7 +18,7 @@ (generated Javadoc, devapidocs/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RowRange.html)

@InterfaceAudience.Public
public static class MultiRowRangeFilter.RowRange extends Object implements Comparable<MultiRowRangeFilter.RowRange>

Change in this regeneration: equals(Object) and hashCode() are added to the method summary table
(joining contains(byte[], int, int), getStartRow, getStopRow, isStartRowInclusive,
isStopRowInclusive and isValid), growing the method index from 8 to 10 entries, and equals and
hashCode are removed from the "Methods inherited from class java.lang.Object" list (toString and
the wait/notify family remain inherited).

[41/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.html b/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.html
index d206019..9475950 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.html
@@ -29,147 +29,162 @@
 021
 022import java.io.IOException;
 023import java.util.ArrayList;
-024
-025import 
org.apache.hadoop.hbase.ByteBufferExtendedCell;
-026import org.apache.hadoop.hbase.Cell;
-027import 
org.apache.hadoop.hbase.PrivateCellUtil;
-028import 
org.apache.yetus.audience.InterfaceAudience;
-029import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-030import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-031import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-032import 
org.apache.hadoop.hbase.util.Bytes;
-033
-034import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-035import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-036import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
-037
-038/**
-039 * This filter is used for selecting only 
those keys with columns that matches
-040 * a particular prefix. For example, if 
prefix is 'an', it will pass keys with
-041 * columns like 'and', 'anti' but not 
keys with columns like 'ball', 'act'.
-042 */
-043@InterfaceAudience.Public
-044public class ColumnPrefixFilter extends 
FilterBase {
-045  protected byte [] prefix = null;
-046
-047  public ColumnPrefixFilter(final byte [] 
prefix) {
-048this.prefix = prefix;
-049  }
-050
-051  public byte[] getPrefix() {
-052return prefix;
-053  }
-054
-055  @Override
-056  public boolean filterRowKey(Cell cell) 
throws IOException {
-057// Impl in FilterBase might do 
unnecessary copy for Off heap backed Cells.
-058return false;
-059  }
-060
-061  @Deprecated
-062  @Override
-063  public ReturnCode filterKeyValue(final 
Cell c) {
-064return filterCell(c);
-065  }
-066
-067  @Override
-068  public ReturnCode filterCell(final Cell 
cell) {
-069if (this.prefix == null) {
-070  return ReturnCode.INCLUDE;
-071} else {
-072  return filterColumn(cell);
-073}
-074  }
-075
-076  public ReturnCode filterColumn(Cell cell) {
-077    int qualifierLength = cell.getQualifierLength();
-078    if (qualifierLength < prefix.length) {
-079      int cmp = compareQualifierPart(cell, qualifierLength, this.prefix);
-080      if (cmp <= 0) {
-081        return ReturnCode.SEEK_NEXT_USING_HINT;
-082      } else {
-083        return ReturnCode.NEXT_ROW;
-084      }
-085    } else {
-086      int cmp = compareQualifierPart(cell, this.prefix.length, this.prefix);
-087      if (cmp < 0) {
-088        return ReturnCode.SEEK_NEXT_USING_HINT;
-089      } else if (cmp > 0) {
-090        return ReturnCode.NEXT_ROW;
-091      } else {
-092        return ReturnCode.INCLUDE;
-093      }
-094    }
-095  }
-096
-097  private static int 
compareQualifierPart(Cell cell, int length, byte[] prefix) {
-098if (cell instanceof 
ByteBufferExtendedCell) {
-099  return 
ByteBufferUtils.compareTo(((ByteBufferExtendedCell) 
cell).getQualifierByteBuffer(),
-100  ((ByteBufferExtendedCell) 
cell).getQualifierPosition(), length, prefix, 0, length);
-101}
-102return 
Bytes.compareTo(cell.getQualifierArray(), cell.getQualifierOffset(), length, 
prefix, 0,
-103length);
-104  }
-105
-106  public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) {
-107    Preconditions.checkArgument(filterArguments.size() == 1,
-108        "Expected 1 but got: %s", filterArguments.size());
-109byte [] columnPrefix = 
ParseFilter.removeQuotesFromByteArray(filterArguments.get(0));
-110return new 
ColumnPrefixFilter(columnPrefix);
-111  }
-112
-113  /**
-114   * @return The filter serialized using 
pb
-115   */
-116  @Override
-117  public byte [] toByteArray() {
-118
FilterProtos.ColumnPrefixFilter.Builder builder =
-119  
FilterProtos.ColumnPrefixFilter.newBuilder();
-120if (this.prefix != null) 
builder.setPrefix(UnsafeByteOperations.unsafeWrap(this.prefix));
-121return 
builder.build().toByteArray();
-122  }
-123
-124  /**
-125   * @param pbBytes A pb serialized {@link ColumnPrefixFilter} instance
-126   * @return An instance of {@link ColumnPrefixFilter} made from <code>bytes</code>
-127   * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
-128   * @see #toByteArray
-129   */
-130  public static ColumnPrefixFilter 
parseFrom(final byte [] pbBytes)
-131  throws DeserializationException {
-132FilterProtos.ColumnPrefixFilter 
proto;
-133try {
-134  proto 
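
For reference, a minimal usage sketch of the ColumnPrefixFilter shown in the diff above, written against the HBase 2.x client API (the "an" prefix mirrors the class javadoc; the rest is illustrative, not part of the published diff):

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

// Keep only columns whose qualifier starts with "an" ("and", "anti", ...).
Scan scan = new Scan();
scan.setFilter(new ColumnPrefixFilter(Bytes.toBytes("an")));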

[37/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/DependentColumnFilter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/DependentColumnFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/DependentColumnFilter.html
index 9425699..f592ec3 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/DependentColumnFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/DependentColumnFilter.html
@@ -30,296 +30,312 @@
 022import java.util.ArrayList;
 023import java.util.HashSet;
 024import java.util.List;
-025import java.util.Set;
-026
-027import org.apache.hadoop.hbase.Cell;
-028import 
org.apache.hadoop.hbase.CellUtil;
-029import 
org.apache.hadoop.hbase.CompareOperator;
-030import 
org.apache.yetus.audience.InterfaceAudience;
-031import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-032import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-033import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-034import 
org.apache.hadoop.hbase.util.Bytes;
-035
-036import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-037
-038import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-039import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
-040
-041/**
-042 * A filter for adding inter-column 
timestamp matching
-043 * Only cells with a correspondingly 
timestamped entry in
-044 * the target column will be retained
-045 * Not compatible with Scan.setBatch as 
operations need 
-046 * full rows for correct filtering 
-047 */
-048@InterfaceAudience.Public
-049public class DependentColumnFilter 
extends CompareFilter {
-050
-051  protected byte[] columnFamily;
-052  protected byte[] columnQualifier;
-053  protected boolean 
dropDependentColumn;
-054
-055  protected Set<Long> stampSet = new HashSet<>();
-056  
-057  /**
-058   * Build a dependent column filter with 
value checking
-059   * dependent column varies will be 
compared using the supplied
-060   * compareOp and comparator, for usage 
of which
-061   * refer to {@link CompareFilter}
-062   * 
-063   * @param family dependent column 
family
-064   * @param qualifier dependent column 
qualifier
-065   * @param dropDependentColumn whether 
the column should be discarded after
-066   * @param valueCompareOp comparison op 

-067   * @param valueComparator comparator
-068   * @deprecated Since 2.0.0. Will be 
removed in 3.0.0. Use
-069   * {@link 
#DependentColumnFilter(byte[], byte[], boolean, CompareOperator, 
ByteArrayComparable)}
-070   * instead.
-071   */
-072  @Deprecated
-073  public DependentColumnFilter(final byte 
[] family, final byte[] qualifier,
-074  final boolean dropDependentColumn, 
final CompareOp valueCompareOp,
-075final ByteArrayComparable 
valueComparator) {
-076this(family, qualifier, 
dropDependentColumn, CompareOperator.valueOf(valueCompareOp.name()),
-077  valueComparator);
-078  }
-079
-080  /**
-081   * Build a dependent column filter with 
value checking
-082   * dependent column varies will be 
compared using the supplied
-083   * compareOp and comparator, for usage 
of which
-084   * refer to {@link CompareFilter}
-085   *
-086   * @param family dependent column 
family
-087   * @param qualifier dependent column 
qualifier
-088   * @param dropDependentColumn whether 
the column should be discarded after
-089   * @param op Value comparison op
-090   * @param valueComparator comparator
-091   */
-092  public DependentColumnFilter(final byte 
[] family, final byte[] qualifier,
-093   final 
boolean dropDependentColumn, final CompareOperator op,
-094   final 
ByteArrayComparable valueComparator) {
-095// set up the comparator
-096super(op, valueComparator);
-097this.columnFamily = family;
-098this.columnQualifier = qualifier;
-099this.dropDependentColumn = 
dropDependentColumn;
-100  }
-101  
-102  /**
-103   * Constructor for DependentColumn 
filter.
-104   * Cells where a Cell from target 
column
-105   * with the same timestamp do not exist 
will be dropped.
-106   *
-107   * @param family name of target column 
family
-108   * @param qualifier name of column 
qualifier
-109   */
-110  public DependentColumnFilter(final byte 
[] family, final byte [] qualifier) {
-111this(family, qualifier, false);
-112  }
-113  
-114  /**
-115   * Constructor for DependentColumn 
filter.
-116   * Cells where a Cell from target 
column
-117   * with the same timestamp do not exist 
will be dropped.
-118   *
-119   * @param family name of dependent 
column family
-120   * @param qualifier name of dependent 
qualifier
-121   * @param dropDependentColumn whether 
the dependent columns Cells should be discarded
-122   */
-123  public DependentColumnFilter(final byte 
[] family, 
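
A short usage sketch of the DependentColumnFilter covered by the diff above, assuming the HBase 2.x client API; the d:ts reference column is a placeholder chosen for illustration:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.DependentColumnFilter;
import org.apache.hadoop.hbase.util.Bytes;

// Keep only cells whose timestamp also appears in the reference column d:ts,
// and drop the reference column itself from the result (dropDependentColumn = true).
Scan scan = new Scan();
scan.setFilter(new DependentColumnFilter(Bytes.toBytes("d"), Bytes.toBytes("ts"), true));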

[38/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
index ad7c82a..1dfa7b8 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
@@ -29,307 +29,322 @@
 021
 022import java.io.IOException;
 023import java.util.ArrayList;
-024
-025import org.apache.hadoop.hbase.Cell;
-026import 
org.apache.hadoop.hbase.CompareOperator;
-027import 
org.apache.hadoop.hbase.PrivateCellUtil;
-028import 
org.apache.hadoop.hbase.util.Bytes;
-029import 
org.apache.yetus.audience.InterfaceAudience;
-030
-031import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-032
-033import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-034import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-035import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-036import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
-037
-038/**
-039 * This is a generic filter to be used to 
filter by comparison.  It takes an
-040 * operator (equal, greater, not equal, 
etc) and a byte [] comparator.
-041 * p
-042 * To filter by row key, use {@link 
RowFilter}.
-043 * p
-044 * To filter by column family, use {@link 
FamilyFilter}.
-045 * p
-046 * To filter by column qualifier, use 
{@link QualifierFilter}.
-047 * p
-048 * To filter by value, use {@link 
ValueFilter}.
-049 * p
-050 * These filters can be wrapped with 
{@link SkipFilter} and {@link WhileMatchFilter}
-051 * to add more control.
-052 * p
-053 * Multiple filters can be combined using 
{@link FilterList}.
-054 */
-055@InterfaceAudience.Public
-056public abstract class CompareFilter 
extends FilterBase {
-057  /**
-058   * Comparison operators. For filters 
only!
-059   * Use {@link CompareOperator} 
otherwise.
-060   * It (intentionally) has at least the 
below enums with same names.
-061   * @deprecated  since 2.0.0. Will be 
removed in 3.0.0. Use {@link CompareOperator} instead.
-062   */
-063  @Deprecated
-064  @InterfaceAudience.Public
-065  public enum CompareOp {
-066/** less than */
-067LESS,
-068/** less than or equal to */
-069LESS_OR_EQUAL,
-070/** equals */
-071EQUAL,
-072/** not equal */
-073NOT_EQUAL,
-074/** greater than or equal to */
-075GREATER_OR_EQUAL,
-076/** greater than */
-077GREATER,
-078/** no operation */
-079NO_OP,
-080  }
-081
-082  protected CompareOperator op;
-083  protected ByteArrayComparable 
comparator;
-084
-085  /**
-086   * Constructor.
-087   * @param compareOp the compare op for 
row matching
-088   * @param comparator the comparator for 
row matching
-089   * @deprecated Since 2.0.0. Will be 
removed in 3.0.0. Use other constructor.
-090   */
-091  @Deprecated
-092  public CompareFilter(final CompareOp 
compareOp,
-093  final ByteArrayComparable 
comparator) {
-094
this(CompareOperator.valueOf(compareOp.name()), comparator);
-095  }
-096
-097  /**
-098   * Constructor.
-099   * @param op the compare op for row 
matching
-100   * @param comparator the comparator for 
row matching
-101   */
-102  public CompareFilter(final 
CompareOperator op,
-103   final 
ByteArrayComparable comparator) {
-104this.op = op;
-105this.comparator = comparator;
-106  }
-107
-108  /**
-109   * @return operator
-110   * @deprecated  since 2.0.0. Will be 
removed in 3.0.0. Use {@link #getCompareOperator()} instead.
-111   */
-112  @Deprecated
-113  public CompareOp getOperator() {
-114return 
CompareOp.valueOf(op.name());
-115  }
-116
-117  public CompareOperator 
getCompareOperator() {
-118return op;
-119  }
-120
-121  /**
-122   * @return the comparator
-123   */
-124  public ByteArrayComparable 
getComparator() {
-125return comparator;
-126  }
-127
-128  @Override
-129  public boolean filterRowKey(Cell cell) 
throws IOException {
-130// Impl in FilterBase might do 
unnecessary copy for Off heap backed Cells.
-131return false;
-132  }
-133
-134  /**
-135   * @deprecated Since 2.0.0. Will be 
removed in 3.0.0.
-136   * Use {@link 
#compareRow(CompareOperator, ByteArrayComparable, Cell)}
-137   */
-138  @Deprecated
-139  protected boolean compareRow(final 
CompareOp compareOp, final ByteArrayComparable comparator,
-140  final Cell cell) {
-141if (compareOp == CompareOp.NO_OP) {
-142  return true;
-143}
-144int compareResult = 
PrivateCellUtil.compareRow(cell, comparator);
-145return compare(compareOp, 
compareResult);
-146  }
-147
-148  protected boolean compareRow(final 
CompareOperator op, final ByteArrayComparable comparator,
-149   final Cell 
cell) 
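
CompareFilter itself is abstract, so a usage sketch has to go through one of its concrete subclasses; the following assumes the HBase 2.x client API and uses RowFilter (named in the class javadoc above) with a placeholder row key:

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;

// Match only rows whose key is lexicographically <= "row-0500",
// using the non-deprecated CompareOperator-based constructor.
Scan scan = new Scan();
scan.setFilter(new RowFilter(CompareOperator.LESS_OR_EQUAL,
    new BinaryComparator(Bytes.toBytes("row-0500"))));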

[06/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/src-html/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.html
index c6137d0..4ca69da 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.html
@@ -29,118 +29,133 @@
 021
 022import java.io.IOException;
 023import java.util.ArrayList;
-024
-025import org.apache.hadoop.hbase.Cell;
-026import 
org.apache.yetus.audience.InterfaceAudience;
-027import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-028import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-029
-030import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-031import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-032
-033/**
-034 * Simple filter that returns first N 
columns on row only.
-035 * This filter was written to test 
filters in Get and as soon as it gets
-036 * its quota of columns, {@link 
#filterAllRemaining()} returns true.  This
-037 * makes this filter unsuitable as a Scan 
filter.
-038 */
-039@InterfaceAudience.Public
-040public class ColumnCountGetFilter extends 
FilterBase {
-041  private int limit = 0;
-042  private int count = 0;
-043
-044  public ColumnCountGetFilter(final int n) {
-045    Preconditions.checkArgument(n >= 0, "limit be positive %s", n);
-046    this.limit = n;
-047  }
-048
-049  public int getLimit() {
-050return limit;
-051  }
-052
-053  @Override
-054  public boolean filterRowKey(Cell cell) 
throws IOException {
-055// Impl in FilterBase might do 
unnecessary copy for Off heap backed Cells.
-056if (filterAllRemaining()) return 
true;
-057return false;
-058  }
-059
-060  @Override
-061  public boolean filterAllRemaining() {
-062    return this.count > this.limit;
-063  }
-064
-065  @Deprecated
-066  @Override
-067  public ReturnCode filterKeyValue(final 
Cell c) {
-068return filterCell(c);
-069  }
-070
-071  @Override
-072  public ReturnCode filterCell(final Cell 
c) {
-073this.count++;
-074return filterAllRemaining() ? 
ReturnCode.NEXT_COL : ReturnCode.INCLUDE_AND_NEXT_COL;
-075  }
-076
-077  @Override
-078  public void reset() {
-079this.count = 0;
-080  }
-081
-082  public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) {
-083    Preconditions.checkArgument(filterArguments.size() == 1,
-084        "Expected 1 but got: %s", filterArguments.size());
-085int limit = 
ParseFilter.convertByteArrayToInt(filterArguments.get(0));
-086return new 
ColumnCountGetFilter(limit);
-087  }
-088
-089  /**
-090   * @return The filter serialized using 
pb
-091   */
-092  @Override
-093  public byte [] toByteArray() {
-094
FilterProtos.ColumnCountGetFilter.Builder builder =
-095  
FilterProtos.ColumnCountGetFilter.newBuilder();
-096builder.setLimit(this.limit);
-097return 
builder.build().toByteArray();
-098  }
-099
-100  /**
-101   * @param pbBytes A pb serialized {@link ColumnCountGetFilter} instance
-102   * @return An instance of {@link ColumnCountGetFilter} made from <code>bytes</code>
-103   * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
-104   * @see #toByteArray
-105   */
-106  public static ColumnCountGetFilter 
parseFrom(final byte [] pbBytes)
-107  throws DeserializationException {
-108FilterProtos.ColumnCountGetFilter 
proto;
-109try {
-110  proto = 
FilterProtos.ColumnCountGetFilter.parseFrom(pbBytes);
-111} catch 
(InvalidProtocolBufferException e) {
-112  throw new 
DeserializationException(e);
-113}
-114return new 
ColumnCountGetFilter(proto.getLimit());
-115  }
-116
-117  /**
-118   * @param o the other filter to compare 
with
-119   * @return true if and only if the 
fields of the filter that are serialized
-120   * are equal to the corresponding 
fields in other.  Used for testing.
-121   */
-122  @Override
-123  boolean areSerializedFieldsEqual(Filter 
o) {
-124if (o == this) return true;
-125if (!(o instanceof 
ColumnCountGetFilter)) return false;
-126
-127ColumnCountGetFilter other = 
(ColumnCountGetFilter)o;
-128return this.getLimit() == 
other.getLimit();
-129  }
-130
-131  @Override
-132  public String toString() {
-133return 
this.getClass().getSimpleName() + " " + this.limit;
-134  }
-135}
+024import java.util.Objects;
+025
+026import org.apache.hadoop.hbase.Cell;
+027import 
org.apache.yetus.audience.InterfaceAudience;
+028import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
+029import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
+030
+031import 
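
A minimal sketch of how the ColumnCountGetFilter from the diff above is typically applied; per its javadoc it is a Get-side filter and unsuitable for Scan. Written against the HBase 2.x client API with a placeholder row key:

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
import org.apache.hadoop.hbase.util.Bytes;

// Return at most the first 10 columns of the row.
Get get = new Get(Bytes.toBytes("row-0001"));
get.setFilter(new ColumnCountGetFilter(10));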

[21/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.html 
b/devapidocs/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.html
index ac54f87..368fdb4 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":9,"i2":10,"i3":42,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":9,"i10":10,"i11":10,"i12":10};
+var methods = 
{"i0":10,"i1":9,"i2":10,"i3":10,"i4":42,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":9,"i12":10,"i13":10,"i14":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -120,7 +120,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class ColumnPaginationFilter
+public class ColumnPaginationFilter
 extends FilterBase
 A filter, based on the ColumnCountGetFilter, takes two 
arguments: limit and offset.
  This filter can be used for row-based indexing, where references to other 
tables are stored across many columns,
@@ -234,60 +234,68 @@ extends createFilterFromArguments(https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
+boolean
+equals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
+
+
 Filter.ReturnCode
 filterCell(Cellc)
 A way to filter based on the column family, column 
qualifier and/or the column value.
 
 
-
+
 Filter.ReturnCode
 filterKeyValue(Cellc)
 Deprecated.
 
 
-
+
 boolean
 filterRowKey(Cellcell)
 Filters a row based on the row key.
 
 
-
+
 byte[]
 getColumnOffset()
 
-
+
 int
 getLimit()
 
-
+
 Cell
 getNextCellHint(Cellcell)
 Filters that are not sure which key must be next seeked to, 
can inherit
  this implementation that, by default, returns a null Cell.
 
 
-
+
 int
 getOffset()
 
-
+
+int
+hashCode()
+
+
 static ColumnPaginationFilter
 parseFrom(byte[]pbBytes)
 
-
+
 void
 reset()
 Filters that are purely stateless and do nothing in their 
reset() methods can inherit
  this null/empty implementation.
 
 
-
+
 byte[]
 toByteArray()
 Return length 0 byte array for Filters that don't require 
special serialization
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString()
 Return filter's info for debugging and logging 
purpose.
@@ -313,7 +321,7 @@ extends 
 
 Methods inherited from class java.lang.Object
-clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait
+clone, finalize, getClass, 
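
A minimal usage sketch for the ColumnPaginationFilter whose javadoc changes appear above, assuming the HBase 2.x client API; the limit and offset values are arbitrary:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.ColumnPaginationFilter;

// Page through wide rows: skip the first 20 columns, then return the next 10.
Scan scan = new Scan();
scan.setFilter(new ColumnPaginationFilter(10, 20));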

[01/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 455e3292b -> 0cf79db0e


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/src-html/org/apache/hadoop/hbase/filter/DependentColumnFilter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/DependentColumnFilter.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/DependentColumnFilter.html
index 9425699..f592ec3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/DependentColumnFilter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/DependentColumnFilter.html
@@ -30,296 +30,312 @@
 022import java.util.ArrayList;
 023import java.util.HashSet;
 024import java.util.List;
-025import java.util.Set;
-026
-027import org.apache.hadoop.hbase.Cell;
-028import 
org.apache.hadoop.hbase.CellUtil;
-029import 
org.apache.hadoop.hbase.CompareOperator;
-030import 
org.apache.yetus.audience.InterfaceAudience;
-031import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-032import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-033import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-034import 
org.apache.hadoop.hbase.util.Bytes;
-035
-036import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-037
-038import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-039import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
-040
-041/**
-042 * A filter for adding inter-column 
timestamp matching
-043 * Only cells with a correspondingly 
timestamped entry in
-044 * the target column will be retained
-045 * Not compatible with Scan.setBatch as 
operations need 
-046 * full rows for correct filtering 
-047 */
-048@InterfaceAudience.Public
-049public class DependentColumnFilter 
extends CompareFilter {
-050
-051  protected byte[] columnFamily;
-052  protected byte[] columnQualifier;
-053  protected boolean 
dropDependentColumn;
-054
-055  protected Set<Long> stampSet = new HashSet<>();
-056  
-057  /**
-058   * Build a dependent column filter with 
value checking
-059   * dependent column varies will be 
compared using the supplied
-060   * compareOp and comparator, for usage 
of which
-061   * refer to {@link CompareFilter}
-062   * 
-063   * @param family dependent column 
family
-064   * @param qualifier dependent column 
qualifier
-065   * @param dropDependentColumn whether 
the column should be discarded after
-066   * @param valueCompareOp comparison op 

-067   * @param valueComparator comparator
-068   * @deprecated Since 2.0.0. Will be 
removed in 3.0.0. Use
-069   * {@link 
#DependentColumnFilter(byte[], byte[], boolean, CompareOperator, 
ByteArrayComparable)}
-070   * instead.
-071   */
-072  @Deprecated
-073  public DependentColumnFilter(final byte 
[] family, final byte[] qualifier,
-074  final boolean dropDependentColumn, 
final CompareOp valueCompareOp,
-075final ByteArrayComparable 
valueComparator) {
-076this(family, qualifier, 
dropDependentColumn, CompareOperator.valueOf(valueCompareOp.name()),
-077  valueComparator);
-078  }
-079
-080  /**
-081   * Build a dependent column filter with 
value checking
-082   * dependent column varies will be 
compared using the supplied
-083   * compareOp and comparator, for usage 
of which
-084   * refer to {@link CompareFilter}
-085   *
-086   * @param family dependent column 
family
-087   * @param qualifier dependent column 
qualifier
-088   * @param dropDependentColumn whether 
the column should be discarded after
-089   * @param op Value comparison op
-090   * @param valueComparator comparator
-091   */
-092  public DependentColumnFilter(final byte 
[] family, final byte[] qualifier,
-093   final 
boolean dropDependentColumn, final CompareOperator op,
-094   final 
ByteArrayComparable valueComparator) {
-095// set up the comparator
-096super(op, valueComparator);
-097this.columnFamily = family;
-098this.columnQualifier = qualifier;
-099this.dropDependentColumn = 
dropDependentColumn;
-100  }
-101  
-102  /**
-103   * Constructor for DependentColumn 
filter.
-104   * Cells where a Cell from target 
column
-105   * with the same timestamp do not exist 
will be dropped.
-106   *
-107   * @param family name of target column 
family
-108   * @param qualifier name of column 
qualifier
-109   */
-110  public DependentColumnFilter(final byte 
[] family, final byte [] qualifier) {
-111this(family, qualifier, false);
-112  }
-113  
-114  /**
-115   * Constructor for DependentColumn 
filter.
-116   * Cells where a Cell from target 
column
-117   * with the same timestamp do not exist 
will be dropped.
-118   *
-119   * @param family name of dependent 
column family
-120   * @param qualifier name of dependent 
qualifier
-121   * @param dropDependentColumn whether 
the dependent 

[45/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/org/apache/hadoop/hbase/filter/QualifierFilter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/QualifierFilter.html 
b/apidocs/org/apache/hadoop/hbase/filter/QualifierFilter.html
index eede21a..cfecc97 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/QualifierFilter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/QualifierFilter.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":9,"i1":10,"i2":42,"i3":9,"i4":10};
+var methods = {"i0":9,"i1":10,"i2":10,"i3":42,"i4":10,"i5":9,"i6":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -125,7 +125,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class QualifierFilter
+public class QualifierFilter
 extends CompareFilter
 This filter is used to filter based on the column 
qualifier. It takes an
  operator (equal, greater, not equal, etc) and a byte [] comparator for the
@@ -235,22 +235,30 @@ extends createFilterFromArguments(https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
+boolean
+equals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
+
+
 Filter.ReturnCode
 filterCell(Cellc)
 A way to filter based on the column family, column 
qualifier and/or the column value.
 
 
-
+
 Filter.ReturnCode
 filterKeyValue(Cellc)
 Deprecated.
 
 
-
+
+int
+hashCode()
+
+
 static QualifierFilter
 parseFrom(byte[]pbBytes)
 
-
+
 byte[]
 toByteArray()
 Return length 0 byte array for Filters that don't require 
special serialization
@@ -283,7 +291,7 @@ extends java.lang.Object
 Methods inherited from class java.lang.Object
-clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait
+clone, finalize, getClass, notify, notifyAll, wait, wait, wait
 
 
 
@@ -306,7 +314,7 @@ extends 
 QualifierFilter
 https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
-publicQualifierFilter(CompareFilter.CompareOpop,
+publicQualifierFilter(CompareFilter.CompareOpop,
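
A minimal usage sketch for the QualifierFilter documented above, using the non-deprecated CompareOperator constructor rather than the deprecated CompareOp one; HBase 2.x client API assumed, and the qualifier value is a placeholder:

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.util.Bytes;

// Keep only cells whose column qualifier equals "status".
Scan scan = new Scan();
scan.setFilter(new QualifierFilter(CompareOperator.EQUAL,
    new BinaryComparator(Bytes.toBytes("status"))));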
   

[33/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html
index 9b9c648..c5cb1c7 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/InclusiveStopFilter.html
@@ -28,118 +28,133 @@
 020package org.apache.hadoop.hbase.filter;
 021
 022import java.util.ArrayList;
-023
-024import org.apache.hadoop.hbase.Cell;
-025import 
org.apache.hadoop.hbase.CellComparator;
-026import 
org.apache.yetus.audience.InterfaceAudience;
-027import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-028import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-029import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
-030import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-031import 
org.apache.hadoop.hbase.util.Bytes;
-032
-033import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-034
-035/**
-036 * A Filter that stops after the given 
row.  There is no "RowStopFilter" because
-037 * the Scan spec allows you to specify a 
stop row.
-038 *
-039 * Use this filter to include the stop 
row, eg: [A,Z].
-040 */
-041@InterfaceAudience.Public
-042public class InclusiveStopFilter extends 
FilterBase {
-043  private byte [] stopRowKey;
-044  private boolean done = false;
-045
-046  public InclusiveStopFilter(final byte 
[] stopRowKey) {
-047this.stopRowKey = stopRowKey;
-048  }
-049
-050  public byte[] getStopRowKey() {
-051return this.stopRowKey;
-052  }
-053
-054  @Deprecated
-055  @Override
-056  public ReturnCode filterKeyValue(final 
Cell c) {
-057return filterCell(c);
-058  }
-059
-060  @Override
-061  public ReturnCode filterCell(final Cell 
c) {
-062if (done) return 
ReturnCode.NEXT_ROW;
-063return ReturnCode.INCLUDE;
-064  }
-065
-066  @Override
-067  public boolean filterRowKey(Cell firstRowCell) {
-068    // if stopRowKey is <= buffer, then true, filter row.
-069    if (filterAllRemaining()) return true;
-070    int cmp = CellComparator.getInstance().compareRows(firstRowCell, stopRowKey, 0, stopRowKey.length);
-071    done = reversed ? cmp < 0 : cmp > 0;
-072    return done;
-073  }
-074
-075  @Override
-076  public boolean filterAllRemaining() {
-077return done;
-078  }
-079
-080  public static Filter createFilterFromArguments (ArrayList<byte []> filterArguments) {
-081    Preconditions.checkArgument(filterArguments.size() == 1,
-082        "Expected 1 but got: %s", filterArguments.size());
-083    byte [] stopRowKey = ParseFilter.removeQuotesFromByteArray(filterArguments.get(0));
-084return new 
InclusiveStopFilter(stopRowKey);
-085  }
-086
-087  /**
-088   * @return The filter serialized using 
pb
-089   */
-090  @Override
-091  public byte [] toByteArray() {
-092
FilterProtos.InclusiveStopFilter.Builder builder =
-093  
FilterProtos.InclusiveStopFilter.newBuilder();
-094if (this.stopRowKey != null) 
builder.setStopRowKey(
-095
UnsafeByteOperations.unsafeWrap(this.stopRowKey));
-096return 
builder.build().toByteArray();
-097  }
-098
-099  /**
-100   * @param pbBytes A pb serialized 
{@link InclusiveStopFilter} instance
-101   * @return An instance of {@link 
InclusiveStopFilter} made from codebytes/code
-102   * @throws DeserializationException
-103   * @see #toByteArray
-104   */
-105  public static InclusiveStopFilter 
parseFrom(final byte [] pbBytes)
-106  throws DeserializationException {
-107FilterProtos.InclusiveStopFilter 
proto;
-108try {
-109  proto = 
FilterProtos.InclusiveStopFilter.parseFrom(pbBytes);
-110} catch 
(InvalidProtocolBufferException e) {
-111  throw new 
DeserializationException(e);
-112}
-113return new 
InclusiveStopFilter(proto.hasStopRowKey()?proto.getStopRowKey().toByteArray():null);
-114  }
-115
-116  /**
-117   * @param o the other filter to compare 
with
-118   * @return true if and only if the 
fields of the filter that are serialized
-119   * are equal to the corresponding 
fields in other.  Used for testing.
-120   */
-121  @Override
-122  boolean areSerializedFieldsEqual(Filter 
o) {
-123if (o == this) return true;
-124if (!(o instanceof 
InclusiveStopFilter)) return false;
-125
-126InclusiveStopFilter other = 
(InclusiveStopFilter)o;
-127return 
Bytes.equals(this.getStopRowKey(), other.getStopRowKey());
-128  }
-129
-130  @Override
-131  public String toString() {
-132return 
this.getClass().getSimpleName() + " " + 
Bytes.toStringBinary(this.stopRowKey);
-133  }
-134}
+023import java.util.Objects;
+024
+025import org.apache.hadoop.hbase.Cell;
+026import 
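
A minimal usage sketch of the InclusiveStopFilter from the diff above, assuming the HBase 2.x client API; the row keys are placeholders:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.InclusiveStopFilter;
import org.apache.hadoop.hbase.util.Bytes;

// Scan the closed interval [row-0100, row-0200]; a plain stop row on the Scan
// would be exclusive, the filter makes the upper bound inclusive.
Scan scan = new Scan().withStartRow(Bytes.toBytes("row-0100"));
scan.setFilter(new InclusiveStopFilter(Bytes.toBytes("row-0200")));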

[24/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/ValueFilter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/ValueFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/ValueFilter.html
index ab5f2a7..2f19834 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/ValueFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/ValueFilter.html
@@ -29,125 +29,140 @@
 021
 022import java.io.IOException;
 023import java.util.ArrayList;
-024
-025import org.apache.hadoop.hbase.Cell;
-026import 
org.apache.hadoop.hbase.CompareOperator;
-027import 
org.apache.yetus.audience.InterfaceAudience;
-028import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-029import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-030import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-031import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-032
-033/**
-034 * This filter is used to filter based on 
column value. It takes an
-035 * operator (equal, greater, not equal, 
etc) and a byte [] comparator for the
-036 * cell value.
-037 * p
-038 * This filter can be wrapped with {@link 
WhileMatchFilter} and {@link SkipFilter}
-039 * to add more control.
-040 * p
-041 * Multiple filters can be combined using 
{@link FilterList}.
-042 * p
-043 * To test the value of a single 
qualifier when scanning multiple qualifiers,
-044 * use {@link SingleColumnValueFilter}.
-045 */
-046@InterfaceAudience.Public
-047public class ValueFilter extends 
CompareFilter {
-048
-049  /**
-050   * Constructor.
-051   * @param valueCompareOp the compare op 
for value matching
-052   * @param valueComparator the 
comparator for value matching
-053   * @deprecated Since 2.0.0. Will be 
removed in 3.0.0.
-054   * Use {@link 
#ValueFilter(CompareOperator, ByteArrayComparable)}
-055   */
-056  public ValueFilter(final CompareOp 
valueCompareOp,
-057  final ByteArrayComparable 
valueComparator) {
-058super(valueCompareOp, 
valueComparator);
-059  }
-060
-061  /**
-062   * Constructor.
-063   * @param valueCompareOp the compare op 
for value matching
-064   * @param valueComparator the 
comparator for value matching
-065   */
-066  public ValueFilter(final 
CompareOperator valueCompareOp,
-067 final 
ByteArrayComparable valueComparator) {
-068super(valueCompareOp, 
valueComparator);
-069  }
-070
-071  @Deprecated
-072  @Override
-073  public ReturnCode filterKeyValue(final 
Cell c) {
-074return filterCell(c);
-075  }
-076
-077  @Override
-078  public ReturnCode filterCell(final Cell 
c) {
-079if 
(compareValue(getCompareOperator(), this.comparator, c)) {
-080  return ReturnCode.SKIP;
-081}
-082return ReturnCode.INCLUDE;
-083  }
-084
-085  public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) {
-086@SuppressWarnings("rawtypes")  // for 
arguments
-087ArrayList arguments = 
CompareFilter.extractArguments(filterArguments);
-088CompareOperator compareOp = 
(CompareOperator)arguments.get(0);
-089ByteArrayComparable comparator = 
(ByteArrayComparable)arguments.get(1);
-090return new ValueFilter(compareOp, 
comparator);
-091  }
-092
-093  /**
-094   * @return The filter serialized using 
pb
-095   */
-096  @Override
-097  public byte [] toByteArray() {
-098FilterProtos.ValueFilter.Builder 
builder =
-099  
FilterProtos.ValueFilter.newBuilder();
-100
builder.setCompareFilter(super.convert());
-101return 
builder.build().toByteArray();
-102  }
-103
-104  /**
-105   * @param pbBytes A pb serialized 
{@link ValueFilter} instance
-106   * @return An instance of {@link 
ValueFilter} made from codebytes/code
-107   * @throws DeserializationException
-108   * @see #toByteArray
-109   */
-110  public static ValueFilter 
parseFrom(final byte [] pbBytes)
-111  throws DeserializationException {
-112FilterProtos.ValueFilter proto;
-113try {
-114  proto = 
FilterProtos.ValueFilter.parseFrom(pbBytes);
-115} catch 
(InvalidProtocolBufferException e) {
-116  throw new 
DeserializationException(e);
-117}
-118final CompareOperator valueCompareOp 
=
-119  
CompareOperator.valueOf(proto.getCompareFilter().getCompareOp().name());
-120ByteArrayComparable valueComparator = 
null;
-121try {
-122  if 
(proto.getCompareFilter().hasComparator()) {
-123valueComparator = 
ProtobufUtil.toComparator(proto.getCompareFilter().getComparator());
-124  }
-125} catch (IOException ioe) {
-126  throw new 
DeserializationException(ioe);
-127}
-128return new 
ValueFilter(valueCompareOp,valueComparator);
-129  }
-130
-131  /**
-132   * @return true if and only if the 
fields of the filter that are serialized
-133   * are equal to the corresponding 
fields in other.  Used for testing.
-134   
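
A minimal usage sketch of the ValueFilter from the diff above, assuming the HBase 2.x client API; the compared value is a placeholder:

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

// Keep only cells whose value is exactly the bytes of "ok"; non-matching cells
// are dropped individually. Wrap in SkipFilter to drop the whole row instead.
Scan scan = new Scan();
scan.setFilter(new ValueFilter(CompareOperator.EQUAL,
    new BinaryComparator(Bytes.toBytes("ok"))));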

[25/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/SkipFilter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/SkipFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/SkipFilter.html
index 9f62fe6..9a40948 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/SkipFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/SkipFilter.html
@@ -28,148 +28,163 @@
 020package org.apache.hadoop.hbase.filter;
 021
 022import java.io.IOException;
-023
-024import org.apache.hadoop.hbase.Cell;
-025import 
org.apache.yetus.audience.InterfaceAudience;
-026import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-027import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-028import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-029import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-030
-031/**
-032 * A wrapper filter that filters an 
entire row if any of the Cell checks do
-033 * not pass.
-034 * p
-035 * For example, if all columns in a row 
represent weights of different things,
-036 * with the values being the actual 
weights, and we want to filter out the
-037 * entire row if any of its weights are 
zero.  In this case, we want to prevent
-038 * rows from being emitted if a single 
key is filtered.  Combine this filter
-039 * with a {@link ValueFilter}:
-040 * /p
-041 * p
-042 * code
-043 * scan.setFilter(new SkipFilter(new 
ValueFilter(CompareOp.NOT_EQUAL,
-044 * new 
BinaryComparator(Bytes.toBytes(0;
-045 * /code
-046 * Any row which contained a column whose 
value was 0 will be filtered out
-047 * (since ValueFilter will not pass that 
Cell).
-048 * Without this filter, the other 
non-zero valued columns in the row would still
-049 * be emitted.
-050 * /p
-051 */
-052@InterfaceAudience.Public
-053public class SkipFilter extends 
FilterBase {
-054  private boolean filterRow = false;
-055  private Filter filter;
-056
-057  public SkipFilter(Filter filter) {
-058this.filter = filter;
-059  }
-060
-061  public Filter getFilter() {
-062return filter;
-063  }
-064
-065  @Override
-066  public void reset() throws IOException 
{
-067filter.reset();
-068filterRow = false;
-069  }
-070
-071  private void changeFR(boolean value) 
{
-072filterRow = filterRow || value;
-073  }
-074
-075  @Override
-076  public boolean filterRowKey(Cell cell) 
throws IOException {
-077// Impl in FilterBase might do 
unnecessary copy for Off heap backed Cells.
-078return false;
-079  }
-080
-081  @Deprecated
-082  @Override
-083  public ReturnCode filterKeyValue(final 
Cell c) throws IOException {
-084return filterCell(c);
-085  }
-086
-087  @Override
-088  public ReturnCode filterCell(final Cell 
c) throws IOException {
-089ReturnCode rc = 
filter.filterCell(c);
-090changeFR(rc != ReturnCode.INCLUDE);
-091return rc;
-092  }
-093
-094  @Override
-095  public Cell transformCell(Cell v) 
throws IOException {
-096return filter.transformCell(v);
-097  }
-098
-099  @Override
-100  public boolean filterRow() {
-101return filterRow;
-102  }
-103
-104  @Override
-105  public boolean hasFilterRow() {
-106return true;
-107  }
-108
-109  /**
-110   * @return The filter serialized using 
pb
-111   */
-112  @Override
-113  public byte[] toByteArray() throws 
IOException {
-114FilterProtos.SkipFilter.Builder 
builder =
-115  
FilterProtos.SkipFilter.newBuilder();
-116
builder.setFilter(ProtobufUtil.toFilter(this.filter));
-117return 
builder.build().toByteArray();
-118  }
-119
-120  /**
-121   * @param pbBytes A pb serialized 
{@link SkipFilter} instance
-122   * @return An instance of {@link 
SkipFilter} made from codebytes/code
-123   * @throws DeserializationException
-124   * @see #toByteArray
-125   */
-126  public static SkipFilter 
parseFrom(final byte [] pbBytes)
-127  throws DeserializationException {
-128FilterProtos.SkipFilter proto;
-129try {
-130  proto = 
FilterProtos.SkipFilter.parseFrom(pbBytes);
-131} catch 
(InvalidProtocolBufferException e) {
-132  throw new 
DeserializationException(e);
-133}
-134try {
-135  return new 
SkipFilter(ProtobufUtil.toFilter(proto.getFilter()));
-136} catch (IOException ioe) {
-137  throw new 
DeserializationException(ioe);
-138}
-139  }
-140
-141  /**
-142   * @param o the other filter to compare 
with
-143   * @return true if and only if the 
fields of the filter that are serialized
-144   * are equal to the corresponding 
fields in other.  Used for testing.
-145   */
-146  @Override
-147  boolean areSerializedFieldsEqual(Filter 
o) {
-148if (o == this) return true;
-149if (!(o instanceof SkipFilter)) 
return false;
-150
-151SkipFilter other = (SkipFilter)o;
-152return 
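
The SkipFilter javadoc above already sketches the intended combination with ValueFilter; here is that example spelled out as compilable code against the HBase 2.x client API, using CompareOperator in place of the deprecated CompareOp:

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.SkipFilter;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

// Drop an entire row as soon as any of its cells has the value 0.
Scan scan = new Scan();
scan.setFilter(new SkipFilter(new ValueFilter(CompareOperator.NOT_EQUAL,
    new BinaryComparator(Bytes.toBytes(0)))));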

[26/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html
index 77dc6d6..67b265c 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html
@@ -29,446 +29,462 @@
 021
 022import java.io.IOException;
 023import java.util.ArrayList;
-024
-025import org.apache.hadoop.hbase.Cell;
-026import 
org.apache.hadoop.hbase.CellUtil;
-027import 
org.apache.hadoop.hbase.CompareOperator;
-028import 
org.apache.hadoop.hbase.PrivateCellUtil;
-029import 
org.apache.yetus.audience.InterfaceAudience;
-030import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-031import 
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-032import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-033import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
-034import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-035import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-036import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-037import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
-038import 
org.apache.hadoop.hbase.util.Bytes;
-039
-040import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-041
-042/**
-043 * This filter is used to filter cells 
based on value. It takes a {@link CompareFilter.CompareOp}
-044 * operator (equal, greater, not equal, 
etc), and either a byte [] value or
-045 * a ByteArrayComparable.
-046 * p
-047 * If we have a byte [] value then we 
just do a lexicographic compare. For
-048 * example, if passed value is 'b' and 
cell has 'a' and the compare operator
-049 * is LESS, then we will filter out this 
cell (return true).  If this is not
-050 * sufficient (eg you want to deserialize 
a long and then compare it to a fixed
-051 * long value), then you can pass in your 
own comparator instead.
-052 * p
-053 * You must also specify a family and 
qualifier.  Only the value of this column
-054 * will be tested. When using this filter 
on a 
-055 * {@link 
org.apache.hadoop.hbase.CellScanner} with specified
-056 * inputs, the column to be tested should 
also be added as input (otherwise
-057 * the filter will regard the column as 
missing).
-058 * p
-059 * To prevent the entire row from being 
emitted if the column is not found
-060 * on a row, use {@link 
#setFilterIfMissing}.
-061 * Otherwise, if the column is found, the 
entire row will be emitted only if
-062 * the value passes.  If the value fails, 
the row will be filtered out.
-063 * p
-064 * In order to test values of previous 
versions (timestamps), set
-065 * {@link #setLatestVersionOnly} to 
false. The default is true, meaning that
-066 * only the latest version's value is 
tested and all previous versions are ignored.
-067 * p
-068 * To filter based on the value of all 
scanned columns, use {@link ValueFilter}.
-069 */
-070@InterfaceAudience.Public
-071public class SingleColumnValueFilter 
extends FilterBase {
-072
-073  protected byte [] columnFamily;
-074  protected byte [] columnQualifier;
-075  protected CompareOperator op;
-076  protected 
org.apache.hadoop.hbase.filter.ByteArrayComparable comparator;
-077  protected boolean foundColumn = 
false;
-078  protected boolean matchedColumn = 
false;
-079  protected boolean filterIfMissing = 
false;
-080  protected boolean latestVersionOnly = 
true;
-081
-082  /**
-083   * Constructor for binary compare of 
the value of a single column.  If the
-084   * column is found and the condition 
passes, all columns of the row will be
-085   * emitted.  If the condition fails, 
the row will not be emitted.
-086   * p
-087   * Use the filterIfColumnMissing flag 
to set whether the rest of the columns
-088   * in a row will be emitted if the 
specified column to check is not found in
-089   * the row.
-090   *
-091   * @param family name of column 
family
-092   * @param qualifier name of column 
qualifier
-093   * @param compareOp operator
-094   * @param value value to compare column 
values against
-095   * @deprecated Since 2.0.0. Will be 
removed in 3.0.0. Use
-096   * {@link 
#SingleColumnValueFilter(byte[], byte[], CompareOperator, byte[])} instead.
-097   */
-098  @Deprecated
-099  public SingleColumnValueFilter(final 
byte [] family, final byte [] qualifier,
-100  final CompareOp compareOp, final 
byte[] value) {
-101this(family, qualifier, 
CompareOperator.valueOf(compareOp.name()),
-102  new 
org.apache.hadoop.hbase.filter.BinaryComparator(value));
-103  }
-104
-105  /**
-106   * Constructor for binary
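
A minimal usage sketch of the SingleColumnValueFilter from the diff above, using the non-deprecated CompareOperator constructor it points to; the d:status column and the "active" value are placeholders (HBase 2.x client API assumed):

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

// Emit a row only when d:status equals "active"; because of
// setFilterIfMissing(true), rows without the column are filtered out too.
SingleColumnValueFilter scvf = new SingleColumnValueFilter(
    Bytes.toBytes("d"), Bytes.toBytes("status"),
    CompareOperator.EQUAL, Bytes.toBytes("active"));
scvf.setFilterIfMissing(true);
Scan scan = new Scan();
scan.setFilter(scvf);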