svn commit: r13384 - /dev/hbase/0.98.19RC0/

2016-04-22 Thread apurtell
Author: apurtell
Date: Sat Apr 23 02:28:42 2016
New Revision: 13384

Log:
HBase 0.98.19RC0 artifacts and compat report

Added:
dev/hbase/0.98.19RC0/
dev/hbase/0.98.19RC0/0.98.18_0.98.19RC0_compat_report.html
dev/hbase/0.98.19RC0/hbase-0.98.19-hadoop1-bin.tar.gz   (with props)
dev/hbase/0.98.19RC0/hbase-0.98.19-hadoop1-bin.tar.gz.asc
dev/hbase/0.98.19RC0/hbase-0.98.19-hadoop1-bin.tar.gz.mds
dev/hbase/0.98.19RC0/hbase-0.98.19-hadoop2-bin.tar.gz   (with props)
dev/hbase/0.98.19RC0/hbase-0.98.19-hadoop2-bin.tar.gz.asc
dev/hbase/0.98.19RC0/hbase-0.98.19-hadoop2-bin.tar.gz.mds
dev/hbase/0.98.19RC0/hbase-0.98.19-src.tar.gz   (with props)
dev/hbase/0.98.19RC0/hbase-0.98.19-src.tar.gz.asc
dev/hbase/0.98.19RC0/hbase-0.98.19-src.tar.gz.mds

Added: dev/hbase/0.98.19RC0/0.98.18_0.98.19RC0_compat_report.html
==
--- dev/hbase/0.98.19RC0/0.98.18_0.98.19RC0_compat_report.html (added)
+++ dev/hbase/0.98.19RC0/0.98.18_0.98.19RC0_compat_report.html Sat Apr 23 
02:28:42 2016
@@ -0,0 +1,409 @@
+
+
+http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
+http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
+
+
+
+
+HBase: 0.98.18 to 0.98.19RC0 compatibility report
+
+
+body {
+font-family:Arial, sans-serif;
+background-color:White;
+color:Black;
+}
+hr {
+color:Black;
+background-color:Black;
+height:1px;
+border:0;
+}
+h1 {
+margin-bottom:0px;
+padding-bottom:0px;
+font-size:1.625em;
+}
+h2 {
+margin-bottom:0px;
+padding-bottom:0px;
+font-size:1.25em;
+white-space:nowrap;
+}
+div.symbols {
+color:#003E69;
+}
+div.symbols i {
+color:Brown;
+}
+span.section {
+font-weight:bold;
+cursor:pointer;
+color:#003E69;
+white-space:nowrap;
+margin-left:5px;
+}
+span:hover.section {
+color:#336699;
+}
+span.sect_aff {
+cursor:pointer;
+margin-left:7px;
+padding-left:15px;
+font-size:0.875em;
+color:#cc3300;
+}
+span.ext {
+font-weight:100;
+}
+span.jar {
+color:#cc3300;
+font-size:0.875em;
+font-weight:bold;
+}
+div.jar_list {
+padding-left:5px;
+font-size:0.94em;
+}
+span.pkg_t {
+color:#408080;
+font-size:0.875em;
+}
+span.pkg {
+color:#408080;
+font-size:0.875em;
+font-weight:bold;
+}
+span.cname {
+color:Green;
+font-size:0.875em;
+font-weight:bold;
+}
+span.iname_b {
+font-weight:bold;
+font-size:1.1em;
+}
+span.iname_a {
+color:#333333;
+font-weight:bold;
+font-size:0.94em;
+}
+span.sym_p {
+font-weight:normal;
+white-space:normal;
+}
+span.sym_p span {
+white-space:nowrap;
+}
+span.attr {
+color:Black;
+font-weight:100;
+}
+span.deprecated {
+color:Red;
+font-weight:bold;
+font-family:Monaco, monospace;
+}
+div.affect {
+padding-left:15px;
+padding-bottom:10px;
+font-size:0.87em;
+font-style:italic;
+line-height:0.75em;
+}
+div.affected {
+padding-left:30px;
+padding-top:10px;
+}
+table.ptable {
+border-collapse:collapse;
+border:1px outset black;
+line-height:1em;
+margin-left:15px;
+margin-top:3px;
+margin-bottom:3px;
+width:900px;
+}
+table.ptable td {
+border:1px solid Gray;
+padding: 3px;
+font-size:0.875em;
+text-align:left;
+vertical-align:top;
+}
+table.ptable th {
+background-color:#eeeeee;
+font-weight:bold;
+color:#333333;
+font-family:Verdana, Arial;
+font-size:0.81em;
+border:1px solid Gray;
+text-align:center;
+vertical-align:top;
+white-space:nowrap;
+padding: 3px;
+}
+table.summary {
+border-collapse:collapse;
+border:1px outset black;
+}
+table.summary th {
+background-color:#eeeeee;
+font-weight:100;
+text-align:left;
+font-size:0.94em;
+white-space:nowrap;
+border:1px inset gray;
+padding: 3px;
+}
+table.summary td {
+text-align:right;
+white-space:nowrap;
+border:1px inset gray;
+padding: 3px 5px 3px 10px;
+}
+span.mngl {
+padding-left:15px;
+font-size:0.875em;
+cursor:text;
+color:#444444;
+}
+span.color_p {
+font-style:italic;
+color:Brown;
+}
+span.param {
+font-style:italic;
+}
+span.focus_p {
+font-style:italic;
+background-color:#FF;
+}
+span.nowrap {
+white-space:nowrap;
+}
+td.passed {
+background-color:#CCFFCC;
+}
+td.warning {
+background-color:#F4F4AF;
+}
+td.failed {
+background-color:#FFCCCC;
+}
+td.new {
+background-color:#C6DEFF;
+}
+
+td.compatible {
+background-color:#CCFFCC;
+}
+td.almost_compatible {
+background-color:#FFDAA3;
+}
+td.incompatible {
+background-color:#FFCCCC;
+}
+
+.top_ref {
+font-size:0.69em;
+}
+.footer {
+font-size:0.75em;
+}
+.tabset {
+float:left;
+}
+a.tab {
+border:1px solid Black;
+float:left;
+margin:0px 5px -1px 0px;
+padding:3px 5px 3px 5px;
+position:relative;
+ 

[hbase] Git Push Summary

2016-04-22 Thread apurtell
Repository: hbase
Updated Tags:  refs/tags/rel/0.98.18 [created] 30fe6c6e1


hbase git commit: HBASE-15572 Adding optional timestamp semantics to HBase-Spark; ADDENDUM TO FIX DOC FORMATTING ISSUE (Misty)

2016-04-22 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 106fe99a9 -> 36e4033de


HBASE-15572 Adding optional timestamp semantics to HBase-Spark; ADDENDUM TO FIX 
DOC FORMATTING ISSUE (Misty)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/36e4033d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/36e4033d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/36e4033d

Branch: refs/heads/master
Commit: 36e4033dedfa3bf4f9ac5de35bb1c46334aa74c0
Parents: 106fe99
Author: stack 
Authored: Fri Apr 22 14:06:30 2016 -0700
Committer: stack 
Committed: Fri Apr 22 14:06:42 2016 -0700

--
 src/main/asciidoc/_chapters/spark.adoc | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/36e4033d/src/main/asciidoc/_chapters/spark.adoc
--
diff --git a/src/main/asciidoc/_chapters/spark.adoc 
b/src/main/asciidoc/_chapters/spark.adoc
index 22ed468..88918aa 100644
--- a/src/main/asciidoc/_chapters/spark.adoc
+++ b/src/main/asciidoc/_chapters/spark.adoc
@@ -429,6 +429,7 @@ After loading df DataFrame, users can query data.
 df.registerTempTable("table")
 sqlContext.sql("select count(col1) from table").show
 
+
 
 === Predicate Push Down
 
@@ -550,4 +551,4 @@ The last major point to note in the example is the 
`sqlContext.sql` function, wh
 allows the user to ask their questions in SQL which will be pushed down to the
 DefaultSource code in the HBase-Spark module. The result of this command will 
be
 a DataFrame with the Schema of KEY_FIELD and B_FIELD.
-
\ No newline at end of file
+



[07/10] hbase git commit: HBASE-15670 Add missing Snapshot.proto to the maven profile for compiling protobuf

2016-04-22 Thread enis
http://git-wip-us.apache.org/repos/asf/hbase/blob/d5d931e5/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
index c0b1917..b25f7aa 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
@@ -8,7 +8,7 @@ public final class ColumnAggregationWithNullResponseProtos {
   public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
   }
-  public interface SumRequestOrBuilder
+  public interface ColumnAggregationNullResponseSumRequestOrBuilder
   extends com.google.protobuf.MessageOrBuilder {
 
 // required bytes family = 1;
@@ -32,24 +32,29 @@ public final class ColumnAggregationWithNullResponseProtos {
 com.google.protobuf.ByteString getQualifier();
   }
   /**
-   * Protobuf type {@code SumRequest}
+   * Protobuf type {@code ColumnAggregationNullResponseSumRequest}
+   *
+   * 
+   * use unique names for messages in ColumnAggregationXXX.protos due to a bug 
in
+   * protoc or hadoop's protoc compiler.
+   * 
*/
-  public static final class SumRequest extends
+  public static final class ColumnAggregationNullResponseSumRequest extends
   com.google.protobuf.GeneratedMessage
-  implements SumRequestOrBuilder {
-// Use SumRequest.newBuilder() to construct.
-private SumRequest(com.google.protobuf.GeneratedMessage.Builder 
builder) {
+  implements ColumnAggregationNullResponseSumRequestOrBuilder {
+// Use ColumnAggregationNullResponseSumRequest.newBuilder() to construct.
+private 
ColumnAggregationNullResponseSumRequest(com.google.protobuf.GeneratedMessage.Builder
 builder) {
   super(builder);
   this.unknownFields = builder.getUnknownFields();
 }
-private SumRequest(boolean noInit) { this.unknownFields = 
com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+private ColumnAggregationNullResponseSumRequest(boolean noInit) { 
this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
 
-private static final SumRequest defaultInstance;
-public static SumRequest getDefaultInstance() {
+private static final ColumnAggregationNullResponseSumRequest 
defaultInstance;
+public static ColumnAggregationNullResponseSumRequest getDefaultInstance() 
{
   return defaultInstance;
 }
 
-public SumRequest getDefaultInstanceForType() {
+public ColumnAggregationNullResponseSumRequest getDefaultInstanceForType() 
{
   return defaultInstance;
 }
 
@@ -59,7 +64,7 @@ public final class ColumnAggregationWithNullResponseProtos {
 getUnknownFields() {
   return this.unknownFields;
 }
-private SumRequest(
+private ColumnAggregationNullResponseSumRequest(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
@@ -106,28 +111,28 @@ public final class 
ColumnAggregationWithNullResponseProtos {
 }
 public static final com.google.protobuf.Descriptors.Descriptor
 getDescriptor() {
-  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumRequest_descriptor;
+  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumRequest_descriptor;
 }
 
 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
 internalGetFieldAccessorTable() {
-  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumRequest_fieldAccessorTable
+  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumRequest_fieldAccessorTable
   .ensureFieldAccessorsInitialized(
-  
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.class,
 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.Builder.class);
+  
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest.class,
 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationW

[02/10] hbase git commit: HBASE-15670 Add missing Snapshot.proto to the maven profile for compiling protobuf

2016-04-22 Thread enis
HBASE-15670 Add missing Snapshot.proto to the maven profile for compiling 
protobuf

Conflicts:
hbase-protocol/pom.xml

hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4ecd7de0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4ecd7de0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4ecd7de0

Branch: refs/heads/branch-1.2
Commit: 4ecd7de0bfc2df84d8111d8610b6d8a841f8e716
Parents: 3af736a
Author: Enis Soztutar 
Authored: Thu Apr 21 17:02:51 2016 -0700
Committer: Enis Soztutar 
Committed: Thu Apr 21 18:58:31 2016 -0700

--
 hbase-protocol/pom.xml  |   7 +-
 hbase-rest/pom.xml  |   2 +-
 hbase-server/pom.xml|   4 +-
 .../ColumnAggregationEndpointNullResponse.java  |  10 +-
 .../ColumnAggregationEndpointWithErrors.java|   9 +-
 .../TestBatchCoprocessorEndpoint.java   |  32 +-
 .../ColumnAggregationWithErrorsProtos.java  | 314 +-
 ...ColumnAggregationWithNullResponseProtos.java | 315 ++-
 .../ColumnAggregationNullResponseProtocol.proto |   9 +-
 .../ColumnAggregationWithErrorsProtocol.proto   |   9 +-
 10 files changed, 377 insertions(+), 334 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4ecd7de0/hbase-protocol/pom.xml
--
diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml
index 40fb6bc..60cf061 100644
--- a/hbase-protocol/pom.xml
+++ b/hbase-protocol/pom.xml
@@ -166,6 +166,8 @@
   
   
 ${basedir}/src/main/protobuf
+
 
   AccessControl.proto
   Admin.proto
@@ -178,8 +180,8 @@
   Comparator.proto
   Encryption.proto
   ErrorHandling.proto
-  Filter.proto
   FS.proto
+  Filter.proto
   HBase.proto
   HFile.proto
   LoadBalancer.proto
@@ -189,11 +191,12 @@
   MultiRowMutation.proto
   Procedure.proto
   Quota.proto
+  RPC.proto
   RegionNormalizer.proto
   RegionServerStatus.proto
   RowProcessor.proto
-  RPC.proto
   SecureBulkLoad.proto
+  Snapshot.proto
   Tracing.proto
   VisibilityLabels.proto
   WAL.proto

http://git-wip-us.apache.org/repos/asf/hbase/blob/4ecd7de0/hbase-rest/pom.xml
--
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index e01d7bc..5f9425f 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -366,8 +366,8 @@
   CellMessage.proto
   CellSetMessage.proto
   ColumnSchemaMessage.proto
-  NamespacesMessage.proto
   NamespacePropertiesMessage.proto
+  NamespacesMessage.proto
   ScannerMessage.proto
   StorageClusterStatusMessage.proto
   TableInfoMessage.proto

http://git-wip-us.apache.org/repos/asf/hbase/blob/4ecd7de0/hbase-server/pom.xml
--
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index 0543320..61a9105 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -868,10 +868,12 @@
   
 ${basedir}/src/test/protobuf
 
+  
ColumnAggregationNullResponseProtocol.proto
   ColumnAggregationProtocol.proto
+  
ColumnAggregationWithErrorsProtocol.proto
+  DummyRegionServerEndpoint.proto
   IncrementCounterProcessor.proto
   PingProtocol.proto
-  DummyRegionServerEndpoint.proto
   TestProcedure.proto
   test.proto
   test_rpc_service.proto

http://git-wip-us.apache.org/repos/asf/hbase/blob/4ecd7de0/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
--
diff --git 
a/hb

[03/10] hbase git commit: HBASE-15670 Add missing Snapshot.proto to the maven profile for compiling protobuf

2016-04-22 Thread enis
http://git-wip-us.apache.org/repos/asf/hbase/blob/106fe99a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
index c0b1917..b25f7aa 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
@@ -8,7 +8,7 @@ public final class ColumnAggregationWithNullResponseProtos {
   public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
   }
-  public interface SumRequestOrBuilder
+  public interface ColumnAggregationNullResponseSumRequestOrBuilder
   extends com.google.protobuf.MessageOrBuilder {
 
 // required bytes family = 1;
@@ -32,24 +32,29 @@ public final class ColumnAggregationWithNullResponseProtos {
 com.google.protobuf.ByteString getQualifier();
   }
   /**
-   * Protobuf type {@code SumRequest}
+   * Protobuf type {@code ColumnAggregationNullResponseSumRequest}
+   *
+   * 
+   * use unique names for messages in ColumnAggregationXXX.protos due to a bug 
in
+   * protoc or hadoop's protoc compiler.
+   * 
*/
-  public static final class SumRequest extends
+  public static final class ColumnAggregationNullResponseSumRequest extends
   com.google.protobuf.GeneratedMessage
-  implements SumRequestOrBuilder {
-// Use SumRequest.newBuilder() to construct.
-private SumRequest(com.google.protobuf.GeneratedMessage.Builder 
builder) {
+  implements ColumnAggregationNullResponseSumRequestOrBuilder {
+// Use ColumnAggregationNullResponseSumRequest.newBuilder() to construct.
+private 
ColumnAggregationNullResponseSumRequest(com.google.protobuf.GeneratedMessage.Builder
 builder) {
   super(builder);
   this.unknownFields = builder.getUnknownFields();
 }
-private SumRequest(boolean noInit) { this.unknownFields = 
com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+private ColumnAggregationNullResponseSumRequest(boolean noInit) { 
this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
 
-private static final SumRequest defaultInstance;
-public static SumRequest getDefaultInstance() {
+private static final ColumnAggregationNullResponseSumRequest 
defaultInstance;
+public static ColumnAggregationNullResponseSumRequest getDefaultInstance() 
{
   return defaultInstance;
 }
 
-public SumRequest getDefaultInstanceForType() {
+public ColumnAggregationNullResponseSumRequest getDefaultInstanceForType() 
{
   return defaultInstance;
 }
 
@@ -59,7 +64,7 @@ public final class ColumnAggregationWithNullResponseProtos {
 getUnknownFields() {
   return this.unknownFields;
 }
-private SumRequest(
+private ColumnAggregationNullResponseSumRequest(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
@@ -106,28 +111,28 @@ public final class 
ColumnAggregationWithNullResponseProtos {
 }
 public static final com.google.protobuf.Descriptors.Descriptor
 getDescriptor() {
-  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumRequest_descriptor;
+  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumRequest_descriptor;
 }
 
 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
 internalGetFieldAccessorTable() {
-  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumRequest_fieldAccessorTable
+  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumRequest_fieldAccessorTable
   .ensureFieldAccessorsInitialized(
-  
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.class,
 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.Builder.class);
+  
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest.class,
 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationW

[01/10] hbase git commit: HBASE-15670 Add missing Snapshot.proto to the maven profile for compiling protobuf

2016-04-22 Thread enis
Repository: hbase
Updated Branches:
  refs/heads/branch-1 27446a5c4 -> 51be6569e
  refs/heads/branch-1.1 aef2d17e7 -> 8bf4fe473
  refs/heads/branch-1.2 3af736a95 -> 4ecd7de0b
  refs/heads/branch-1.3 fe632c214 -> d5d931e5d
  refs/heads/master 1ecb10ce0 -> 106fe99a9


http://git-wip-us.apache.org/repos/asf/hbase/blob/4ecd7de0/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
index c0b1917..b25f7aa 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
@@ -8,7 +8,7 @@ public final class ColumnAggregationWithNullResponseProtos {
   public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
   }
-  public interface SumRequestOrBuilder
+  public interface ColumnAggregationNullResponseSumRequestOrBuilder
   extends com.google.protobuf.MessageOrBuilder {
 
 // required bytes family = 1;
@@ -32,24 +32,29 @@ public final class ColumnAggregationWithNullResponseProtos {
 com.google.protobuf.ByteString getQualifier();
   }
   /**
-   * Protobuf type {@code SumRequest}
+   * Protobuf type {@code ColumnAggregationNullResponseSumRequest}
+   *
+   * 
+   * use unique names for messages in ColumnAggregationXXX.protos due to a bug 
in
+   * protoc or hadoop's protoc compiler.
+   * 
*/
-  public static final class SumRequest extends
+  public static final class ColumnAggregationNullResponseSumRequest extends
   com.google.protobuf.GeneratedMessage
-  implements SumRequestOrBuilder {
-// Use SumRequest.newBuilder() to construct.
-private SumRequest(com.google.protobuf.GeneratedMessage.Builder 
builder) {
+  implements ColumnAggregationNullResponseSumRequestOrBuilder {
+// Use ColumnAggregationNullResponseSumRequest.newBuilder() to construct.
+private 
ColumnAggregationNullResponseSumRequest(com.google.protobuf.GeneratedMessage.Builder
 builder) {
   super(builder);
   this.unknownFields = builder.getUnknownFields();
 }
-private SumRequest(boolean noInit) { this.unknownFields = 
com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+private ColumnAggregationNullResponseSumRequest(boolean noInit) { 
this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
 
-private static final SumRequest defaultInstance;
-public static SumRequest getDefaultInstance() {
+private static final ColumnAggregationNullResponseSumRequest 
defaultInstance;
+public static ColumnAggregationNullResponseSumRequest getDefaultInstance() 
{
   return defaultInstance;
 }
 
-public SumRequest getDefaultInstanceForType() {
+public ColumnAggregationNullResponseSumRequest getDefaultInstanceForType() 
{
   return defaultInstance;
 }
 
@@ -59,7 +64,7 @@ public final class ColumnAggregationWithNullResponseProtos {
 getUnknownFields() {
   return this.unknownFields;
 }
-private SumRequest(
+private ColumnAggregationNullResponseSumRequest(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
@@ -106,28 +111,28 @@ public final class 
ColumnAggregationWithNullResponseProtos {
 }
 public static final com.google.protobuf.Descriptors.Descriptor
 getDescriptor() {
-  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumRequest_descriptor;
+  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumRequest_descriptor;
 }
 
 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
 internalGetFieldAccessorTable() {
-  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumRequest_fieldAccessorTable
+  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumRequest_fieldAccessorTable
   .ensureFieldAccessorsInitialized(
-  
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.class,
 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponse

[06/10] hbase git commit: HBASE-15670 Add missing Snapshot.proto to the maven profile for compiling protobuf

2016-04-22 Thread enis
HBASE-15670 Add missing Snapshot.proto to the maven profile for compiling 
protobuf

Conflicts:
hbase-protocol/pom.xml

hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/51be6569
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/51be6569
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/51be6569

Branch: refs/heads/branch-1
Commit: 51be6569e63fb08b2e3820f7fbdad8f06840448a
Parents: 27446a5
Author: Enis Soztutar 
Authored: Thu Apr 21 17:02:51 2016 -0700
Committer: Enis Soztutar 
Committed: Fri Apr 22 11:44:14 2016 -0700

--
 hbase-protocol/pom.xml  |   7 +-
 hbase-rest/pom.xml  |   2 +-
 hbase-server/pom.xml|   4 +-
 .../ColumnAggregationEndpointNullResponse.java  |  10 +-
 .../ColumnAggregationEndpointWithErrors.java|   9 +-
 .../TestBatchCoprocessorEndpoint.java   |  32 +-
 .../ColumnAggregationWithErrorsProtos.java  | 314 +-
 ...ColumnAggregationWithNullResponseProtos.java | 315 ++-
 .../ColumnAggregationNullResponseProtocol.proto |   9 +-
 .../ColumnAggregationWithErrorsProtocol.proto   |   9 +-
 10 files changed, 377 insertions(+), 334 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/51be6569/hbase-protocol/pom.xml
--
diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml
index e602baf..9424691 100644
--- a/hbase-protocol/pom.xml
+++ b/hbase-protocol/pom.xml
@@ -166,6 +166,8 @@
   
   
 ${basedir}/src/main/protobuf
+
 
   AccessControl.proto
   Admin.proto
@@ -178,8 +180,8 @@
   Comparator.proto
   Encryption.proto
   ErrorHandling.proto
-  Filter.proto
   FS.proto
+  Filter.proto
   HBase.proto
   HFile.proto
   LoadBalancer.proto
@@ -189,11 +191,12 @@
   MultiRowMutation.proto
   Procedure.proto
   Quota.proto
+  RPC.proto
   RegionNormalizer.proto
   RegionServerStatus.proto
   RowProcessor.proto
-  RPC.proto
   SecureBulkLoad.proto
+  Snapshot.proto
   Tracing.proto
   VisibilityLabels.proto
   WAL.proto

http://git-wip-us.apache.org/repos/asf/hbase/blob/51be6569/hbase-rest/pom.xml
--
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index df48106..882e6d1 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -366,8 +366,8 @@
   CellMessage.proto
   CellSetMessage.proto
   ColumnSchemaMessage.proto
-  NamespacesMessage.proto
   NamespacePropertiesMessage.proto
+  NamespacesMessage.proto
   ScannerMessage.proto
   StorageClusterStatusMessage.proto
   TableInfoMessage.proto

http://git-wip-us.apache.org/repos/asf/hbase/blob/51be6569/hbase-server/pom.xml
--
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index b044b62..8762034 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -868,10 +868,12 @@
   
 ${basedir}/src/test/protobuf
 
+  
ColumnAggregationNullResponseProtocol.proto
   ColumnAggregationProtocol.proto
+  
ColumnAggregationWithErrorsProtocol.proto
+  DummyRegionServerEndpoint.proto
   IncrementCounterProcessor.proto
   PingProtocol.proto
-  DummyRegionServerEndpoint.proto
   TestProcedure.proto
   test.proto
   test_rpc_service.proto

http://git-wip-us.apache.org/repos/asf/hbase/blob/51be6569/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
--
diff --git 
a/hbas

[08/10] hbase git commit: HBASE-15670 Add missing Snapshot.proto to the maven profile for compiling protobuf

2016-04-22 Thread enis
HBASE-15670 Add missing Snapshot.proto to the maven profile for compiling 
protobuf

Conflicts:
hbase-protocol/pom.xml

hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d5d931e5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d5d931e5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d5d931e5

Branch: refs/heads/branch-1.3
Commit: d5d931e5d7fced14024e7c52efbfffeb40ecd4ba
Parents: fe632c2
Author: Enis Soztutar 
Authored: Thu Apr 21 17:02:51 2016 -0700
Committer: Enis Soztutar 
Committed: Fri Apr 22 11:45:41 2016 -0700

--
 hbase-protocol/pom.xml  |   7 +-
 hbase-rest/pom.xml  |   2 +-
 hbase-server/pom.xml|   4 +-
 .../ColumnAggregationEndpointNullResponse.java  |  10 +-
 .../ColumnAggregationEndpointWithErrors.java|   9 +-
 .../TestBatchCoprocessorEndpoint.java   |  32 +-
 .../ColumnAggregationWithErrorsProtos.java  | 314 +-
 ...ColumnAggregationWithNullResponseProtos.java | 315 ++-
 .../ColumnAggregationNullResponseProtocol.proto |   9 +-
 .../ColumnAggregationWithErrorsProtocol.proto   |   9 +-
 10 files changed, 377 insertions(+), 334 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d5d931e5/hbase-protocol/pom.xml
--
diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml
index 95b6819..c21b5d7 100644
--- a/hbase-protocol/pom.xml
+++ b/hbase-protocol/pom.xml
@@ -166,6 +166,8 @@
   
   
 ${basedir}/src/main/protobuf
+
 
   AccessControl.proto
   Admin.proto
@@ -178,8 +180,8 @@
   Comparator.proto
   Encryption.proto
   ErrorHandling.proto
-  Filter.proto
   FS.proto
+  Filter.proto
   HBase.proto
   HFile.proto
   LoadBalancer.proto
@@ -189,11 +191,12 @@
   MultiRowMutation.proto
   Procedure.proto
   Quota.proto
+  RPC.proto
   RegionNormalizer.proto
   RegionServerStatus.proto
   RowProcessor.proto
-  RPC.proto
   SecureBulkLoad.proto
+  Snapshot.proto
   Tracing.proto
   VisibilityLabels.proto
   WAL.proto

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5d931e5/hbase-rest/pom.xml
--
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index 75320da..029e70e 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -366,8 +366,8 @@
   CellMessage.proto
   CellSetMessage.proto
   ColumnSchemaMessage.proto
-  NamespacesMessage.proto
   NamespacePropertiesMessage.proto
+  NamespacesMessage.proto
   ScannerMessage.proto
   StorageClusterStatusMessage.proto
   TableInfoMessage.proto

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5d931e5/hbase-server/pom.xml
--
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index a2cf010..f5f2706 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -868,10 +868,12 @@
   
 ${basedir}/src/test/protobuf
 
+  
ColumnAggregationNullResponseProtocol.proto
   ColumnAggregationProtocol.proto
+  
ColumnAggregationWithErrorsProtocol.proto
+  DummyRegionServerEndpoint.proto
   IncrementCounterProcessor.proto
   PingProtocol.proto
-  DummyRegionServerEndpoint.proto
   TestProcedure.proto
   test.proto
   test_rpc_service.proto

http://git-wip-us.apache.org/repos/asf/hbase/blob/d5d931e5/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
--
diff --git 
a/hb

[09/10] hbase git commit: HBASE-15670 Add missing Snapshot.proto to the maven profile for compiling protobuf

2016-04-22 Thread enis
http://git-wip-us.apache.org/repos/asf/hbase/blob/8bf4fe47/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
index c0b1917..b25f7aa 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
@@ -8,7 +8,7 @@ public final class ColumnAggregationWithNullResponseProtos {
   public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
   }
-  public interface SumRequestOrBuilder
+  public interface ColumnAggregationNullResponseSumRequestOrBuilder
   extends com.google.protobuf.MessageOrBuilder {
 
 // required bytes family = 1;
@@ -32,24 +32,29 @@ public final class ColumnAggregationWithNullResponseProtos {
 com.google.protobuf.ByteString getQualifier();
   }
   /**
-   * Protobuf type {@code SumRequest}
+   * Protobuf type {@code ColumnAggregationNullResponseSumRequest}
+   *
+   * 
+   * use unique names for messages in ColumnAggregationXXX.protos due to a bug 
in
+   * protoc or hadoop's protoc compiler.
+   * 
*/
-  public static final class SumRequest extends
+  public static final class ColumnAggregationNullResponseSumRequest extends
   com.google.protobuf.GeneratedMessage
-  implements SumRequestOrBuilder {
-// Use SumRequest.newBuilder() to construct.
-private SumRequest(com.google.protobuf.GeneratedMessage.Builder 
builder) {
+  implements ColumnAggregationNullResponseSumRequestOrBuilder {
+// Use ColumnAggregationNullResponseSumRequest.newBuilder() to construct.
+private 
ColumnAggregationNullResponseSumRequest(com.google.protobuf.GeneratedMessage.Builder
 builder) {
   super(builder);
   this.unknownFields = builder.getUnknownFields();
 }
-private SumRequest(boolean noInit) { this.unknownFields = 
com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+private ColumnAggregationNullResponseSumRequest(boolean noInit) { 
this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
 
-private static final SumRequest defaultInstance;
-public static SumRequest getDefaultInstance() {
+private static final ColumnAggregationNullResponseSumRequest 
defaultInstance;
+public static ColumnAggregationNullResponseSumRequest getDefaultInstance() 
{
   return defaultInstance;
 }
 
-public SumRequest getDefaultInstanceForType() {
+public ColumnAggregationNullResponseSumRequest getDefaultInstanceForType() 
{
   return defaultInstance;
 }
 
@@ -59,7 +64,7 @@ public final class ColumnAggregationWithNullResponseProtos {
 getUnknownFields() {
   return this.unknownFields;
 }
-private SumRequest(
+private ColumnAggregationNullResponseSumRequest(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
@@ -106,28 +111,28 @@ public final class 
ColumnAggregationWithNullResponseProtos {
 }
 public static final com.google.protobuf.Descriptors.Descriptor
 getDescriptor() {
-  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumRequest_descriptor;
+  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumRequest_descriptor;
 }
 
 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
 internalGetFieldAccessorTable() {
-  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumRequest_fieldAccessorTable
+  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumRequest_fieldAccessorTable
   .ensureFieldAccessorsInitialized(
-  
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.class,
 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.Builder.class);
+  
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest.class,
 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationW

[04/10] hbase git commit: HBASE-15670 Add missing Snapshot.proto to the maven profile for compiling protobuf

2016-04-22 Thread enis
HBASE-15670 Add missing Snapshot.proto to the maven profile for compiling 
protobuf


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/106fe99a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/106fe99a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/106fe99a

Branch: refs/heads/master
Commit: 106fe99a9aca17d02455e480555211b67a262959
Parents: 1ecb10c
Author: Enis Soztutar 
Authored: Thu Apr 21 17:02:51 2016 -0700
Committer: Enis Soztutar 
Committed: Fri Apr 22 11:34:49 2016 -0700

--
 hbase-protocol/pom.xml  |  11 +-
 hbase-rest/pom.xml  |   2 +-
 hbase-server/pom.xml|   4 +-
 .../ColumnAggregationEndpointNullResponse.java  |  10 +-
 .../ColumnAggregationEndpointWithErrors.java|   9 +-
 .../TestBatchCoprocessorEndpoint.java   |  32 +-
 .../ColumnAggregationWithErrorsProtos.java  | 314 +-
 ...ColumnAggregationWithNullResponseProtos.java | 315 ++-
 .../ColumnAggregationNullResponseProtocol.proto |   9 +-
 .../ColumnAggregationWithErrorsProtocol.proto   |   9 +-
 10 files changed, 379 insertions(+), 336 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/106fe99a/hbase-protocol/pom.xml
--
diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml
index d43b7b9..56ab13a 100644
--- a/hbase-protocol/pom.xml
+++ b/hbase-protocol/pom.xml
@@ -166,6 +166,8 @@
   
   
 ${basedir}/src/main/protobuf
+
 
   AccessControl.proto
   Admin.proto
@@ -178,8 +180,8 @@
   Comparator.proto
   Encryption.proto
   ErrorHandling.proto
-  Filter.proto
   FS.proto
+  Filter.proto
   HBase.proto
   HFile.proto
   LoadBalancer.proto
@@ -189,13 +191,14 @@
   MultiRowMutation.proto
   Procedure.proto
   Quota.proto
-  RegionNormalizer.proto
-  RegionServerStatus.proto
-  RowProcessor.proto
   RPC.proto
   RSGroup.proto
   RSGroupAdmin.proto
+  RegionNormalizer.proto
+  RegionServerStatus.proto
+  RowProcessor.proto
   SecureBulkLoad.proto
+  Snapshot.proto
   Tracing.proto
   VisibilityLabels.proto
   WAL.proto

http://git-wip-us.apache.org/repos/asf/hbase/blob/106fe99a/hbase-rest/pom.xml
--
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index 3443afc..7bacf95 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -366,8 +366,8 @@
   CellMessage.proto
   CellSetMessage.proto
   ColumnSchemaMessage.proto
-  NamespacesMessage.proto
   NamespacePropertiesMessage.proto
+  NamespacesMessage.proto
   ScannerMessage.proto
   StorageClusterStatusMessage.proto
   TableInfoMessage.proto

http://git-wip-us.apache.org/repos/asf/hbase/blob/106fe99a/hbase-server/pom.xml
--
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index d5f1e30..2631417 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -863,10 +863,12 @@
   
 ${basedir}/src/test/protobuf
 
+  
ColumnAggregationNullResponseProtocol.proto
   ColumnAggregationProtocol.proto
+  
ColumnAggregationWithErrorsProtocol.proto
+  DummyRegionServerEndpoint.proto
   IncrementCounterProcessor.proto
   PingProtocol.proto
-  DummyRegionServerEndpoint.proto
   TestProcedure.proto
   test.proto
   test_rpc_service.proto

http://git-wip-us.apache.org/repos/asf/hbase/blob/106fe99a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
--

[10/10] hbase git commit: HBASE-15670 Add missing Snapshot.proto to the maven profile for compiling protobuf

2016-04-22 Thread enis
HBASE-15670 Add missing Snapshot.proto to the maven profile for compiling 
protobuf

Conflicts:
hbase-protocol/pom.xml

hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java

Conflicts:
hbase-protocol/pom.xml
hbase-rest/pom.xml


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8bf4fe47
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8bf4fe47
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8bf4fe47

Branch: refs/heads/branch-1.1
Commit: 8bf4fe4737efbc3f7873907dcae21fe0ce92f7ac
Parents: aef2d17
Author: Enis Soztutar 
Authored: Thu Apr 21 17:02:51 2016 -0700
Committer: Enis Soztutar 
Committed: Fri Apr 22 11:48:33 2016 -0700

--
 hbase-protocol/pom.xml  |   7 +-
 hbase-server/pom.xml|   3 +
 .../ColumnAggregationEndpointNullResponse.java  |  10 +-
 .../ColumnAggregationEndpointWithErrors.java|   9 +-
 .../TestBatchCoprocessorEndpoint.java   |  32 +-
 .../ColumnAggregationWithErrorsProtos.java  | 314 +-
 ...ColumnAggregationWithNullResponseProtos.java | 315 ++-
 .../ColumnAggregationNullResponseProtocol.proto |   9 +-
 .../ColumnAggregationWithErrorsProtocol.proto   |   9 +-
 9 files changed, 376 insertions(+), 332 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8bf4fe47/hbase-protocol/pom.xml
--
diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml
index 4630632..08ba63c 100644
--- a/hbase-protocol/pom.xml
+++ b/hbase-protocol/pom.xml
@@ -166,6 +166,8 @@
   
   
 ${basedir}/src/main/protobuf
+
 
   AccessControl.proto
   Admin.proto
@@ -178,8 +180,8 @@
   Comparator.proto
   Encryption.proto
   ErrorHandling.proto
-  Filter.proto
   FS.proto
+  Filter.proto
   HBase.proto
   HFile.proto
   LoadBalancer.proto
@@ -189,10 +191,11 @@
   MultiRowMutation.proto
   Procedure.proto
   Quota.proto
+  RPC.proto
   RegionServerStatus.proto
   RowProcessor.proto
-  RPC.proto
   SecureBulkLoad.proto
+  Snapshot.proto
   Tracing.proto
   VisibilityLabels.proto
   WAL.proto

http://git-wip-us.apache.org/repos/asf/hbase/blob/8bf4fe47/hbase-server/pom.xml
--
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index 979fc5f..538bd67 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -879,7 +879,10 @@
   
 ${basedir}/src/test/protobuf
 
+  
ColumnAggregationNullResponseProtocol.proto
   ColumnAggregationProtocol.proto
+  
ColumnAggregationWithErrorsProtocol.proto
+  DummyRegionServerEndpoint.proto
   IncrementCounterProcessor.proto
   PingProtocol.proto
   TestProcedure.proto

http://git-wip-us.apache.org/repos/asf/hbase/blob/8bf4fe47/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
index 4315946..5b8dd07 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
@@ -30,8 +30,8 @@ import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.Scan;
 import 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationServiceNullResponse;
-import 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest;
-import 
org.apache.

[05/10] hbase git commit: HBASE-15670 Add missing Snapshot.proto to the maven profile for compiling protobuf

2016-04-22 Thread enis
http://git-wip-us.apache.org/repos/asf/hbase/blob/51be6569/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
index c0b1917..b25f7aa 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
@@ -8,7 +8,7 @@ public final class ColumnAggregationWithNullResponseProtos {
   public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
   }
-  public interface SumRequestOrBuilder
+  public interface ColumnAggregationNullResponseSumRequestOrBuilder
   extends com.google.protobuf.MessageOrBuilder {
 
 // required bytes family = 1;
@@ -32,24 +32,29 @@ public final class ColumnAggregationWithNullResponseProtos {
 com.google.protobuf.ByteString getQualifier();
   }
   /**
-   * Protobuf type {@code SumRequest}
+   * Protobuf type {@code ColumnAggregationNullResponseSumRequest}
+   *
+   * 
+   * use unique names for messages in ColumnAggregationXXX.protos due to a bug 
in
+   * protoc or hadoop's protoc compiler.
+   * 
*/
-  public static final class SumRequest extends
+  public static final class ColumnAggregationNullResponseSumRequest extends
   com.google.protobuf.GeneratedMessage
-  implements SumRequestOrBuilder {
-// Use SumRequest.newBuilder() to construct.
-private SumRequest(com.google.protobuf.GeneratedMessage.Builder 
builder) {
+  implements ColumnAggregationNullResponseSumRequestOrBuilder {
+// Use ColumnAggregationNullResponseSumRequest.newBuilder() to construct.
+private 
ColumnAggregationNullResponseSumRequest(com.google.protobuf.GeneratedMessage.Builder
 builder) {
   super(builder);
   this.unknownFields = builder.getUnknownFields();
 }
-private SumRequest(boolean noInit) { this.unknownFields = 
com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+private ColumnAggregationNullResponseSumRequest(boolean noInit) { 
this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
 
-private static final SumRequest defaultInstance;
-public static SumRequest getDefaultInstance() {
+private static final ColumnAggregationNullResponseSumRequest 
defaultInstance;
+public static ColumnAggregationNullResponseSumRequest getDefaultInstance() 
{
   return defaultInstance;
 }
 
-public SumRequest getDefaultInstanceForType() {
+public ColumnAggregationNullResponseSumRequest getDefaultInstanceForType() 
{
   return defaultInstance;
 }
 
@@ -59,7 +64,7 @@ public final class ColumnAggregationWithNullResponseProtos {
 getUnknownFields() {
   return this.unknownFields;
 }
-private SumRequest(
+private ColumnAggregationNullResponseSumRequest(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
@@ -106,28 +111,28 @@ public final class 
ColumnAggregationWithNullResponseProtos {
 }
 public static final com.google.protobuf.Descriptors.Descriptor
 getDescriptor() {
-  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumRequest_descriptor;
+  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumRequest_descriptor;
 }
 
 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
 internalGetFieldAccessorTable() {
-  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumRequest_fieldAccessorTable
+  return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumRequest_fieldAccessorTable
   .ensureFieldAccessorsInitialized(
-  
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.class,
 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.Builder.class);
+  
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest.class,
 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationW

[36/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Server.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
index c31601d..60b8af5 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
@@ -184,11 +184,11 @@
 
 
 Server
-BaseCoordinatedStateManager.getServer() 
+ZkCoordinatedStateManager.getServer() 
 
 
 Server
-ZkCoordinatedStateManager.getServer() 
+BaseCoordinatedStateManager.getServer() 
 
 
 
@@ -201,11 +201,11 @@
 
 
 void
-BaseCoordinatedStateManager.initialize(Server server) 
+ZkCoordinatedStateManager.initialize(Server server) 
 
 
 void
-ZkCoordinatedStateManager.initialize(Server server) 
+BaseCoordinatedStateManager.initialize(Server server) 
 
 
 
@@ -324,33 +324,33 @@
 
 
 
-(package private) Server
-MasterFileSystem.master 
-
-
 private Server
 ActiveMasterManager.master 
 
+
+(package private) Server
+MasterFileSystem.master 
+
 
 private Server
 ServerManager.master 
 
 
 private Server
-SplitLogManager.server 
+RegionStateStore.server 
 
 
 private Server
-RegionStateStore.server 
+SplitLogManager.server 
 
 
-protected Server
-BulkAssigner.server 
-
-
 private Server
 CatalogJanitor.server 
 
+
+protected Server
+BulkAssigner.server 
+
 
 
 
@@ -514,7 +514,7 @@
 
 
 private Server
-LogRoller.server 
+HeapMemoryManager.server 
 
 
 private Server
@@ -530,7 +530,7 @@
 
 
 private Server
-HeapMemoryManager.server 
+LogRoller.server 
 
 
 
@@ -543,13 +543,11 @@
 
 
 Server
-SplitTransaction.getServer()
-Get the Server running the transaction or rollback
-
+RegionMergeTransactionImpl.getServer() 
 
 
 Server
-RegionMergeTransactionImpl.getServer() 
+SplitTransactionImpl.getServer() 
 
 
 Server
@@ -559,7 +557,9 @@
 
 
 Server
-SplitTransactionImpl.getServer() 
+SplitTransaction.getServer()
+Get the Server running the transaction or rollback
+
 
 
 
@@ -594,19 +594,15 @@
 
 
 
-PairOfSameType
-SplitTransaction.execute(Server server,
-  RegionServerServices services)
-Deprecated. 
-use #execute(Server, RegionServerServices, User);  as of 
1.0.2, remove in 3.0
-
-
-
-
 Region
 RegionMergeTransactionImpl.execute(Server server,
   RegionServerServices services) 
 
+
+PairOfSameType
+SplitTransactionImpl.execute(Server server,
+  RegionServerServices services) 
+
 
 Region
 RegionMergeTransaction.execute(Server server,
@@ -618,20 +614,22 @@
 
 
 PairOfSameType
-SplitTransactionImpl.execute(Server server,
-  RegionServerServices services) 
+SplitTransaction.execute(Server server,
+  RegionServerServices services)
+Deprecated. 
+use #execute(Server, RegionServerServices, User);  as of 
1.0.2, remove in 3.0
+
+
 
 
-PairOfSameType
-SplitTransaction.execute(Server server,
+Region
+RegionMergeTransactionImpl.execute(Server server,
   RegionServerServices services,
-  User user)
-Run the transaction.
-
+  User user) 
 
 
-Region
-RegionMergeTransactionImpl.execute(Server server,
+PairOfSameType
+SplitTransactionImpl.execute(Server server,
   RegionServerServices services,
   User user) 
 
@@ -645,9 +643,11 @@
 
 
 PairOfSameType
-SplitTransactionImpl.execute(Server server,
+SplitTransaction.execute(Server server,
   RegionServerServices services,
-  User user) 
+  User user)
+Run the transaction.
+
 
 
 void
@@ -684,16 +684,12 @@
 
 
 boolean
-SplitTransaction.rollback(Server server,
-RegionServerServices services)
-Deprecated. 
-use #rollback(Server, RegionServerServices, User); as of 
1.0.2, remove in 3.0
-
-
+RegionMergeTransactionImpl.rollback(Server server,
+RegionServerServices services) 
 
 
 boolean
-RegionMergeTransactionImpl.rollback(Server server,
+SplitTransactionImpl.rollback(Server server,
 RegionServerServices services) 
 
 
@@ -707,20 +703,22 @@
 
 
 boolean
-SplitTransactionImpl.rollback(Server server,
-RegionServerServices services) 
+SplitTransaction.rollback(Server server,
+RegionServerServices services)
+Deprecated. 
+use #rollback(Server, RegionServerServices, User); as of 
1.0.2, remove in 3.0
+
+
 
 
 boolean
-SplitTransaction.rollback(Server server,
+RegionMergeTransactionImpl.rollback(Server server,
 RegionServerServices services,
-User user)
-Roll back a failed transaction
-
+User user) 
 
 
 boolean
-RegionMergeTransactionImpl.rollback(Server server,
+SplitTransactionImpl.rollback(Server server,
 RegionServerServices services,
 User user) 
 
@@ -734,9 +732,11 @@
 
 
 boolean
-SplitTransactionImpl.rollback(Server server,
+SplitTransa

[32/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
index 1a8375a..2e6d3dd 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
@@ -581,14 +581,14 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-DefaultVisibilityExpressionResolver.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String visExpression) 
-
-
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 VisibilityExpressionResolver.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String visExpression)
 Convert visibility expression into tags to be 
serialized.
 
 
+
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
+DefaultVisibilityExpressionResolver.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String visExpression) 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Unstable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Unstable.html
 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Unstable.html
index 72d1fdd..f1c6d57 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Unstable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Unstable.html
@@ -140,19 +140,19 @@
 
 
 
-org.apache.hadoop.hbase.http
+org.apache.hadoop.hbase.http.lib
 
 
- Copied from hadoop source code.
- See https://issues.apache.org/jira/browse/HADOOP-10232 to know why.
+ This package provides user-selectable (via configuration) classes that add
+ functionality to the web UI.
 
 
 
-org.apache.hadoop.hbase.http.lib
+org.apache.hadoop.hbase.http
 
 
- This package provides user-selectable (via configuration) classes that add
- functionality to the web UI.
+ Copied from hadoop source code.
+ See https://issues.apache.org/jira/browse/HADOOP-10232 to know why.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
index dcb7747..7284d69 100644
--- a/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
@@ -81,11 +81,11 @@
 Annotation Type Hierarchy
 
 org.apache.hadoop.hbase.classification.InterfaceAudience.Private (implements 
java.lang.annotation.http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external=true";
 title="class or interface in java.lang.annotation">Annotation)
-org.apache.hadoop.hbase.classification.InterfaceStability.Unstable (implements 
java.lang.annotation.http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external=true";
 title="class or interface in java.lang.annotation">Annotation)
+org.apache.hadoop.hbase.classification.InterfaceAudience.Public (implements 
java.lang.annotation.http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external=true";
 title="class or interface in java.lang.annotation">Annotation)
 org.apache.hadoop.hbase.classification.InterfaceStability.Stable (implements 
java.lang.annotation.http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external=true";
 title="class or interface in java.lang.annotation">Annotation)
 org.apache.hadoop.hbase.classification.InterfaceStability.Evolving (implements 
java.lang.annotation.http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external=true";
 title="class or interface in java.lang.annotation">Annotation)
+org.apache.hadoop.hbase.classification.InterfaceStability.Unstable (implements 
java.lang.annotation.http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external

[02/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html 
b/devapidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
index 8165c4e..ace9b3d 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
@@ -116,113 +116,113 @@
 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Float.html?is-external=true";
 title="class or interface in java.lang">Float
-OrderedFloat32.decode(PositionedByteRange src) 
-
-
-http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true";
 title="class or interface in java.lang">Double
-RawDouble.decode(PositionedByteRange src) 
-
-
-http://docs.oracle.com/javase/7/docs/api/java/lang/Number.html?is-external=true";
 title="class or interface in java.lang">Number
-OrderedNumeric.decode(PositionedByteRange src) 
+byte[]
+OrderedBlob.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true";
 title="class or interface in java.lang">Byte
-OrderedInt8.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+OrderedString.decode(PositionedByteRange src) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer
-OrderedInt32.decode(PositionedByteRange src) 
+RawInteger.decode(PositionedByteRange src) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer
-RawInteger.decode(PositionedByteRange src) 
+OrderedInt32.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[]
-Struct.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Float.html?is-external=true";
 title="class or interface in java.lang">Float
+RawFloat.decode(PositionedByteRange src) 
 
 
-T
-FixedLengthWrapper.decode(PositionedByteRange src) 
+byte[]
+OrderedBlobVar.decode(PositionedByteRange src) 
 
 
 byte[]
-OrderedBlob.decode(PositionedByteRange src) 
+RawBytes.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-OrderedString.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Number.html?is-external=true";
 title="class or interface in java.lang">Number
+OrderedNumeric.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-RawString.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true";
 title="class or interface in java.lang">Double
+OrderedFloat64.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Float.html?is-external=true";
 title="class or interface in java.lang">Float
-RawFloat.decode(PositionedByteRange src) 
+T
+FixedLengthWrapper.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true";
 title="class or interface in java.lang">Short
-RawShort.decode(PositionedByteRange src) 
-
-
 T
 DataType.decode(PositionedByteRange src)
 Read an instance of T from the buffer 
src.
 
 
-
-http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true";
 title="class or interface in java.lang">Short
-OrderedInt16.decode(PositionedByteRange src) 
-
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
-RawLong.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true";
 title="class or interface in java.lang">Byte
+RawByte.decode(PositionedByteRange src) 
 
 
-byte[]
-OrderedBlobVar.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true";
 title="class or interface in java.lang">Double
+RawDouble.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true";
 title="class or interface in java.lang">Double
-OrderedFloat64.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
+OrderedInt64.decode(PositionedByteRange src) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=tru

[31/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
index 728b572..ff36a51 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
@@ -142,18 +142,18 @@ the order they are declared.
 
 
 
-Query
-Query.setConsistency(Consistency consistency)
-Sets the consistency level for this operation
-
+Get
+Get.setConsistency(Consistency consistency) 
 
 
 Scan
 Scan.setConsistency(Consistency consistency) 
 
 
-Get
-Get.setConsistency(Consistency consistency) 
+Query
+Query.setConsistency(Consistency consistency)
+Sets the consistency level for this operation
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
index 93d0d98..25795cd 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
@@ -321,7 +321,7 @@ service.
 
 
 boolean
-HTable.checkAndDelete(byte[] row,
+Table.checkAndDelete(byte[] row,
 byte[] family,
 byte[] qualifier,
 byte[] value,
@@ -332,7 +332,7 @@ service.
 
 
 boolean
-Table.checkAndDelete(byte[] row,
+HTable.checkAndDelete(byte[] row,
 byte[] family,
 byte[] qualifier,
 byte[] value,
@@ -351,7 +351,7 @@ service.
 
 
 boolean
-HTable.checkAndDelete(byte[] row,
+Table.checkAndDelete(byte[] row,
 byte[] family,
 byte[] qualifier,
 CompareFilter.CompareOp compareOp,
@@ -363,7 +363,7 @@ service.
 
 
 boolean
-Table.checkAndDelete(byte[] row,
+HTable.checkAndDelete(byte[] row,
 byte[] family,
 byte[] qualifier,
 CompareFilter.CompareOp compareOp,
@@ -384,13 +384,13 @@ service.
 
 
 void
-HTable.delete(Delete delete)
+Table.delete(Delete delete)
 Deletes the specified cells/row.
 
 
 
 void
-Table.delete(Delete delete)
+HTable.delete(Delete delete)
 Deletes the specified cells/row.
 
 
@@ -409,13 +409,13 @@ service.
 
 
 void
-HTable.delete(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List deletes)
+Table.delete(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List deletes)
 Deletes the specified cells/rows in bulk.
 
 
 
 void
-Table.delete(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List deletes)
+HTable.delete(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List deletes)
 Deletes the specified cells/rows in bulk.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/Durability.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
index dc4ebf6..4cdc905 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
@@ -241,7 +241,7 @@ the order they are declared.
 
 
 long
-HTable.incrementColumnValue(byte[] row,
+Table.incrementColumnValue(byte[] row,
 byte[] family,
 byte[] qualifier,
 long amount,
@@ -251,7 +251,7 @@ the order they are declared.
 
 
 long
-Table.incrementColumnValue(byte[] row,
+HTable.incrementColumnValue(byte[] row,
 byte[] family,
 byte[] qualifier,
 long amount,
@@ -268,6 +268,14 @@ the order they are declared.
 Durability durability) 
 
 
+Append
+Append.setDurability(Durability d) 
+
+
+Put
+Put.setDurability(Durability d) 
+
+
 Delete
 Delete.setDurability(Durability d) 
 
@@ -281,14 +289,6 @@ the order they are declared.
 Increment
 Inc

[20/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/ipc/class-use/HBaseRPCErrorHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/HBaseRPCErrorHandler.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/HBaseRPCErrorHandler.html
index 43fe020..8058aaa 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/HBaseRPCErrorHandler.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/HBaseRPCErrorHandler.html
@@ -119,11 +119,11 @@
 
 
 HBaseRPCErrorHandler
-RpcServer.getErrorHandler() 
+RpcServerInterface.getErrorHandler() 
 
 
 HBaseRPCErrorHandler
-RpcServerInterface.getErrorHandler() 
+RpcServer.getErrorHandler() 
 
 
 
@@ -136,13 +136,13 @@
 
 
 void
-RpcServer.setErrorHandler(HBaseRPCErrorHandler handler)
-Set the handler for calling out of RPC for error 
conditions.
-
+RpcServerInterface.setErrorHandler(HBaseRPCErrorHandler handler) 
 
 
 void
-RpcServerInterface.setErrorHandler(HBaseRPCErrorHandler handler) 
+RpcServer.setErrorHandler(HBaseRPCErrorHandler handler)
+Set the handler for calling out of RPC for error 
conditions.
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/ipc/class-use/MetricsHBaseServer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/MetricsHBaseServer.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/MetricsHBaseServer.html
index dafbe2e..528a391 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/MetricsHBaseServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/MetricsHBaseServer.html
@@ -111,13 +111,13 @@
 
 
 MetricsHBaseServer
-RpcServer.getMetrics()
+RpcServerInterface.getMetrics()
 Returns the metrics instance for reporting RPC call 
statistics
 
 
 
 MetricsHBaseServer
-RpcServerInterface.getMetrics()
+RpcServer.getMetrics()
 Returns the metrics instance for reporting RPC call 
statistics
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PayloadCarryingRpcController.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PayloadCarryingRpcController.html
 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PayloadCarryingRpcController.html
index c416900..828c1db 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PayloadCarryingRpcController.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PayloadCarryingRpcController.html
@@ -111,16 +111,16 @@
 
 
 
-private PayloadCarryingRpcController
-RpcRetryingCallerWithReadReplicas.ReplicaRegionServerCallable.controller 
+protected PayloadCarryingRpcController
+ScannerCallable.controller 
 
 
 protected PayloadCarryingRpcController
 PayloadCarryingServerCallable.controller 
 
 
-protected PayloadCarryingRpcController
-ScannerCallable.controller 
+private PayloadCarryingRpcController
+RpcRetryingCallerWithReadReplicas.ReplicaRegionServerCallable.controller 
 
 
 
@@ -204,13 +204,13 @@
 
 
 
-protected Pair
-AsyncRpcClient.call(PayloadCarryingRpcController pcrc,
+protected abstract Pair
+AbstractRpcClient.call(PayloadCarryingRpcController pcrc,
 com.google.protobuf.Descriptors.MethodDescriptor md,
 com.google.protobuf.Message param,
 com.google.protobuf.Message returnType,
 User ticket,
-http://docs.oracle.com/javase/7/docs/api/java/net/InetSocketAddress.html?is-external=true";
 title="class or interface in java.net">InetSocketAddress addr,
+http://docs.oracle.com/javase/7/docs/api/java/net/InetSocketAddress.html?is-external=true";
 title="class or interface in java.net">InetSocketAddress isa,
 MetricsConnection.CallStats callStats)
 Make a call, passing param, to the IPC server 
running at
  address which is servicing the protocol protocol,
@@ -218,13 +218,13 @@
 
 
 
-protected abstract Pair
-AbstractRpcClient.call(PayloadCarryingRpcController pcrc,
+protected Pair
+RpcClientImpl.call(PayloadCarryingRpcController pcrc,
 com.google.protobuf.Descriptors.MethodDescriptor md,
 com.google.protobuf.Message param,
 com.google.protobuf.Message returnType,
 User ticket,
-http://docs.oracle.com/javase/7/docs/api/java/net/InetSocketAddress.html?is-external=true";
 title="class or interface in java.net">InetSocketAddress isa,
+http://docs.oracle.com/javase/7/docs/api/java/net/InetSocketAddress.html?is-external=true";
 title="class or interface in java.net">InetSocketAddress addr,
 MetricsConnection.CallStats callStats)
 Make a call, passing param, to the IPC server 
runnin

[27/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
index 71fedf2..9b9ec87 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
@@ -120,11 +120,11 @@
 
 
 void
-BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
+BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
 
 
 void
-BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
+BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
 
 
 void
@@ -140,7 +140,7 @@
 
 
 void
-BaseMasterObserver.postAddColumn(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
 Deprecated. 
@@ -148,7 +148,7 @@
 
 
 void
-BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
+BaseMasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
 Deprecated. 
@@ -164,13 +164,13 @@
 
 
 void
-BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily) 
 
@@ -184,13 +184,13 @@
 
 
 void
-BaseMasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumnFamilyHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnFamilyHandler(ObserverContext ctx,
+BaseMasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily) 
 
@@ -208,7 +208,7 @@
 
 
 void
-BaseMasterObserver.postAddColumnHandler(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily)
 Deprecated. 
@@ -216,7 +216,7 @@
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
+BaseMasterObserver.postAddColumnHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily)
 Deprecated. 
@@ -231,12 +231,12 @@
 
 
 void
-BaseMasterObserver.postAddRSGroup(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddRSGroup(ObserverContext ctx,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name) 
 
 
 void
-BaseMasterAndRegionObserver.postAddRSGroup(ObserverContext ctx,
+BaseMasterObserver.postAddRSGroup(ObserverContext ctx,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name) 
 
 
@@ -248,12 +248,12 @@
 
 
 void
-BaseMasterObserver.postAssign(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAssign(ObserverContext ctx,
 HRegionInfo regionInfo) 
 
 
 void
-BaseMasterAndRegionObserver.postAssign(Obse

[07/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
 
b/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
index bb9e6fc..00f3ade 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
@@ -358,7 +358,7 @@ the order they are declared.
 
 
 values
-public static AccessController.OpType[] values()
+public static AccessController.OpType[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -375,7 +375,7 @@ for (AccessController.OpType c : 
AccessController.OpType.values())
 
 
 valueOf
-public static AccessController.OpType valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static AccessController.OpType valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html 
b/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
index 1ebfd6a..d88b53c 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
@@ -101,43 +101,43 @@
 
 
 
-Query
-Query.setACL(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+Get
+Get.setACL(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
 Permission perms) 
 
 
-Delete
-Delete.setACL(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+Append
+Append.setACL(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
 Permission perms) 
 
 
-Mutation
-Mutation.setACL(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+Scan
+Scan.setACL(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
 Permission perms) 
 
 
-Increment
-Increment.setACL(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+Query
+Query.setACL(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
 Permission perms) 
 
 
-Append
-Append.setACL(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+Put
+Put.setACL(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
 Permission perms) 
 
 
-Scan
-Scan.setACL(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+Delete
+Delete.setACL(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
 Permission perms) 
 
 
-Put
-Put.setACL(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+Mutation
+Mutation.setACL(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
 Permission perms) 
 
 
-Get
-Get.setACL(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
+Increment
+Increment.setACL(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String user,
 Permissi

[22/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
index cf72bd9..bbc913f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
@@ -109,35 +109,35 @@
 
 
 protected http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-CopyKeyDataBlockEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
+DiffKeyDeltaEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
   int allocateHeaderLength,
   int skipLastBytes,
   HFileBlockDefaultDecodingContext decodingCtx) 
 
 
-protected http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-FastDiffDeltaEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
+protected abstract http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
+BufferedDataBlockEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
   int allocateHeaderLength,
   int skipLastBytes,
   HFileBlockDefaultDecodingContext decodingCtx) 
 
 
-protected abstract http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-BufferedDataBlockEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
+protected http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
+FastDiffDeltaEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
   int allocateHeaderLength,
   int skipLastBytes,
   HFileBlockDefaultDecodingContext decodingCtx) 
 
 
 protected http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-PrefixKeyDeltaEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
+CopyKeyDataBlockEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
   int allocateHeaderLength,
   int skipLastBytes,
   HFileBlockDefaultDecodingContext decodingCtx) 
 
 
 protected http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-DiffKeyDeltaEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
+PrefixKeyDeltaEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
   int allocateHeaderLength,
   int skipLastBytes,
   HFileBlockDefaultDecodingContext decodingCtx) 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org

[29/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
index f979258..d905c8f 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
@@ -210,15 +210,15 @@ service.
 
 
 private Scan
-ScannerCallableWithReplicas.scan 
+ScannerCallable.scan 
 
 
-protected Scan
-ClientScanner.scan 
+private Scan
+ScannerCallableWithReplicas.scan 
 
 
-private Scan
-ScannerCallable.scan 
+protected Scan
+ClientScanner.scan 
 
 
 private Scan
@@ -248,11 +248,11 @@ service.
 
 
 protected Scan
-ClientScanner.getScan() 
+ScannerCallable.getScan() 
 
 
 protected Scan
-ScannerCallable.getScan() 
+ClientScanner.getScan() 
 
 
 Scan
@@ -454,15 +454,15 @@ service.
 
 
 ResultScanner
-HTable.getScanner(Scan scan)
-The underlying HTable must 
not be closed.
+Table.getScanner(Scan scan)
+Returns a scanner on the current table as specified by the 
Scan
+ object.
 
 
 
 ResultScanner
-Table.getScanner(Scan scan)
-Returns a scanner on the current table as specified by the 
Scan
- object.
+HTable.getScanner(Scan scan)
+The underlying HTable must 
not be closed.
 
 
 
@@ -1036,14 +1036,14 @@ service.
 
 
 private Scan
-TableSnapshotInputFormatImpl.RecordReader.scan 
-
-
-private Scan
 TableInputFormatBase.scan
 Holds the details for the internal scanner.
 
 
+
+private Scan
+TableSnapshotInputFormatImpl.RecordReader.scan 
+
 
 
 
@@ -1080,12 +1080,12 @@ service.
 
 
 private static Scan
-CellCounter.getConfiguredScanForJob(org.apache.hadoop.conf.Configuration conf,
+Export.getConfiguredScanForJob(org.apache.hadoop.conf.Configuration conf,
   http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String[] args) 
 
 
 private static Scan
-Export.getConfiguredScanForJob(org.apache.hadoop.conf.Configuration conf,
+CellCounter.getConfiguredScanForJob(org.apache.hadoop.conf.Configuration conf,
   http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String[] args) 
 
 
@@ -1308,13 +1308,13 @@ service.
 
 
 void
-TableRecordReader.setScan(Scan scan)
+TableInputFormatBase.setScan(Scan scan)
 Sets the scan defining the actual details like columns 
etc.
 
 
 
 void
-TableInputFormatBase.setScan(Scan scan)
+TableRecordReader.setScan(Scan scan)
 Sets the scan defining the actual details like columns 
etc.
 
 
@@ -1374,12 +1374,6 @@ service.
 
 
 
-static void
-MultiTableSnapshotInputFormat.setInput(org.apache.hadoop.conf.Configuration configuration,
-http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection> snapshotScans,
-org.apache.hadoop.fs.Path tmpRestoreDir) 
-
-
 void
 MultiTableSnapshotInputFormatImpl.setInput(org.apache.hadoop.conf.Configuration conf,
 http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection> snapshotScans,
@@ -1388,6 +1382,12 @@ service.
  restoreDir.
 
 
+
+static void
+MultiTableSnapshotInputFormat.setInput(org.apache.hadoop.conf.Configuration configuration,
+http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection> snapshotScans,
+org.apache.hadoop.fs.Path tmpRestoreDir) 
+
 
 protected void
 MultiTableInputFormatBase.setScans(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scans)
@@ -1598,19 +1598,19 @@ service.
 
 
 protected KeyValueScanner
-HMobStore.createScanner(Scan scan,
+HStore.createScanner(Scan scan,
   http://docs.oracle.com/javase/7/docs/

[33/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 6fb4ffc..7fa1209 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -313,11 +313,11 @@ service.
 
 
 private TableName
-HRegionInfo.tableName 
+MetaTableAccessor.TableVisitorBase.tableName 
 
 
 private TableName
-MetaTableAccessor.TableVisitorBase.tableName 
+HRegionInfo.tableName 
 
 
 
@@ -758,52 +758,52 @@ service.
 
 
 
-protected TableName
-RpcRetryingCallerWithReadReplicas.tableName 
+private TableName
+HRegionLocator.tableName 
 
 
-protected TableName
-RegionServerCallable.tableName 
+private TableName
+ScannerCallableWithReplicas.tableName 
 
 
 private TableName
-TableState.tableName 
+HTable.tableName 
 
 
 private TableName
-ScannerCallableWithReplicas.tableName 
+AsyncProcess.AsyncRequestFutureImpl.tableName 
 
 
-private TableName
-AsyncProcess.AsyncRequestFutureImpl.tableName 
+protected TableName
+RegionServerCallable.tableName 
 
 
 private TableName
 HBaseAdmin.TableFuture.tableName 
 
 
-private TableName
-BufferedMutatorImpl.tableName 
+protected TableName
+RpcRetryingCallerWithReadReplicas.tableName 
 
 
 private TableName
-ClientScanner.tableName 
+BufferedMutatorImpl.tableName 
 
 
-protected TableName
-RegionAdminServiceCallable.tableName 
+private TableName
+BufferedMutatorParams.tableName 
 
 
-private TableName
-HTable.tableName 
+protected TableName
+RegionAdminServiceCallable.tableName 
 
 
 private TableName
-HRegionLocator.tableName 
+ClientScanner.tableName 
 
 
 private TableName
-BufferedMutatorParams.tableName 
+TableState.tableName 
 
 
 
@@ -843,11 +843,11 @@ service.
 
 
 TableName
-BufferedMutatorImpl.getName() 
+HRegionLocator.getName() 
 
 
 TableName
-RegionLocator.getName()
+Table.getName()
 Gets the fully qualified table name instance of this 
table.
 
 
@@ -857,13 +857,13 @@ service.
 
 
 TableName
-Table.getName()
-Gets the fully qualified table name instance of this 
table.
-
+BufferedMutatorImpl.getName() 
 
 
 TableName
-HRegionLocator.getName() 
+RegionLocator.getName()
+Gets the fully qualified table name instance of this 
table.
+
 
 
 TableName
@@ -878,36 +878,42 @@ service.
 RegionServerCallable.getTableName() 
 
 
-TableName
-TableState.getTableName()
-Table name for state
-
-
-
 protected TableName
 HBaseAdmin.TableFuture.getTableName() 
 
-
+
 TableName
 BufferedMutatorParams.getTableName() 
 
+
+TableName
+TableState.getTableName()
+Table name for state
+
+
 
 private TableName
 HBaseAdmin.getTableNameBeforeRestoreSnapshot(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String snapshotName) 
 
 
 TableName[]
-HBaseAdmin.listTableNames() 
+Admin.listTableNames()
+List all of the names of userspace tables.
+
 
 
 TableName[]
+HBaseAdmin.listTableNames() 
+
+
+TableName[]
 HConnection.listTableNames()
 Deprecated. 
 Use Admin.listTables()
 instead.
 
 
 
-
+
 TableName[]
 ConnectionImplementation.listTableNames()
 Deprecated. 
@@ -915,66 +921,60 @@ service.
 
 
 
-
-TableName[]
-Admin.listTableNames()
-List all of the names of userspace tables.
-
-
 
 TableName[]
-HBaseAdmin.listTableNames(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in 
java.util.regex">Pattern pattern) 
-
-
-TableName[]
 Admin.listTableNames(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern)
 List all of the names of userspace tables.
 
 
-
+
 TableName[]
-HBaseAdmin.listTableNames(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern,
-boolean includeSysTables) 
+HBaseAdmin.listTableNames(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in 
java.util.regex">Pattern pattern) 
 
-
+
 TableName[]
 Admin.listTableNames(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern,
 boolean includeSysTables)
 List all of the names of userspace tables.
 
 
-
+
 TableName[]
-HBaseAdmin.listTableNames(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String regex) 
+HBaseAdmin.listTableNames(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern,
+ 

[13/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionCoprocessorHost.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionCoprocessorHost.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionCoprocessorHost.html
index 00a9484..e0a2def 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionCoprocessorHost.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionCoprocessorHost.html
@@ -117,15 +117,15 @@
 
 
 RegionCoprocessorHost
-Store.getCoprocessorHost() 
+Region.getCoprocessorHost() 
 
 
 RegionCoprocessorHost
-Region.getCoprocessorHost() 
+HRegion.getCoprocessorHost() 
 
 
 RegionCoprocessorHost
-HRegion.getCoprocessorHost() 
+Store.getCoprocessorHost() 
 
 
 RegionCoprocessorHost

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerAccounting.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerAccounting.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerAccounting.html
index 627e423..0a6ed12 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerAccounting.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerAccounting.html
@@ -96,11 +96,11 @@
 
 
 private RegionServerAccounting
-HRegionServer.regionServerAccounting 
+HeapMemoryManager.regionServerAccounting 
 
 
 private RegionServerAccounting
-HeapMemoryManager.regionServerAccounting 
+HRegionServer.regionServerAccounting 
 
 
 private RegionServerAccounting

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerServices.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerServices.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerServices.html
index 8cc6f5b..5ea5e15 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerServices.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerServices.html
@@ -344,29 +344,29 @@
 
 
 (package private) RegionServerServices
-HRegion.rsServices 
+RegionCoprocessorHost.rsServices
+The region server services
+
 
 
 private RegionServerServices
-RegionMergeTransactionImpl.rsServices 
+RegionCoprocessorHost.RegionEnvironment.rsServices 
 
 
 private RegionServerServices
-SplitTransactionImpl.rsServices 
+RegionServerCoprocessorHost.rsServices 
 
 
-private RegionServerServices
-RegionServerCoprocessorHost.rsServices 
+(package private) RegionServerServices
+HRegion.rsServices 
 
 
-(package private) RegionServerServices
-RegionCoprocessorHost.rsServices
-The region server services
-
+private RegionServerServices
+RegionMergeTransactionImpl.rsServices 
 
 
 private RegionServerServices
-RegionCoprocessorHost.RegionEnvironment.rsServices 
+SplitTransactionImpl.rsServices 
 
 
 private RegionServerServices
@@ -387,23 +387,19 @@
 
 
 RegionServerServices
-SplitTransaction.getRegionServerServices()
-Get the RegonServerServices of the server running the 
transaction or rollback
-
+RegionCoprocessorHost.RegionEnvironment.getRegionServerServices() 
 
 
-(package private) RegionServerServices
-HRegion.getRegionServerServices() 
+RegionServerServices
+RegionServerCoprocessorHost.RegionServerEnvironment.getRegionServerServices() 
 
 
-RegionServerServices
-RegionMergeTransactionImpl.getRegionServerServices() 
+(package private) RegionServerServices
+HRegion.getRegionServerServices() 
 
 
 RegionServerServices
-RegionMergeTransaction.getRegionServerServices()
-Get the RegonServerServices of the server running the 
transaction or rollback
-
+RegionMergeTransactionImpl.getRegionServerServices() 
 
 
 RegionServerServices
@@ -411,11 +407,15 @@
 
 
 RegionServerServices
-RegionServerCoprocessorHost.RegionServerEnvironment.getRegionServerServices() 
+RegionMergeTransaction.getRegionServerServices()
+Get the RegonServerServices of the server running the 
transaction or rollback
+
 
 
 RegionServerServices
-RegionCoprocessorHost.RegionEnvironment.getRegionServerServices() 
+SplitTransaction.getRegionServerServices()
+Get the RegonServerServices of the server running the 
transaction or rollback
+
 
 
 
@@ -452,19 +452,15 @@
 
 
 
-PairOfSameType
-SplitTransaction.execute(Server server,
-  RegionServerServices services)
-Deprecated. 
-use #execute(Server, RegionServerServices, User);  as of 
1.0.2, remove in 3.0
-
-
-
-
 Region
 RegionMergeTransactionImpl.execute(Server server,
   RegionServerServices services) 
 
+
+PairOfSameType<

[46/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
index 1c23652..1380982 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
@@ -116,123 +116,123 @@
 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
-OrderedInt64.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+RawString.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer
-OrderedInt32.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Float.html?is-external=true";
 title="class or interface in java.lang">Float
+RawFloat.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true";
 title="class or interface in java.lang">Byte
-OrderedInt8.decode(PositionedByteRange src) 
-
-
 http://docs.oracle.com/javase/7/docs/api/java/lang/Number.html?is-external=true";
 title="class or interface in java.lang">Number
 OrderedNumeric.decode(PositionedByteRange src) 
 
-
-T
-DataType.decode(PositionedByteRange src)
-Read an instance of T from the buffer 
src.
-
-
 
 byte[]
-OrderedBlobVar.decode(PositionedByteRange src) 
+RawBytes.decode(PositionedByteRange src) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer
 RawInteger.decode(PositionedByteRange src) 
 
 
-byte[]
-RawBytes.decode(PositionedByteRange src) 
+T
+TerminatedWrapper.decode(PositionedByteRange src) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/lang/Float.html?is-external=true";
 title="class or interface in java.lang">Float
 OrderedFloat32.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-RawString.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true";
 title="class or interface in java.lang">Double
+RawDouble.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Float.html?is-external=true";
 title="class or interface in java.lang">Float
-RawFloat.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
+OrderedInt64.decode(PositionedByteRange src) 
 
 
-T
-FixedLengthWrapper.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer
+OrderedInt32.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[]
-Struct.decode(PositionedByteRange src) 
-
-
 http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
 RawLong.decode(PositionedByteRange src) 
 
+
+http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[]
+Struct.decode(PositionedByteRange src) 
+
 
-T
-TerminatedWrapper.decode(PositionedByteRange src) 
+byte[]
+OrderedBlobVar.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true";
 title="class or interface in java.lang">Double
-RawDouble.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true";
 title="class or interface in java.lang">Short
+OrderedInt16.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-OrderedString.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true";
 title="class or interface in java.lang">Byte
+RawByte.decode(PositionedByteRange src) 
 
 
 byte[]
 OrderedBlob.decode(PositionedByteRange src) 
 
 
+T
+DataType.decode(PositionedByteRange src)
+Read an instance of T from the buffer 
src.
+
+
+
+T
+FixedLengthWrapper.decode(PositionedByteRange src) 
+
+
 http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true";
 title="class or interface in java.lang">Byte
-RawByte.decode(PositionedByteRang

[35/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index 5e203ed..260bca1 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -248,11 +248,11 @@
 
 
 ServerName
-SplitLogTask.getServerName() 
+Server.getServerName() 
 
 
 ServerName
-Server.getServerName() 
+SplitLogTask.getServerName() 
 
 
 static ServerName
@@ -591,11 +591,11 @@
 
 
 private ServerName
-AsyncProcess.AsyncRequestFutureImpl.SingleServerRequestRunnable.server 
+FastFailInterceptorContext.server 
 
 
 private ServerName
-FastFailInterceptorContext.server 
+AsyncProcess.AsyncRequestFutureImpl.SingleServerRequestRunnable.server 
 
 
 private ServerName
@@ -698,16 +698,16 @@
 
 
 
-void
-MetaCache.cacheLocation(TableName tableName,
+private void
+ConnectionImplementation.cacheLocation(TableName tableName,
   ServerName source,
   HRegionLocation location)
 Put a newly discovered HRegionLocation into the cache.
 
 
 
-private void
-ConnectionImplementation.cacheLocation(TableName tableName,
+void
+MetaCache.cacheLocation(TableName tableName,
   ServerName source,
   HRegionLocation location)
 Put a newly discovered HRegionLocation into the cache.
@@ -754,16 +754,16 @@
 
 
 void
-HBaseAdmin.closeRegion(ServerName sn,
-  HRegionInfo hri) 
-
-
-void
 Admin.closeRegion(ServerName sn,
   HRegionInfo hri)
 Close a region.
 
 
+
+void
+HBaseAdmin.closeRegion(ServerName sn,
+  HRegionInfo hri) 
+
 
 private void
 HBaseAdmin.compact(ServerName sn,
@@ -773,29 +773,29 @@
 
 
 void
-HBaseAdmin.compactRegionServer(ServerName sn,
+Admin.compactRegionServer(ServerName sn,
   boolean major)
 Compact all regions on the region server
 
 
 
 void
-Admin.compactRegionServer(ServerName sn,
+HBaseAdmin.compactRegionServer(ServerName sn,
   boolean major)
 Compact all regions on the region server
 
 
 
 CoprocessorRpcChannel
-HBaseAdmin.coprocessorService(ServerName sn) 
-
-
-CoprocessorRpcChannel
 Admin.coprocessorService(ServerName sn)
 Creates and returns a RpcChannel instance
  connected to the passed region server.
 
 
+
+CoprocessorRpcChannel
+HBaseAdmin.coprocessorService(ServerName sn) 
+
 
 protected MultiServerCallable
 AsyncProcess.createCallable(ServerName server,
@@ -904,14 +904,14 @@
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HBaseAdmin.getOnlineRegions(ServerName sn) 
-
-
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 Admin.getOnlineRegions(ServerName sn)
 Get all the online regions on a region server.
 
 
+
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
+HBaseAdmin.getOnlineRegions(ServerName sn) 
+
 
 (package private) ServerStatistics
 ServerStatisticTracker.getServerStatsForTesting(ServerName server) 
@@ -965,13 +965,13 @@
 
 
 boolean
-ClusterStatusListener.isDeadServer(ServerName sn)
-Check if we know if a server is dead.
-
+ConnectionImplementation.isDeadServer(ServerName sn) 
 
 
 boolean
-ConnectionImplementation.isDeadServer(ServerName sn) 
+ClusterStatusListener.isDeadServer(ServerName sn)
+Check if we know if a server is dead.
+
 
 
 protected boolean
@@ -1039,14 +1039,14 @@
 
 
 void
-HBaseAdmin.rollWALWriter(ServerName serverName) 
-
-
-void
 Admin.rollWALWriter(ServerName serverName)
 Roll the log writer.
 
 
+
+void
+HBaseAdmin.rollWALWriter(ServerName serverName) 
+
 
 private 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse
 HBaseAdmin.rollWALWriterImpl(ServerName sn) 
@@ -1123,15 +1123,15 @@
 
 
 void
-HBaseAdmin.updateConfiguration(ServerName server) 
-
-
-void
 Admin.updateConfiguration(ServerName server)
 Update the configuration and trigger an online config change
  on the regionserver
 
 
+
+void
+HBaseAdmin.updateConfiguration(ServerName server) 
+
 
 private void
 PreemptiveFastFailInterceptor.updateFailureInfoForServer(ServerName server,
@@ -1151,18 +1151,18 @@
 
 
 void
-MetricsConnection.updateRegionStats(ServerName serverName,
-  byte[] regionName,
-  
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats stats) 
-
-
-void
 StatisticTrackable.updateRegionStats(ServerName server,
   byte[] region,
  

[28/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/class-use/PrefixTreeArraySearcher.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/class-use/PrefixTreeArraySearcher.html
 
b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/class-use/PrefixTreeArraySearcher.html
index 66bd87a..817ba3e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/class-use/PrefixTreeArraySearcher.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/class-use/PrefixTreeArraySearcher.html
@@ -130,15 +130,15 @@
 
 
 
-static PrefixTreeArraySearcher
-DecoderFactory.checkOut(ByteBuff buffer,
-boolean includeMvccVersion) 
-
-
 PrefixTreeArraySearcher
 ArraySearcherPool.checkOut(ByteBuff buffer,
 boolean includesMvccVersion) 
 
+
+static PrefixTreeArraySearcher
+DecoderFactory.checkOut(ByteBuff buffer,
+boolean includeMvccVersion) 
+
 
 static PrefixTreeArraySearcher
 DecoderFactory.ensureArraySearcherValid(ByteBuff buffer,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/class-use/PrefixTreeEncoder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/class-use/PrefixTreeEncoder.html
 
b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/class-use/PrefixTreeEncoder.html
index 7d01f4b..5b2ec98 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/class-use/PrefixTreeEncoder.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/class-use/PrefixTreeEncoder.html
@@ -201,13 +201,13 @@
 
 
 protected PrefixTreeEncoder
-RowNodeWriter.prefixTreeEncoder
+RowSectionWriter.prefixTreeEncoder
 fields
 
 
 
 protected PrefixTreeEncoder
-RowSectionWriter.prefixTreeEncoder
+RowNodeWriter.prefixTreeEncoder
 fields
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/other/class-use/ColumnNodeType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/other/class-use/ColumnNodeType.html
 
b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/other/class-use/ColumnNodeType.html
index a741ea2..2b8a872 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/other/class-use/ColumnNodeType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/encode/other/class-use/ColumnNodeType.html
@@ -145,13 +145,13 @@
 
 
 
-private ColumnNodeType
-ColumnSectionWriter.nodeType 
-
-
 protected ColumnNodeType
 ColumnNodeWriter.nodeType 
 
+
+private ColumnNodeType
+ColumnSectionWriter.nodeType 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogManagerCoordination.SplitLogManagerDetails.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogManagerCoordination.SplitLogManagerDetails.html
 
b/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogManagerCoordination.SplitLogManagerDetails.html
index ec419a4..cf0bbf9 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogManagerCoordination.SplitLogManagerDetails.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogManagerCoordination.SplitLogManagerDetails.html
@@ -122,14 +122,14 @@
 
 
 SplitLogManagerCoordination.SplitLogManagerDetails
-ZKSplitLogManagerCoordination.getDetails() 
-
-
-SplitLogManagerCoordination.SplitLogManagerDetails
 SplitLogManagerCoordination.getDetails()
 Returns the configuration that was provided previously
 
 
+
+SplitLogManagerCoordination.SplitLogManagerDetails
+ZKSplitLogManagerCoordination.getDetails() 
+
 
 
 
@@ -141,14 +141,14 @@
 
 
 void
-ZKSplitLogManagerCoordination.setDetails(SplitLogManagerCoordination.SplitLogManagerDetails details) 
-
-
-void
 SplitLogManagerCoordination.setDetails(SplitLogManagerCoordination.SplitLogManagerDetails details)
 Provide the configuration from the SplitLogManager
 
 
+
+void
+ZKSplitLogManagerCoordination.setDetails(SplitLogManagerCoordination.SplitLogManagerDetails details) 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogManagerCoordination.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogManagerCoordination.html
 
b/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogManagerCoordination.html
index a17932c..d9ec8ba 100644
--- 

[50/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 562f488..2abbccd 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -913,9 +913,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Put
-Put.add(Cell kv)
-Add the specified KeyValue to this Put operation.
+Append
+Append.add(Cell cell)
+Add column and value to this Append operation.
 
 
 
@@ -925,9 +925,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Append
-Append.add(Cell cell)
-Add column and value to this Append operation.
+Put
+Put.add(Cell kv)
+Add the specified KeyValue to this Put operation.
 
 
 
@@ -1007,27 +1007,27 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 boolean partial) 
 
 
-Delete
-Delete.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+Append
+Append.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
 
-Put
-Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
-
 Increment
 Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
+
+Put
+Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
 
+Delete
+Delete.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
+
 Mutation
 Mutation.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map)
 Method for setting the put's familyMap
 
 
-
-Append
-Append.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
 
 
 
@@ -1044,7 +1044,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 Cell
-ColumnPaginationFilter.getNextCellHint(Cell cell) 
+ColumnRangeFilter.getNextCellHint(Cell cell) 
 
 
 abstract Cell
@@ -1055,33 +1055,33 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 Cell
-MultipleColumnPrefixFilter.getNextCellHint(Cell cell) 
+ColumnPaginationFilter.getNextCellHint(Cell cell) 
 
 
 Cell
-FilterList.getNextCellHint(Cell currentCell) 
+ColumnPrefixFilter.getNextCellHint(Cell cell) 
 
 
 Cell
-TimestampsFilter.getNextCellHint(Cell currentCell)
-Pick the next cell that the scanner should seek to.
-
+FilterList.getNextCellHint(Cell currentCell) 
 
 
 Cell
-MultiRowRangeFilter.getNextCellHint(Cell currentKV) 
+MultipleColumnPrefixFilter.getNextCellHint(Cell cell) 
 
 
 Cell
-ColumnPrefixFilter.getNextCellHint(Cell cell) 
+TimestampsFilter.getNextCellHint(Cell currentCell)
+Pick the next cell that the scanner should seek to.
+
 
 
 Cell
-ColumnRangeFilter.getNextCellHint(Cell cell) 
+FuzzyRowFilter.getNextCellHint(Cell currentCell) 
 
 
 Cell
-FuzzyRowFilter.getNextCellHint(Cell currentCell) 
+MultiRowRangeFilter.getNextCellHint(Cell currentKV) 
 
 
 abstract Cell
@@ -1095,15 +10

[37/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
index e9a6b35..4f4468b 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
@@ -159,13 +159,13 @@ the order they are declared.
 
 
 private KeepDeletedCells
-ScanQueryMatcher.keepDeletedCells
-whether to return deleted rows
-
+ScanInfo.keepDeletedCells 
 
 
 private KeepDeletedCells
-ScanInfo.keepDeletedCells 
+ScanQueryMatcher.keepDeletedCells
+whether to return deleted rows
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.KeyOnlyKeyValue.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.KeyOnlyKeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.KeyOnlyKeyValue.html
index 6cbd252..025a9fd 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.KeyOnlyKeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.KeyOnlyKeyValue.html
@@ -122,11 +122,11 @@
 
 
 private KeyValue.KeyOnlyKeyValue
-StoreFileWriter.lastBloomKeyOnlyKV 
+StoreFileReader.lastBloomKeyOnlyKV 
 
 
 private KeyValue.KeyOnlyKeyValue
-StoreFileReader.lastBloomKeyOnlyKV 
+StoreFileWriter.lastBloomKeyOnlyKV 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
index e83a300..e034c65 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
@@ -820,15 +820,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
+DefaultStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey) 
+
+
+http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
 StripeStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey)
 See StoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue)
  for details on this methods.
 
 
-
-http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
-DefaultStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey) 
-
 
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
 StoreFileManager.updateCandidateFilesForRowKeyBefore(http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator candidateFiles,
@@ -839,6 +839,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
+DefaultStoreFileManager.updateCandidateFilesForRowKeyBefore(http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator candidateFiles,
+  KeyValue targetKey,
+  Cell candidate) 
+
+
+http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
 StripeStoreFileManager.updateCandidateFilesForRowKeyBefore(http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator candidateFiles,
   KeyValue targetKey,
   Cell candidate)
@@ -847,12 +853,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  for details on this methods.
 
 
-
-http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
-DefaultStoreFileManager.updateCandidateFilesForRowKeyBefore(http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-

[41/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
index df129f8..9659e06 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
@@ -459,38 +459,38 @@ service.
 
 
 void
-HBaseAdmin.addColumn(TableName tableName,
+Admin.addColumn(TableName tableName,
   HColumnDescriptor columnFamily)
 Deprecated. 
-Since 2.0. Will be removed in 3.0. Use
- HBaseAdmin.addColumnFamily(TableName,
 HColumnDescriptor) instead.
+As of release 2.0.0.
+ (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
+ This will be removed in HBase 3.0.0.
+ Use Admin.addColumnFamily(TableName,
 HColumnDescriptor).
 
 
 
 
 void
-Admin.addColumn(TableName tableName,
+HBaseAdmin.addColumn(TableName tableName,
   HColumnDescriptor columnFamily)
 Deprecated. 
-As of release 2.0.0.
- (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
- This will be removed in HBase 3.0.0.
- Use Admin.addColumnFamily(TableName,
 HColumnDescriptor).
+Since 2.0. Will be removed in 3.0. Use
+ HBaseAdmin.addColumnFamily(TableName,
 HColumnDescriptor) instead.
 
 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
-HBaseAdmin.addColumnFamily(TableName tableName,
-  HColumnDescriptor columnFamily) 
-
-
-http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
 Admin.addColumnFamily(TableName tableName,
   HColumnDescriptor columnFamily)
 Add a column family to an existing table.
 
 
+
+http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
+HBaseAdmin.addColumnFamily(TableName tableName,
+  HColumnDescriptor columnFamily) 
+
 
 UnmodifyableHTableDescriptor
 UnmodifyableHTableDescriptor.addFamily(HColumnDescriptor family)
@@ -499,38 +499,38 @@ service.
 
 
 void
-HBaseAdmin.modifyColumn(TableName tableName,
+Admin.modifyColumn(TableName tableName,
 HColumnDescriptor columnFamily)
 Deprecated. 
-As of 2.0. Will be removed in 3.0. Use
- HBaseAdmin.modifyColumnFamily(TableName,
 HColumnDescriptor) instead.
+As of release 2.0.0.
+ (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
+ This will be removed in HBase 3.0.0.
+ Use Admin.modifyColumnFamily(TableName,
 HColumnDescriptor).
 
 
 
 
 void
-Admin.modifyColumn(TableName tableName,
+HBaseAdmin.modifyColumn(TableName tableName,
 HColumnDescriptor columnFamily)
 Deprecated. 
-As of release 2.0.0.
- (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
- This will be removed in HBase 3.0.0.
- Use Admin.modifyColumnFamily(TableName,
 HColumnDescriptor).
+As of 2.0. Will be removed in 3.0. Use
+ HBaseAdmin.modifyColumnFamily(TableName,
 HColumnDescriptor) instead.
 
 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
-HBaseAdmin.modifyColumnFamily(TableName tableName,
-HColumnDescriptor columnFamily) 
-
-
-http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
 Admin.modifyColumnFamily(TableName tableName,
 HColumnDescriptor columnFamily)
 Modify an existing column family on a table.
 
 
+
+http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">Future

[47/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
index ccc396d..910b506 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
@@ -242,18 +242,18 @@
 
 
 ByteRange
-ByteRange.deepCopy()
-Create a new ByteRange with new backing byte[] 
containing a copy
- of the content from this range's window.
-
+SimpleMutableByteRange.deepCopy() 
 
 
 ByteRange
-SimpleMutableByteRange.deepCopy() 
+SimpleByteRange.deepCopy() 
 
 
 ByteRange
-SimpleByteRange.deepCopy() 
+ByteRange.deepCopy()
+Create a new ByteRange with new backing byte[] 
containing a copy
+ of the content from this range's window.
+
 
 
 ByteRange
@@ -273,6 +273,16 @@
 
 
 ByteRange
+SimpleMutableByteRange.put(int index,
+  byte val) 
+
+
+ByteRange
+SimpleByteRange.put(int index,
+  byte val) 
+
+
+ByteRange
 ByteRange.put(int index,
   byte val)
 Store val at index.
@@ -280,13 +290,13 @@
 
 
 ByteRange
-SimpleMutableByteRange.put(int index,
-  byte val) 
+SimpleMutableByteRange.put(int index,
+  byte[] val) 
 
 
 ByteRange
-SimpleByteRange.put(int index,
-  byte val) 
+SimpleByteRange.put(int index,
+  byte[] val) 
 
 
 ByteRange
@@ -297,13 +307,17 @@
 
 
 ByteRange
-SimpleMutableByteRange.put(int index,
-  byte[] val) 
+SimpleMutableByteRange.put(int index,
+  byte[] val,
+  int offset,
+  int length) 
 
 
 ByteRange
-SimpleByteRange.put(int index,
-  byte[] val) 
+SimpleByteRange.put(int index,
+  byte[] val,
+  int offset,
+  int length) 
 
 
 ByteRange
@@ -317,17 +331,13 @@
 
 
 ByteRange
-SimpleMutableByteRange.put(int index,
-  byte[] val,
-  int offset,
-  int length) 
+SimpleMutableByteRange.putInt(int index,
+int val) 
 
 
 ByteRange
-SimpleByteRange.put(int index,
-  byte[] val,
-  int offset,
-  int length) 
+SimpleByteRange.putInt(int index,
+int val) 
 
 
 ByteRange
@@ -338,13 +348,13 @@
 
 
 ByteRange
-SimpleMutableByteRange.putInt(int index,
-int val) 
+SimpleMutableByteRange.putLong(int index,
+  long val) 
 
 
 ByteRange
-SimpleByteRange.putInt(int index,
-int val) 
+SimpleByteRange.putLong(int index,
+  long val) 
 
 
 ByteRange
@@ -355,13 +365,13 @@
 
 
 ByteRange
-SimpleMutableByteRange.putLong(int index,
-  long val) 
+SimpleMutableByteRange.putShort(int index,
+short val) 
 
 
 ByteRange
-SimpleByteRange.putLong(int index,
-  long val) 
+SimpleByteRange.putShort(int index,
+short val) 
 
 
 ByteRange
@@ -372,25 +382,21 @@
 
 
 ByteRange
-SimpleMutableByteRange.putShort(int index,
-short val) 
+SimpleByteRange.set(byte[] bytes) 
 
 
 ByteRange
-SimpleByteRange.putShort(int index,
-short val) 
-
-
-ByteRange
 ByteRange.set(byte[] bytes)
 Reuse this ByteRange over a new byte[].
 
 
-
+
 ByteRange
-SimpleByteRange.set(byte[] bytes) 
+SimpleByteRange.set(byte[] bytes,
+  int offset,
+  int length) 
 
-
+
 ByteRange
 ByteRange.set(byte[] bytes,
   int offset,
@@ -398,22 +404,16 @@
 Reuse this ByteRange over a new byte[].
 
 
-
+
 ByteRange
-SimpleByteRange.set(byte[] bytes,
-  int offset,
-  int length) 
+SimpleByteRange.set(int capacity) 
 
-
+
 ByteRange
 ByteRange.set(int capacity)
 Reuse this ByteRange over a new byte[].
 
 
-
-ByteRange
-SimpleByteRange.set(int capacity) 
-
 
 ByteRange
 ByteRange.setLength(int length)
@@ -428,17 +428,27 @@
 
 
 ByteRange
+SimpleMutableByteRange.shallowCopy() 
+
+
+ByteRange
+SimpleByteRange.shallowCopy() 
+
+
+ByteRange
 ByteRange.shallowCopy()
 Create a new ByteRange that points at this 
range's byte[].
 
 
 
 ByteRange
-SimpleMutableByteRange.shallowCopy() 
+SimpleMutableByteRange.shallowCopySubRange(int innerOffset,
+  int copyLength) 
 
 
 ByteRange
-SimpleByteRange.shallowCopy() 
+SimpleByteRange.shallowCopySubRange(int innerOffset,
+  int copyLength) 
 
 
 ByteRange
@@ -449,13 +459,11 @@
 
 
 ByteRange
-SimpleMutableByteRange.shallowCopySubRange(int innerOffset,
-  int copyLength) 
+SimpleMutableByteRange.unset() 
 
 
 ByteRange
-SimpleByteRange.shallowCopySubRange(int innerOffset,
-  int copyLength) 
+SimpleByteRange.unset() 
 
 
 ByteRange
@@ -463,14 +471,6 @@
 Nullifies this ByteRange.
 
 
-
-ByteRange
-SimpleMutableByteRange.unset() 
-
-
-ByteRange
-SimpleByteRange.unset() 
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html

[18/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
index dd304df..3eb53ae 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
@@ -122,13 +122,13 @@
 
 
 void
-BaseMasterObserver.preAbortProcedure(ObserverContext ctx,
+BaseMasterAndRegionObserver.preAbortProcedure(ObserverContext ctx,
   ProcedureExecutor procEnv,
   long procId) 
 
 
 void
-BaseMasterAndRegionObserver.preAbortProcedure(ObserverContext ctx,
+BaseMasterObserver.preAbortProcedure(ObserverContext ctx,
   ProcedureExecutor procEnv,
   long procId) 
 
@@ -196,124 +196,124 @@
 
 
 
-boolean
-CreateTableProcedure.abort(MasterProcedureEnv env) 
+protected boolean
+ServerCrashProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-TruncateTableProcedure.abort(MasterProcedureEnv env) 
+ModifyTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-DeleteNamespaceProcedure.abort(MasterProcedureEnv env) 
+AddColumnFamilyProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-ModifyNamespaceProcedure.abort(MasterProcedureEnv env) 
+RestoreSnapshotProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-CreateNamespaceProcedure.abort(MasterProcedureEnv env) 
+DeleteColumnFamilyProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-EnableTableProcedure.abort(MasterProcedureEnv env) 
+DisableTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-AddColumnFamilyProcedure.abort(MasterProcedureEnv env) 
+EnableTableProcedure.abort(MasterProcedureEnv env) 
 
 
-protected boolean
-ServerCrashProcedure.abort(MasterProcedureEnv env) 
+boolean
+DeleteNamespaceProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-DisableTableProcedure.abort(MasterProcedureEnv env) 
+ModifyColumnFamilyProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-ModifyTableProcedure.abort(MasterProcedureEnv env) 
+CreateTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-DeleteTableProcedure.abort(MasterProcedureEnv env) 
+TruncateTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-DeleteColumnFamilyProcedure.abort(MasterProcedureEnv env) 
+CreateNamespaceProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-CloneSnapshotProcedure.abort(MasterProcedureEnv env) 
+DeleteTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-ModifyColumnFamilyProcedure.abort(MasterProcedureEnv env) 
+CloneSnapshotProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-RestoreSnapshotProcedure.abort(MasterProcedureEnv env) 
+ModifyNamespaceProcedure.abort(MasterProcedureEnv env) 
 
 
 protected boolean
-CreateTableProcedure.acquireLock(MasterProcedureEnv env) 
+ServerCrashProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-TruncateTableProcedure.acquireLock(MasterProcedureEnv env) 
+ModifyTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-DeleteNamespaceProcedure.acquireLock(MasterProcedureEnv env) 
+AddColumnFamilyProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-ModifyNamespaceProcedure.acquireLock(MasterProcedureEnv env) 
+RestoreSnapshotProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-CreateNamespaceProcedure.acquireLock(MasterProcedureEnv env) 
+DeleteColumnFamilyProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-EnableTableProcedure.acquireLock(MasterProcedureEnv env) 
+DisableTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-AddColumnFamilyProcedure.acquireLock(MasterProcedureEnv env) 
+EnableTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-ServerCrashProcedure.acquireLock(MasterProcedureEnv env) 
+DeleteNamespaceProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-DisableTableProcedure.acquireLock(MasterProcedureEnv env) 
+ModifyColumnFamilyProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-ModifyTableProcedure.acquireLock(MasterProcedureEnv env) 
+CreateTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-DeleteTableProcedure.acquireLock(MasterProcedureEnv env) 
+TruncateTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-DeleteColumnFamilyProcedure.acquireLock(MasterProcedureEnv env) 
+CreateNamespaceProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected

[39/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
index 2b43da0..023be44 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
@@ -298,11 +298,11 @@ service.
 
 
 protected HRegionLocation
-RegionServerCallable.getLocation() 
+MultiServerCallable.getLocation() 
 
 
 protected HRegionLocation
-MultiServerCallable.getLocation() 
+RegionServerCallable.getLocation() 
 
 
 HRegionLocation
@@ -310,26 +310,26 @@ service.
 
 
 HRegionLocation
-RegionLocator.getRegionLocation(byte[] row)
+HRegionLocator.getRegionLocation(byte[] row)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-HRegionLocator.getRegionLocation(byte[] row)
+RegionLocator.getRegionLocation(byte[] row)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-RegionLocator.getRegionLocation(byte[] row,
+HRegionLocator.getRegionLocation(byte[] row,
   boolean reload)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-HRegionLocator.getRegionLocation(byte[] row,
+RegionLocator.getRegionLocation(byte[] row,
   boolean reload)
 Finds the region on which the given row is being 
served.
 
@@ -479,13 +479,13 @@ service.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-RegionLocator.getAllRegionLocations()
-Retrieves all of the regions associated with this 
table.
-
+HRegionLocator.getAllRegionLocations() 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HRegionLocator.getAllRegionLocations() 
+RegionLocator.getAllRegionLocations()
+Retrieves all of the regions associated with this 
table.
+
 
 
 private PairList,http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List>
@@ -594,16 +594,16 @@ service.
 
 
 
-void
-MetaCache.cacheLocation(TableName tableName,
+private void
+ConnectionImplementation.cacheLocation(TableName tableName,
   ServerName source,
   HRegionLocation location)
 Put a newly discovered HRegionLocation into the cache.
 
 
 
-private void
-ConnectionImplementation.cacheLocation(TableName tableName,
+void
+MetaCache.cacheLocation(TableName tableName,
   ServerName source,
   HRegionLocation location)
 Put a newly discovered HRegionLocation into the cache.



[05/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html 
b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
index c53c40f..c208e8c 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
@@ -379,22 +379,22 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 private HMaster m_master
 
 
-
+
 
 
 
 
-m_format
-private http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String m_format
+m_catalogJanitorEnabled
+private boolean m_catalogJanitorEnabled
 
 
-
+
 
 
 
 
-m_format__IsNotDefault
-private boolean m_format__IsNotDefault
+m_catalogJanitorEnabled__IsNotDefault
+private boolean m_catalogJanitorEnabled__IsNotDefault
 
 
 
@@ -415,130 +415,130 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 private boolean m_deadServers__IsNotDefault
 
 
-
+
 
 
 
 
-m_serverManager
-private ServerManager m_serverManager
+m_frags
+private http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer> m_frags
 
 
-
+
 
 
 
 
-m_serverManager__IsNotDefault
-private boolean m_serverManager__IsNotDefault
+m_frags__IsNotDefault
+private boolean m_frags__IsNotDefault
 
 
-
+
 
 
 
 
-m_frags
-private http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer> m_frags
+m_assignmentManager
+private AssignmentManager m_assignmentManager
 
 
-
+
 
 
 
 
-m_frags__IsNotDefault
-private boolean m_frags__IsNotDefault
+m_assignmentManager__IsNotDefault
+private boolean m_assignmentManager__IsNotDefault
 
 
-
+
 
 
 
 
-m_servers
-private http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List m_servers
+m_format
+private http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String m_format
 
 
-
+
 
 
 
 
-m_servers__IsNotDefault
-private boolean m_servers__IsNotDefault
+m_format__IsNotDefault
+private boolean m_format__IsNotDefault
 
 
-
+
 
 
 
 
-m_catalogJanitorEnabled
-private boolean m_catalogJanitorEnabled
+m_servers
+private http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List m_servers
 
 
-
+
 
 
 
 
-m_catalogJanitorEnabled__IsNotDefault
-private boolean m_catalogJanitorEnabled__IsNotDefault
+m_servers__IsNotDefault
+private boolean m_servers__IsNotDefault
 
 
-
+
 
 
 
 
-m_assignmentManager
-private AssignmentManager m_assignmentManager
+m_serverManager
+private ServerManager m_serverManager
 
 
-
+
 
 
 
 
-m_assignmentManager__IsNotDefault
-private boolean m_assignmentManager__IsNotDefault
+m_serverManager__IsNotDefault
+private boolean m_serverManager__IsNotDefault
 
 
-
+
 
 
 
 
-m_metaLocation
-private ServerName m_metaLocation
+m_filter
+private http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String m_filter
 
 
-
+
 
 
 
 
-m_metaLocation__IsNotDefault
-private boolean m_metaLocation__IsNotDefault
+m_filter__IsNotDefault
+private boolean m_filter__IsNotDefault
 
 
-
+
 
 
 
 
-m_filter
-private http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String m_filter
+m_metaLocation
+private ServerName m_metaLocation
 
 
-
+
 
 
 
 
-m_filter__IsNotDefault
-private boolean m_filter__IsNotDefault
+m_metaLocation__IsNotDefault
+private boolean m_metaLocation__IsNotDefault
 
 
 
@@ -584,31 +584,31 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 public HMaster getMaster()
 
 
-
+
 
 
 
 
-setFormat
-public void setFormat(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String format)
+setCatalogJanitorEnabled
+public void setCatalogJanitorEnabled(boolean catalogJanitorEnabled)
 
 
-
+
 
 
 
 
-getFormat
-public http://docs.oracle.com/javase/7/docs/api/java/lang/String

[08/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeer.PeerState.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeer.PeerState.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeer.PeerState.html
index 12300c2..8958044 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeer.PeerState.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeer.PeerState.html
@@ -111,14 +111,14 @@
 
 
 ReplicationPeer.PeerState
-ReplicationPeerZKImpl.getPeerState() 
-
-
-ReplicationPeer.PeerState
 ReplicationPeer.getPeerState()
 Returns the state of the peer
 
 
+
+ReplicationPeer.PeerState
+ReplicationPeerZKImpl.getPeerState() 
+
 
 static ReplicationPeer.PeerState
 ReplicationPeer.PeerState.valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
index 4399595..833edc7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfig.html
@@ -228,13 +228,13 @@
 
 
 ReplicationPeerConfig
-ReplicationPeerZKImpl.getPeerConfig()
+ReplicationPeer.getPeerConfig()
 Get the peer config object
 
 
 
 ReplicationPeerConfig
-ReplicationPeer.getPeerConfig()
+ReplicationPeerZKImpl.getPeerConfig()
 Get the peer config object
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfigListener.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfigListener.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfigListener.html
index 1ead7aa..64d1957 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfigListener.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeerConfigListener.html
@@ -161,11 +161,11 @@
 
 
 void
-ReplicationPeerZKImpl.trackPeerConfigChanges(ReplicationPeerConfigListener listener) 
+ReplicationPeer.trackPeerConfigChanges(ReplicationPeerConfigListener listener) 
 
 
 void
-ReplicationPeer.trackPeerConfigChanges(ReplicationPeerConfigListener listener) 
+ReplicationPeerZKImpl.trackPeerConfigChanges(ReplicationPeerConfigListener listener) 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
index ad704a7..2ebb71f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationPeers.html
@@ -215,15 +215,15 @@
 
 
 private ReplicationPeers
-ReplicationSourceManager.replicationPeers 
+ReplicationSource.replicationPeers 
 
 
 private ReplicationPeers
-Replication.replicationPeers 
+ReplicationSourceManager.replicationPeers 
 
 
 private ReplicationPeers
-ReplicationSource.replicationPeers 
+Replication.replicationPeers 
 
 
 private ReplicationPeers
@@ -255,7 +255,7 @@
 
 
 void
-ReplicationSourceInterface.init(org.apache.hadoop.conf.Configuration conf,
+ReplicationSource.init(org.apache.hadoop.conf.Configuration conf,
 org.apache.hadoop.fs.FileSystem fs,
 ReplicationSourceManager manager,
 ReplicationQueues replicationQueues,
@@ -265,12 +265,12 @@
 http://docs.oracle.com/javase/7/docs/api/java/util/UUID.html?is-external=true";
 title="class or interface in java.util">UUID clusterId,
 ReplicationEndpoint replicationEndpoint,
 MetricsSource metrics)
-Initializer for the source
+Instantiation method used by region servers
 
 
 
 void
-ReplicationSource.init(org.apache.hadoop.conf.Configuration conf,
+ReplicationSourceInterface.init(org.apache.hadoop.conf.Configuration conf,
 org.apache.hadoop.fs.FileSystem fs,
 ReplicationSourceManager manager,
 ReplicationQueues replicationQueues,
@@ -280,7 +280,7 @@
 http://docs.oracle.com/javase/7/docs/api/java/

[17/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
index c484e93..7bbdf21 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
@@ -96,65 +96,65 @@
 
 
 TableProcedureInterface.TableOperationType
-CreateTableProcedure.getTableOperationType() 
+ModifyTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-TruncateTableProcedure.getTableOperationType() 
+AddColumnFamilyProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-DeleteNamespaceProcedure.getTableOperationType() 
+RestoreSnapshotProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-TableProcedureInterface.getTableOperationType()
-Given an operation type we can take decisions about what to 
do with pending operations.
-
+DeleteColumnFamilyProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-ModifyNamespaceProcedure.getTableOperationType() 
+DisableTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-CreateNamespaceProcedure.getTableOperationType() 
+EnableTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-EnableTableProcedure.getTableOperationType() 
+DeleteNamespaceProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-AddColumnFamilyProcedure.getTableOperationType() 
+ModifyColumnFamilyProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-DisableTableProcedure.getTableOperationType() 
+CreateTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-ModifyTableProcedure.getTableOperationType() 
+TableProcedureInterface.getTableOperationType()
+Given an operation type we can take decisions about what to 
do with pending operations.
+
 
 
 TableProcedureInterface.TableOperationType
-DeleteTableProcedure.getTableOperationType() 
+TruncateTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-DeleteColumnFamilyProcedure.getTableOperationType() 
+CreateNamespaceProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-CloneSnapshotProcedure.getTableOperationType() 
+DeleteTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-ModifyColumnFamilyProcedure.getTableOperationType() 
+CloneSnapshotProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-RestoreSnapshotProcedure.getTableOperationType() 
+ModifyNamespaceProcedure.getTableOperationType() 
 
 
 static TableProcedureInterface.TableOperationType

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/monitoring/MonitoredTask.State.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/monitoring/MonitoredTask.State.html 
b/devapidocs/org/apache/hadoop/hbase/monitoring/MonitoredTask.State.html
index 73013a4..706c044 100644
--- a/devapidocs/org/apache/hadoop/hbase/monitoring/MonitoredTask.State.html
+++ b/devapidocs/org/apache/hadoop/hbase/monitoring/MonitoredTask.State.html
@@ -246,7 +246,7 @@ the order they are declared.
 
 
 values
-public static MonitoredTask.State[] values()
+public static MonitoredTask.State[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -263,7 +263,7 @@ for (MonitoredTask.State c : MonitoredTask.State.values())
 
 
 valueOf
-public static MonitoredTask.State valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static MonitoredTask.State valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/monitoring/class-use/MonitoredRPCHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase

[43/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 3dd7498..1e61e56 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -606,6 +606,16 @@ service.
 
 
 int
+CellComparator.compare(Cell a,
+  Cell b) 
+
+
+int
+CellComparator.RowComparator.compare(Cell a,
+  Cell b) 
+
+
+int
 KeyValue.MetaComparator.compare(Cell left,
   Cell right)
 Deprecated. 
@@ -630,16 +640,6 @@ service.
  
 
 
-int
-CellComparator.compare(Cell a,
-  Cell b) 
-
-
-int
-CellComparator.RowComparator.compare(Cell a,
-  Cell b) 
-
-
 private int
 CellComparator.compare(Cell a,
   Cell b,
@@ -815,37 +815,37 @@ service.
 
 
 int
-KeyValue.KVComparator.compareRows(Cell left,
-  Cell right)
-Deprecated. 
- 
-
-
-int
 CellComparator.compareRows(Cell left,
   Cell right)
 Compares the rows of the left and right cell.
 
 
-
+
 int
 CellComparator.MetaCellComparator.compareRows(Cell left,
   Cell right) 
 
-
+
 int
-KeyValue.KVComparator.compareTimestamps(Cell left,
-  Cell right)
+KeyValue.KVComparator.compareRows(Cell left,
+  Cell right)
 Deprecated. 
  
 
-
+
 static int
 CellComparator.compareTimestamps(Cell left,
   Cell right)
 Compares cell's timestamps in DESCENDING order.
 
 
+
+int
+KeyValue.KVComparator.compareTimestamps(Cell left,
+  Cell right)
+Deprecated. 
+ 
+
 
 static int
 CellComparator.compareValue(Cell cell,
@@ -1645,23 +1645,23 @@ service.
 
 
 
-Increment
-Increment.add(Cell cell)
-Add the specified KeyValue to this operation.
-
-
-
 Append
 Append.add(Cell cell)
 Add column and value to this Append operation.
 
 
-
+
 Put
 Put.add(Cell kv)
 Add the specified KeyValue to this Put operation.
 
 
+
+Increment
+Increment.add(Cell cell)
+Add the specified KeyValue to this operation.
+
+
 
 Delete
 Delete.addDeleteMarker(Cell kv)
@@ -1750,6 +1750,14 @@ service.
 boolean partial) 
 
 
+Append
+Append.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
+
+Put
+Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
+
 Delete
 Delete.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
@@ -1763,14 +1771,6 @@ service.
 Increment
 Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
-
-Append
-Append.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
-
-Put
-Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
 
 
 
@@ -1808,17 +1808,17 @@ service.
 Cell kv) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true";
 title="class or interface in java.lang">Double
-DoubleColumnInterpreter.getValue(byte[] colFamily,
-byte[] colQualifier,
-Cell c) 
-
-
 http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
 LongColumnInterpreter.getValue(byte[] colFamily,
 byte[] colQualifier,
 Cell kv) 
 
+
+http://docs.oracle.com/javase/7/docs/api/java/lang/Doub

[04/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/util/class-use/BloomFilterWriter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/util/class-use/BloomFilterWriter.html 
b/devapidocs/org/apache/hadoop/hbase/util/class-use/BloomFilterWriter.html
index 7a3cab6..e474b9b 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/class-use/BloomFilterWriter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/class-use/BloomFilterWriter.html
@@ -129,25 +129,25 @@
 
 
 void
-HFileWriterImpl.addDeleteFamilyBloomFilter(BloomFilterWriter bfw) 
-
-
-void
 HFile.Writer.addDeleteFamilyBloomFilter(BloomFilterWriter bfw)
 Store delete family Bloom filter in the file, which is only 
supported in
  HFile V2.
 
 
-
+
 void
-HFileWriterImpl.addGeneralBloomFilter(BloomFilterWriter bfw) 
+HFileWriterImpl.addDeleteFamilyBloomFilter(BloomFilterWriter bfw) 
 
-
+
 void
 HFile.Writer.addGeneralBloomFilter(BloomFilterWriter bfw)
 Store general Bloom filter in the file.
 
 
+
+void
+HFileWriterImpl.addGeneralBloomFilter(BloomFilterWriter bfw) 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/util/class-use/ByteBufferArray.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/util/class-use/ByteBufferArray.html 
b/devapidocs/org/apache/hadoop/hbase/util/class-use/ByteBufferArray.html
index 8a379c8..09ba24d 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/class-use/ByteBufferArray.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/class-use/ByteBufferArray.html
@@ -99,11 +99,11 @@
 
 
 private ByteBufferArray
-FileMmapEngine.bufferArray 
+ByteBufferIOEngine.bufferArray 
 
 
 private ByteBufferArray
-ByteBufferIOEngine.bufferArray 
+FileMmapEngine.bufferArray 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html 
b/devapidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
index 8b81cf9..3926ea9 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
@@ -561,11 +561,11 @@
 
 
 ByteRange
-SimpleMutableByteRange.deepCopy() 
+SimpleByteRange.deepCopy() 
 
 
 ByteRange
-SimpleByteRange.deepCopy() 
+SimpleMutableByteRange.deepCopy() 
 
 
 ByteRange
@@ -604,17 +604,17 @@
 
 
 ByteRange
-SimpleMutableByteRange.put(int index,
+SimpleByteRange.put(int index,
   byte val) 
 
 
-ByteRange
-SimpleByteRange.put(int index,
+abstract ByteRange
+AbstractByteRange.put(int index,
   byte val) 
 
 
-abstract ByteRange
-AbstractByteRange.put(int index,
+ByteRange
+SimpleMutableByteRange.put(int index,
   byte val) 
 
 
@@ -626,17 +626,17 @@
 
 
 ByteRange
-SimpleMutableByteRange.put(int index,
+SimpleByteRange.put(int index,
   byte[] val) 
 
 
-ByteRange
-SimpleByteRange.put(int index,
+abstract ByteRange
+AbstractByteRange.put(int index,
   byte[] val) 
 
 
-abstract ByteRange
-AbstractByteRange.put(int index,
+ByteRange
+SimpleMutableByteRange.put(int index,
   byte[] val) 
 
 
@@ -648,21 +648,21 @@
 
 
 ByteRange
-SimpleMutableByteRange.put(int index,
+SimpleByteRange.put(int index,
   byte[] val,
   int offset,
   int length) 
 
 
-ByteRange
-SimpleByteRange.put(int index,
+abstract ByteRange
+AbstractByteRange.put(int index,
   byte[] val,
   int offset,
   int length) 
 
 
-abstract ByteRange
-AbstractByteRange.put(int index,
+ByteRange
+SimpleMutableByteRange.put(int index,
   byte[] val,
   int offset,
   int length) 
@@ -679,17 +679,17 @@
 
 
 ByteRange
-SimpleMutableByteRange.putInt(int index,
+SimpleByteRange.putInt(int index,
 int val) 
 
 
-ByteRange
-SimpleByteRange.putInt(int index,
+abstract ByteRange
+AbstractByteRange.putInt(int index,
 int val) 
 
 
-abstract ByteRange
-AbstractByteRange.putInt(int index,
+ByteRange
+SimpleMutableByteRange.putInt(int index,
 int val) 
 
 
@@ -701,17 +701,17 @@
 
 
 ByteRange
-SimpleMutableByteRange.putLong(int index,
+SimpleByteRange.putLong(int index,
   long val) 
 
 
-ByteRange
-SimpleByteRange.putLong(int index,
+abstract ByteRange
+AbstractByteRange.putLong(int index,
   long val) 
 
 
-abstract ByteRange
-AbstractByteRange.putLong(int index,
+ByteRange
+SimpleMutableByteRange.putLong(int index,
   long val) 
 
 
@@ -723,17 +723,17 @@
 
 
 ByteRange
-SimpleMutableByteRange.putShort(int index,
+SimpleByteRange.putShort(int index,
 short val) 
 
 
-ByteRange
-SimpleByteRange.putShort(int index,
+abstract ByteRange
+AbstractByteRange.putShort(int index,
 short val) 

[16/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html
index 8ccabae..be2d062 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html
@@ -577,28 +577,28 @@
 
 
 protected Procedure[]
+SequentialProcedure.doExecute(TEnvironment env) 
+
+
+protected Procedure[]
 Procedure.doExecute(TEnvironment env)
 Internal method called by the ProcedureExecutor that starts 
the
  user-level code execute().
 
 
-
+
 protected Procedure[]
-SequentialProcedure.doExecute(TEnvironment env) 
+ProcedureExecutor.CompletedProcedureCleaner.execute(TEnvironment env) 
 
-
+
 protected abstract Procedure[]
 Procedure.execute(TEnvironment env)
 The main code of the procedure.
 
 
-
-protected Procedure[]
-StateMachineProcedure.execute(TEnvironment env) 
-
 
 protected Procedure[]
-ProcedureExecutor.CompletedProcedureCleaner.execute(TEnvironment env) 
+StateMachineProcedure.execute(TEnvironment env) 
 
 
 Procedure
@@ -610,13 +610,13 @@
 
 
 Procedure
-ProcedureRunnableSet.poll()
-Fetch one Procedure from the queue
-
+ProcedureSimpleRunQueue.poll() 
 
 
 Procedure
-ProcedureSimpleRunQueue.poll() 
+ProcedureRunnableSet.poll()
+Fetch one Procedure from the queue
+
 
 
 
@@ -652,13 +652,13 @@
 
 
 void
-ProcedureRunnableSet.addBack(Procedure proc)
-Inserts the specified element at the end of this 
queue.
-
+ProcedureSimpleRunQueue.addBack(Procedure proc) 
 
 
 void
-ProcedureSimpleRunQueue.addBack(Procedure proc) 
+ProcedureRunnableSet.addBack(Procedure proc)
+Inserts the specified element at the end of this 
queue.
+
 
 
 protected void
@@ -668,13 +668,13 @@
 
 
 void
-ProcedureRunnableSet.addFront(Procedure proc)
-Inserts the specified element at the front of this 
queue.
-
+ProcedureSimpleRunQueue.addFront(Procedure proc) 
 
 
 void
-ProcedureSimpleRunQueue.addFront(Procedure proc) 
+ProcedureRunnableSet.addFront(Procedure proc)
+Inserts the specified element at the front of this 
queue.
+
 
 
 protected void
@@ -689,13 +689,13 @@
 
 
 void
-ProcedureRunnableSet.completionCleanup(Procedure proc)
-The procedure in execution completed.
-
+ProcedureSimpleRunQueue.completionCleanup(Procedure proc) 
 
 
 void
-ProcedureSimpleRunQueue.completionCleanup(Procedure proc) 
+ProcedureRunnableSet.completionCleanup(Procedure proc)
+The procedure in execution completed.
+
 
 
 static 
org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure
@@ -787,13 +787,13 @@
 
 
 void
-ProcedureRunnableSet.yield(Procedure proc)
-The procedure can't run at the moment.
-
+ProcedureSimpleRunQueue.yield(Procedure proc) 
 
 
 void
-ProcedureSimpleRunQueue.yield(Procedure proc) 
+ProcedureRunnableSet.yield(Procedure proc)
+The procedure can't run at the moment.
+
 
 
 
@@ -840,16 +840,6 @@
 
 
 void
-NoopProcedureStore.insert(Procedure proc,
-Procedure[] subprocs) 
-
-
-void
-NoopProcedureStore.insert(Procedure proc,
-Procedure[] subprocs) 
-
-
-void
 ProcedureStore.insert(Procedure proc,
 Procedure[] subprocs)
 When a procedure is submitted to the executor insert(proc, 
null) will be called.
@@ -864,15 +854,25 @@
 
 
 void
-NoopProcedureStore.update(Procedure proc) 
+NoopProcedureStore.insert(Procedure proc,
+Procedure[] subprocs) 
 
 
 void
+NoopProcedureStore.insert(Procedure proc,
+Procedure[] subprocs) 
+
+
+void
 ProcedureStore.update(Procedure proc)
 The specified procedure was executed,
  and the new state should be written to the store.
 
 
+
+void
+NoopProcedureStore.update(Procedure proc) 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.html
index eedd2e1..b991385 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.html
@@ -122,13 +122,13 @@
 
 
 void
-BaseMasterObserver.preAbortProcedure(ObserverContext ctx,
+BaseMasterAndRegionObserver.preAbortProcedure(ObserverContext ctx,
   ProcedureExecutor procEnv,
   long procId) 
 
 
 void
-BaseMasterAndRegionObserver.preAbortProcedure(ObserverContext ctx,
+BaseMasterObserver.preAbortProcedure(ObserverContext

[48/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index 95d6c87..e72ed25 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -132,11 +132,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 Filter
-Query.getFilter() 
+Scan.getFilter() 
 
 
 Filter
-Scan.getFilter() 
+Query.getFilter() 
 
 
 
@@ -148,19 +148,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
+Scan
+Scan.setFilter(Filter filter) 
+
+
 Get
 Get.setFilter(Filter filter) 
 
-
+
 Query
 Query.setFilter(Filter filter)
 Apply the specified server-side filter when performing the 
Query.
 
 
-
-Scan
-Scan.setFilter(Filter filter) 
-
 
 
 
@@ -382,55 +382,55 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Filter
-ColumnPaginationFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ColumnRangeFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-MultipleColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ColumnPaginationFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-InclusiveStopFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-FamilyFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+SingleColumnValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-DependentColumnFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+PrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-PageFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+MultipleColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-SingleColumnValueExcludeFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+QualifierFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-TimestampsFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+SingleColumnValueExcludeFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-PrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+TimestampsFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList

[51/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/31b3fd50
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/31b3fd50
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/31b3fd50

Branch: refs/heads/asf-site
Commit: 31b3fd50a6e8116d4ed9096026001651f4f2044f
Parents: c4be37a
Author: jenkins 
Authored: Fri Apr 22 15:21:29 2016 +
Committer: Misty Stanley-Jones 
Committed: Fri Apr 22 11:40:37 2016 -0700

--
 acid-semantics.html |4 +-
 apache_hbase_reference_guide.pdf|  Bin 13389995 -> 13389995 
bytes
 apache_hbase_reference_guide.pdfmarks   |4 +-
 .../apache/hadoop/hbase/KeepDeletedCells.html   |4 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |  188 +-
 .../hadoop/hbase/class-use/TableName.html   |   36 +-
 .../apache/hadoop/hbase/client/Durability.html  |4 +-
 .../hadoop/hbase/client/IsolationLevel.html |4 +-
 .../hadoop/hbase/client/class-use/Admin.html|6 +-
 .../hbase/client/class-use/Consistency.html |   10 +-
 .../hbase/client/class-use/Durability.html  |   20 +-
 .../hbase/client/class-use/IsolationLevel.html  |   10 +-
 .../hadoop/hbase/client/class-use/Mutation.html |8 +-
 .../hbase/client/class-use/RegionLocator.html   |6 +-
 .../hadoop/hbase/client/class-use/Result.html   |   48 +-
 .../hadoop/hbase/client/class-use/Row.html  |4 +-
 .../hadoop/hbase/client/class-use/Scan.html |6 +-
 .../hadoop/hbase/client/package-tree.html   |2 +-
 .../hbase/filter/CompareFilter.CompareOp.html   |4 +-
 .../filter/class-use/ByteArrayComparable.html   |8 +-
 .../class-use/CompareFilter.CompareOp.html  |8 +-
 .../filter/class-use/Filter.ReturnCode.html |   62 +-
 .../hadoop/hbase/filter/class-use/Filter.html   |   64 +-
 .../hadoop/hbase/filter/package-tree.html   |4 +-
 .../io/class-use/ImmutableBytesWritable.html|   60 +-
 .../hadoop/hbase/io/class-use/TimeRange.html|8 +-
 .../hbase/io/crypto/class-use/Cipher.html   |8 +-
 .../hbase/io/encoding/DataBlockEncoding.html|4 +-
 .../mapreduce/class-use/TableRecordReader.html  |4 +-
 .../hbase/quotas/ThrottlingException.Type.html  |4 +-
 .../hadoop/hbase/quotas/package-tree.html   |4 +-
 .../hadoop/hbase/regionserver/BloomType.html|4 +-
 .../hadoop/hbase/util/FastLongHistogram.html|   10 +-
 .../hadoop/hbase/util/class-use/ByteRange.html  |  126 +-
 .../hadoop/hbase/util/class-use/Order.html  |   42 +-
 .../hadoop/hbase/util/class-use/Pair.html   |4 +-
 .../util/class-use/PositionedByteRange.html |  174 +-
 apidocs/overview-tree.html  |   16 +-
 .../hadoop/hbase/util/FastLongHistogram.html|   75 +-
 book.html   |2 +-
 bulk-loads.html |4 +-
 checkstyle-aggregate.html   | 2424 +-
 coc.html|4 +-
 cygwin.html |4 +-
 dependencies.html   |4 +-
 dependency-convergence.html |4 +-
 dependency-info.html|4 +-
 dependency-management.html  |4 +-
 devapidocs/constant-values.html |   42 +
 devapidocs/deprecated-list.html |  134 +-
 devapidocs/index-all.html   |   48 +
 .../org/apache/hadoop/hbase/KeyValue.Type.html  |4 +-
 .../hadoop/hbase/class-use/Abortable.html   |   22 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |  854 +++---
 .../hadoop/hbase/class-use/CellComparator.html  |  126 +-
 .../hadoop/hbase/class-use/CellScanner.html |   38 +-
 .../hadoop/hbase/class-use/ClusterStatus.html   |   14 +-
 .../hadoop/hbase/class-use/Coprocessor.html |   12 +-
 .../hbase/class-use/CoprocessorEnvironment.html |   54 +-
 .../hbase/class-use/HBaseIOException.html   |8 +-
 .../hbase/class-use/HColumnDescriptor.html  |  128 +-
 .../hadoop/hbase/class-use/HRegionInfo.html |  222 +-
 .../hadoop/hbase/class-use/HRegionLocation.html |   28 +-
 .../hbase/class-use/HTableDescriptor.html   |  254 +-
 .../hbase/class-use/KeepDeletedCells.html   |8 +-
 .../class-use/KeyValue.KeyOnlyKeyValue.html |4 +-
 .../apache/hadoop/hbase/class-use/KeyValue.html |   20 +-
 .../hbase/class-use/NamespaceDescriptor.html|  116 +-
 .../hadoop/hbase/class-use/ProcedureInfo.html   |   12 +-
 .../hadoop/hbase/class-use/RegionLocations.html |   24 +-
 .../hadoop/hbase/class-use/ScheduledChore.html  |   32 +-
 .../apache/hadoop/hbase/class-use/Server.html   |  114 +-
 .../hadoop/hbase/class-use/ServerName.html  |  238 +-
 .../hadoo

[24/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index 964f396..856ab2e 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -158,11 +158,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 Filter
-Query.getFilter() 
+Scan.getFilter() 
 
 
 Filter
-Scan.getFilter() 
+Query.getFilter() 
 
 
 
@@ -174,18 +174,18 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Query
-Query.setFilter(Filter filter)
-Apply the specified server-side filter when performing the 
Query.
-
+Get
+Get.setFilter(Filter filter) 
 
 
 Scan
 Scan.setFilter(Filter filter) 
 
 
-Get
-Get.setFilter(Filter filter) 
+Query
+Query.setFilter(Filter filter)
+Apply the specified server-side filter when performing the 
Query.
+
 
 
 
@@ -452,81 +452,81 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Filter
-FilterBase.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments)
-Given the filter's arguments it constructs the filter
-
+MultipleColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-FamilyFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+RowFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-ColumnPaginationFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+FamilyFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-ValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+PrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-KeyOnlyFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+FilterBase.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments)
+Given the filter's arguments it constructs the filter
+
 
 
 static Filter
-MultipleColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+DependentColumnFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-FirstKeyOnlyFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+SingleColumnValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-ColumnCountGetFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+InclusiveStopFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-ColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 titl

[44/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/coc.html
--
diff --git a/coc.html b/coc.html
index a037f15..8a6456a 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – 
   Code of Conduct Policy
@@ -331,7 +331,7 @@ For flagrant violations requiring a firm response the PMC 
may opt to skip early
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-21
+  Last Published: 
2016-04-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/cygwin.html
--
diff --git a/cygwin.html b/cygwin.html
index da609a4..70fd5be 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Installing Apache HBase (TM) on Windows using 
Cygwin
 
@@ -673,7 +673,7 @@ Now your HBase server is running, start 
coding and build that next
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-21
+  Last Published: 
2016-04-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index 4bdb6be..1a1e321 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Dependencies
 
@@ -518,7 +518,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-21
+  Last Published: 
2016-04-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 87e3063..1f33e09 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Reactor Dependency Convergence
 
@@ -1702,7 +1702,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-21
+  Last Published: 
2016-04-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/dependency-info.html
--
diff --git a/dependency-info.html b/dependency-info.html
index 23da398..7c75e3a 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Dependency Information
 
@@ -312,7 +312,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-21
+  Last Published: 
2016-04-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/dependency-management.html
--
diff --git a/dependency-management.html b/dependency-management.html
index 7dc61e9..098ad63 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Dependency Management
 
@@ -798,7 +798,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-21
+  Last Published: 
2016-04-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/constant-values.html
--
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 9b52e42..1b82dc0 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -15977,6 +15977,20 @@
 
 
 
+
+
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+MEMSTORE_SIZE
+"memstoreSize"
+
+
+
+
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.la

[26/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
index 8b76c3f..05e06b1 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
@@ -191,11 +191,11 @@
 
 
 void
-BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
+BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
 
 
 void
-BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
+BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
 
 
 void
@@ -211,7 +211,7 @@
 
 
 void
-BaseMasterObserver.postAddColumn(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
 Deprecated. 
@@ -219,7 +219,7 @@
 
 
 void
-BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
+BaseMasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
 Deprecated. 
@@ -235,13 +235,13 @@
 
 
 void
-BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily) 
 
@@ -255,13 +255,13 @@
 
 
 void
-BaseMasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumnFamilyHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnFamilyHandler(ObserverContext ctx,
+BaseMasterObserver.postAddColumnFamilyHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily) 
 
@@ -279,7 +279,7 @@
 
 
 void
-BaseMasterObserver.postAddColumnHandler(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily)
 Deprecated. 
@@ -287,7 +287,7 @@
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
+BaseMasterObserver.postAddColumnHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily)
 Deprecated. 
@@ -302,12 +302,12 @@
 
 
 void
-BaseMasterObserver.postAddRSGroup(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddRSGroup(ObserverContext ctx,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name) 
 
 
 void
-BaseMasterAndRegionObserver.postAddRSGroup(ObserverContext ctx,
+BaseMasterObserver.postAddRSGroup(ObserverContext ctx,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name) 
 
 
@@ -333,12 +333,12 @@
 
 
 void
-BaseMasterObserver.postAssign(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAssign(ObserverContext ctx,
 HRegionInfo regionInfo) 
 
 
 void
-BaseMasterAndRegionObserver.postAssign(ObserverContext ctx,
+BaseMasterObserver

[45/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index efa4ee0..b239700 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -283,7 +283,7 @@
 1743
 0
 0
-12440
+12446
 
 Files
 
@@ -298,5720 +298,5720 @@
 0
 1
 
-maven-archiver/pom.properties
-0
-0
-1
-
 org/apache/hadoop/hbase/AuthUtil.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/BaseConfigurable.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ByteBufferedKeyOnlyKeyValue.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/Cell.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/CellComparator.java
 0
 0
 30
-
+
 org/apache/hadoop/hbase/CellScanner.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/CellUtil.java
 0
 0
 95
-
+
 org/apache/hadoop/hbase/ChoreService.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/ClusterId.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/ClusterStatus.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/CompatibilityFactory.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/CompatibilitySingletonFactory.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/CompoundConfiguration.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/CoordinatedStateManagerFactory.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/CoprocessorEnvironment.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/DoNotRetryIOException.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/DroppedSnapshotException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/HBaseConfiguration.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/HBaseIOException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/HColumnDescriptor.java
 0
 0
 50
-
+
 org/apache/hadoop/hbase/HConstants.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/HRegionInfo.java
 0
 0
 58
-
+
 org/apache/hadoop/hbase/HRegionLocation.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/HTableDescriptor.java
 0
 0
 47
-
+
 org/apache/hadoop/hbase/HealthChecker.java
 0
 0
 17
-
+
 org/apache/hadoop/hbase/JMXListener.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/KeyValue.java
 0
 0
 136
-
+
 org/apache/hadoop/hbase/KeyValueTestUtil.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/KeyValueUtil.java
 0
 0
 30
-
+
 org/apache/hadoop/hbase/LocalHBaseCluster.java
 0
 0
 23
-
+
 org/apache/hadoop/hbase/MetaMutationAnnotation.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/MetaTableAccessor.java
 0
 0
 116
-
+
 org/apache/hadoop/hbase/NamespaceDescriptor.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ProcedureInfo.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/RegionLoad.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/RegionLocations.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/RegionStateListener.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/ScheduledChore.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/ServerLoad.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/ServerName.java
 0
 0
 34
-
+
 org/apache/hadoop/hbase/SettableSequenceId.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/SettableTimestamp.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/SplitLogCounters.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/SplitLogTask.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/Streamable.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/TableDescriptor.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/TableDescriptors.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/TableInfoMissingException.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/TableName.java
 0
 0
 20
-
+
 org/apache/hadoop/hbase/TagType.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/TagUtil.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ZKNamespaceManager.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/ZNodeClearer.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/backup/HFileArchiver.java
 0
 0
 17
-
+
 org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/backup/example/ZKTableArchiveClient.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/classification/tools/ExcludePrivateAnnotationsStandardDoclet.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/classification/tools/IncludePublicAnnotationsStandardDoclet.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/classification/tools/StabilityOptions.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/AbstractClientScanner.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/Action.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/Admin.java
 0
 0
 69
-
+
 org/apache/hadoop/hbase/client/Append.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/AsyncProcess.java
 0
 0
 20
-
+
 org/apache/hadoop/hbase/client/BufferedMutator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
 0
 0
 2
-
+
 org/apache/hadoop/h

[38/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
index 668286f..4ac5fdf 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
@@ -529,66 +529,66 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HTableDescriptor[]
-HBaseAdmin.deleteTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern)
+Admin.deleteTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern)
 Delete tables matching the passed in pattern and wait on 
completion.
 
 
 
 HTableDescriptor[]
-Admin.deleteTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern)
+HBaseAdmin.deleteTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern)
 Delete tables matching the passed in pattern and wait on 
completion.
 
 
 
 HTableDescriptor[]
-HBaseAdmin.deleteTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String regex) 
-
-
-HTableDescriptor[]
 Admin.deleteTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String regex)
 Deletes tables matching the passed in pattern and wait on 
completion.
 
 
-
+
 HTableDescriptor[]
-HBaseAdmin.disableTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in 
java.util.regex">Pattern pattern) 
+HBaseAdmin.deleteTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String regex) 
 
-
+
 HTableDescriptor[]
 Admin.disableTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern)
 Disable tables matching the passed in pattern and wait on 
completion.
 
 
-
+
 HTableDescriptor[]
-HBaseAdmin.disableTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String regex) 
+HBaseAdmin.disableTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in 
java.util.regex">Pattern pattern) 
 
-
+
 HTableDescriptor[]
 Admin.disableTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String regex)
 Disable tables matching the passed in pattern and wait on 
completion.
 
 
-
+
 HTableDescriptor[]
-HBaseAdmin.enableTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in 
java.util.regex">Pattern pattern) 
+HBaseAdmin.disableTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String regex) 
 
-
+
 HTableDescriptor[]
 Admin.enableTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern pattern)
 Enable tables matching the passed in pattern and wait on 
completion.
 
 
-
+
 HTableDescriptor[]
-HBaseAdmin.enableTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String regex) 
+HBaseAdmin.enableTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in 
java.util.regex">Pattern pattern) 
 
-
+
 HTableDescriptor[]
 Admin.enableTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String regex)
 Enable tables matching the passed in pattern and wait on 
completion.
 
 
+
+HTableDescriptor[]
+HBaseAdmin.enableTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String regex) 
+
 
 HTableDescriptor
 HConnection.getHTableDescriptor(byte[] tableName)
@@ -658,39 +658,39 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-protected HTableDescriptor
-HBaseAdmin.CreateTableFuture.getTableDescriptor() 
-
-
-protected HTableDescriptor
-HBaseAdmin.Tab

[34/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/class-use/Stoppable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Stoppable.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Stoppable.html
index fe6e1f2..33cbf05 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Stoppable.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Stoppable.html
@@ -901,7 +901,7 @@
 
 
 void
-ReplicationSourceInterface.init(org.apache.hadoop.conf.Configuration conf,
+ReplicationSource.init(org.apache.hadoop.conf.Configuration conf,
 org.apache.hadoop.fs.FileSystem fs,
 ReplicationSourceManager manager,
 ReplicationQueues replicationQueues,
@@ -911,12 +911,12 @@
 http://docs.oracle.com/javase/7/docs/api/java/util/UUID.html?is-external=true";
 title="class or interface in java.util">UUID clusterId,
 ReplicationEndpoint replicationEndpoint,
 MetricsSource metrics)
-Initializer for the source
+Instantiation method used by region servers
 
 
 
 void
-ReplicationSource.init(org.apache.hadoop.conf.Configuration conf,
+ReplicationSourceInterface.init(org.apache.hadoop.conf.Configuration conf,
 org.apache.hadoop.fs.FileSystem fs,
 ReplicationSourceManager manager,
 ReplicationQueues replicationQueues,
@@ -926,7 +926,7 @@
 http://docs.oracle.com/javase/7/docs/api/java/util/UUID.html?is-external=true";
 title="class or interface in java.util">UUID clusterId,
 ReplicationEndpoint replicationEndpoint,
 MetricsSource metrics)
-Instantiation method used by region servers
+Initializer for the source
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/class-use/TableExistsException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableExistsException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableExistsException.html
index 4f49372..967b9af 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableExistsException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableExistsException.html
@@ -98,34 +98,29 @@
 
 
 void
-HBaseAdmin.cloneSnapshot(byte[] snapshotName,
-  TableName tableName) 
-
-
-void
 Admin.cloneSnapshot(byte[] snapshotName,
   TableName tableName)
 Create a new table by cloning the snapshot content.
 
 
-
+
 void
-HBaseAdmin.cloneSnapshot(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
+HBaseAdmin.cloneSnapshot(byte[] snapshotName,
   TableName tableName) 
 
-
+
 void
 Admin.cloneSnapshot(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
   TableName tableName)
 Create a new table by cloning the snapshot content.
 
 
-
-http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
-HBaseAdmin.cloneSnapshotAsync(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
-TableName tableName) 
-
 
+void
+HBaseAdmin.cloneSnapshot(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
+  TableName tableName) 
+
+
 http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
 Admin.cloneSnapshotAsync(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
 TableName tableName)
@@ -133,6 +128,11 @@
  and wait for it be completely cloned.
 
 
+
+http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
+HBaseAdmin.cloneSnapshotAsync(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String snapshotName,
+TableName tableName) 
+
 
 
 



[23/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index c421b6d..6ad0fda 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -179,24 +179,24 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
-  org.apache.hadoop.mapred.JobConf job,
-  
org.apache.hadoop.mapred.Reporter reporter) 
-
-
-org.apache.hadoop.mapred.RecordReader
 TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
   
org.apache.hadoop.mapred.Reporter reporter)
 Builds a TableRecordReader.
 
 
-
+
 org.apache.hadoop.mapred.RecordReader
 TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
   
org.apache.hadoop.mapred.Reporter reporter) 
 
+
+org.apache.hadoop.mapred.RecordReader
+MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+  org.apache.hadoop.mapred.JobConf job,
+  
org.apache.hadoop.mapred.Reporter reporter) 
+
 
 
 
@@ -214,12 +214,10 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-  Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+  Result values,
   org.apache.hadoop.mapred.OutputCollector output,
-  org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
+  org.apache.hadoop.mapred.Reporter reporter) 
 
 
 void
@@ -232,10 +230,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-  Result values,
+IdentityTableMap.map(ImmutableBytesWritable key,
+  Result value,
   org.apache.hadoop.mapred.OutputCollector output,
-  org.apache.hadoop.mapred.Reporter reporter) 
+  org.apache.hadoop.mapred.Reporter reporter)
+Pass the key, value to reduce
+
 
 
 boolean
@@ -277,12 +277,10 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-  Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+  Result values,
   org.apache.hadoop.mapred.OutputCollector output,
-  org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
+  org.apache.hadoop.mapred.Reporter reporter) 
 
 
 void
@@ -295,10 +293,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-  Result values,
+IdentityTableMap.map(ImmutableBytesWritable key,
+  Result value,
   org.apache.hadoop.mapred.OutputCollector output,
-  org.apache.hadoop.mapred.Reporter reporter) 
+  org.apache.hadoop.mapred.Reporter reporter)
+Pass the key, value to reduce
+
 
 
 void
@@ -349,11 +349,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ImmutableBytesWritable
-HashTable.TableHash.Reader.key 
+MultithreadedTableMapper.SubMapRecordReader.key 
 
 
 private ImmutableBytesWritable
-MultithreadedTableMapper.SubMapRecordReader.key 
+HashTable.TableHash.Reader.key 
 
 
 (package private) ImmutableBytesWritable
@@ -429,18 +429,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ImmutableBytesWritable
-TableSnapshotInputFormatImpl.RecordReader.getCurrentKey() 
+MultithreadedTableMapper.SubMapRecordReader.getCurrentKey() 
 
 
 ImmutableBytesWritable
-TableRecordReader.getCurrentKey()
-Returns the current key.
-
+TableSnapshotInputFormatImpl.RecordReader.getCurrentKey() 
 
 
 ImmutableBytesWritable
-HashTable.TableHash.Reader.getCurrentKey()
-Get the current key
+TableRecordReader.getCurrentKey()
+Returns the current key.
 
 
 
@@ -449,7 +447,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ImmutableBytesWritable
-MultithreadedTable

[15/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregate.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregate.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregate.html
index 9bb4e19..f555b3d 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregate.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregate.html
@@ -114,17 +114,35 @@
 
 
 long
+getMemstoresSize(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String table)
+Get the memory store size against this table
+
+
+
+long
 getReadRequestsCount(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String table)
 Get the number of read requests that have been issued 
against this table
 
 
+
+long
+getStoreFilesSize(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String table)
+Get the store file size against this table
+
+
 
 long
+getTableSize(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String table)
+Get the table region size against this table
+
+
+
+long
 getTotalRequestsCount(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String table)
 Get the total number of requests that have been issued 
against this table
 
 
-
+
 long
 getWriteRequestsCount(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String table)
 Get the number of write requests that have been issued 
against this table
@@ -168,13 +186,43 @@
 
 
 
-
+
 
 getTotalRequestsCount
 long getTotalRequestsCount(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String table)
 Get the total number of requests that have been issued 
against this table
 
 
+
+
+
+
+
+getMemstoresSize
+long getMemstoresSize(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String table)
+Get the memory store size against this table
+
+
+
+
+
+
+
+getStoreFilesSize
+long getStoreFilesSize(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String table)
+Get the store file size against this table
+
+
+
+
+
+
+
+getTableSize
+long getTableSize(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String table)
+Get the table region size against this table
+
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregateImpl.MetricsTableValues.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregateImpl.MetricsTableValues.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregateImpl.MetricsTableValues.html
index 0d6602b..8769c40 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregateImpl.MetricsTableValues.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregateImpl.MetricsTableValues.html
@@ -99,7 +99,7 @@
 
 
 
-private static class MetricsTableWrapperAggregateImpl.MetricsTableValues
+private static class MetricsTableWrapperAggregateImpl.MetricsTableValues
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 
 
@@ -121,14 +121,26 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 private long
+memstoresSize 
+
+
+private long
 readRequestsCount 
 
+
+private long
+storeFilesSize 
+
 
 private long
-totalRequestsCount 
+tableSize 
 
 
 private long
+totalRequestsCount 
+
+
+private long
 writeRequestsCount 
 
 
@@ -167,22 +179,46 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 long
+getMemstoresSize() 
+
+
+long
 getReadRequestsCount() 
 
+
+long
+getStoreFilesSize() 
+
 
 long
-getTotalRequestsCount() 
+getTableSize() 
 
 
 long
+getTotalRequestsCount() 
+
+
+long
 getWriteRequestsCount() 
 
+
+void
+setMemstoresSize(long memstoresSize) 
+
 
 void
 setReadRequestsCount(long readRequestsCount) 
 
 
 void
+setStoreFilesSize(long storeFilesSize) 
+
+
+void
+setTableSize(long tableSize) 
+
+
+void
 setTotalRequestsCount(long totalRequestsCount) 
 
 
@@ -217,7 +253,7 @@ extends ht

[12/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Store.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Store.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Store.html
index e187539..02d3fff 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Store.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/Store.html
@@ -491,13 +491,13 @@
 
 
 
-protected Store
-StoreFlusher.store 
-
-
 private Store
 CompactSplitThread.CompactionRunner.store 
 
+
+protected Store
+StoreFlusher.store 
+
 
 protected Store
 StoreScanner.store 
@@ -631,29 +631,29 @@
 
 
 protected void
-DateTieredStoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
+StripeStoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
 Store store,
-CellComparator kvComparator) 
+CellComparator comparator) 
 
 
-protected abstract void
-StoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
+protected void
+DefaultStoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
 Store store,
-CellComparator kvComparator)
-Create the StoreEngine's components.
-
+CellComparator kvComparator) 
 
 
 protected void
-DefaultStoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
+DateTieredStoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
 Store store,
 CellComparator kvComparator) 
 
 
-protected void
-StripeStoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
+protected abstract void
+StoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
 Store store,
-CellComparator comparator) 
+CellComparator kvComparator)
+Create the StoreEngine's components.
+
 
 
 private void
@@ -663,18 +663,18 @@
 
 
 protected StoreEngine
-HMobStore.createStoreEngine(Store store,
+HStore.createStoreEngine(Store store,
   
org.apache.hadoop.conf.Configuration conf,
-  CellComparator cellComparator)
-Creates the mob store engine.
+  CellComparator kvComparator)
+Creates the store engine configured for the given 
Store.
 
 
 
 protected StoreEngine
-HStore.createStoreEngine(Store store,
+HMobStore.createStoreEngine(Store store,
   
org.apache.hadoop.conf.Configuration conf,
-  CellComparator kvComparator)
-Creates the store engine configured for the given 
Store.
+  CellComparator cellComparator)
+Creates the mob store engine.
 
 
 
@@ -799,33 +799,33 @@
 
 
 CompactionRequest
-CompactionRequestor.requestCompaction(Region r,
+CompactSplitThread.requestCompaction(Region r,
   Store s,
   http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String why,
   CompactionRequest request) 
 
 
 CompactionRequest
-CompactSplitThread.requestCompaction(Region r,
+CompactionRequestor.requestCompaction(Region r,
   Store s,
   http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String why,
   CompactionRequest request) 
 
 
 CompactionRequest
-CompactionRequestor.requestCompaction(Region r,
+CompactSplitThread.requestCompaction(Region r,
   Store s,
   http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String why,
-  int pri,
+  int priority,
   CompactionRequest request,
   User user) 
 
 
 CompactionRequest
-CompactSplitThread.requestCompaction(Region r,
+CompactionRequestor.requestCompaction(Region r,
   Store s,
   http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String why,
-  int priority,
+  int pri,
   CompactionRequest request,
   User user)

[49/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html 
b/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html
index ac83789..f3bb248 100644
--- a/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html
+++ b/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html
@@ -243,7 +243,7 @@ the order they are declared.
 
 
 values
-public static IsolationLevel[] values()
+public static IsolationLevel[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -260,7 +260,7 @@ for (IsolationLevel c : IsolationLevel.values())
 
 
 valueOf
-public static IsolationLevel valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static IsolationLevel valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/apidocs/org/apache/hadoop/hbase/client/class-use/Admin.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Admin.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Admin.html
index 63a3845..72528d4 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Admin.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Admin.html
@@ -105,14 +105,14 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 Admin
-Connection.getAdmin()
+HConnection.getAdmin()
+Deprecated. 
 Retrieve an Admin implementation to administer an HBase 
cluster.
 
 
 
 Admin
-HConnection.getAdmin()
-Deprecated. 
+Connection.getAdmin()
 Retrieve an Admin implementation to administer an HBase 
cluster.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
index d0e70a0..b816f04 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
@@ -138,19 +138,19 @@ the order they are declared.
 
 
 
+Scan
+Scan.setConsistency(Consistency consistency) 
+
+
 Get
 Get.setConsistency(Consistency consistency) 
 
-
+
 Query
 Query.setConsistency(Consistency consistency)
 Sets the consistency level for this operation
 
 
-
-Scan
-Scan.setConsistency(Consistency consistency) 
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
index 4621a1a..6ff7a5c 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
@@ -191,27 +191,27 @@ the order they are declared.
 
 
 
-Delete
-Delete.setDurability(Durability d) 
+Append
+Append.setDurability(Durability d) 
 
 
-Put
-Put.setDurability(Durability d) 
-
-
 Increment
 Increment.setDurability(Durability d) 
 
+
+Put
+Put.setDurability(Durability d) 
+
 
+Delete
+Delete.setDurability(Durability d) 
+
+
 Mutation
 Mutation.setDurability(Durability d)
 Set the durability for this mutation
 
 
-
-Append
-Append.setDurability(Durability d) 
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/apidocs/org/apache/hadoop/hbase/client/class-use/IsolationLevel.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/client/class-use/IsolationLevel.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/IsolationLevel.html
index c7f564f..df9ed0a 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/IsolationLevel.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/IsolationLevel.html
@@ -131,19 +131,19 @@ the order they are declared.
 
 
 
+Scan
+Scan.setIsolationLevel(IsolationLevel level) 
+
+
 Get
 Get.setIsolationLevel(IsolationLevel level) 
 
-
+
 Query
 Query.setIsolationLevel(IsolationLevel level)
 Set the isolation level for this query.
 
 
-
-Scan
-Scan.setIsola

[40/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
index bb00db6..68116ad 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
@@ -863,19 +863,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HRegionInfo
-RegionServerCallable.getHRegionInfo() 
+ScannerCallable.getHRegionInfo() 
 
 
 HRegionInfo
-ScannerCallableWithReplicas.getHRegionInfo() 
+MultiServerCallable.getHRegionInfo() 
 
 
 HRegionInfo
-MultiServerCallable.getHRegionInfo() 
+ScannerCallableWithReplicas.getHRegionInfo() 
 
 
 HRegionInfo
-ScannerCallable.getHRegionInfo() 
+RegionServerCallable.getHRegionInfo() 
 
 
 private HRegionInfo
@@ -905,28 +905,28 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HBaseAdmin.getOnlineRegions(ServerName sn) 
-
-
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 Admin.getOnlineRegions(ServerName sn)
 Get all the online regions on a region server.
 
 
+
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
+HBaseAdmin.getOnlineRegions(ServerName sn) 
+
 
 (package private) Pair
 HBaseAdmin.getRegion(byte[] regionName) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HBaseAdmin.getTableRegions(TableName tableName) 
-
-
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 Admin.getTableRegions(TableName tableName)
 Get the regions of a given table.
 
 
+
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
+HBaseAdmin.getTableRegions(TableName tableName) 
+
 
 
 
@@ -944,16 +944,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-HBaseAdmin.closeRegion(ServerName sn,
-  HRegionInfo hri) 
-
-
-void
 Admin.closeRegion(ServerName sn,
   HRegionInfo hri)
 Close a region.
 
 
+
+void
+HBaseAdmin.closeRegion(ServerName sn,
+  HRegionInfo hri) 
+
 
 private void
 HBaseAdmin.compact(ServerName sn,
@@ -1071,17 +1071,17 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-ZKSplitLogManagerCoordination.markRegionsRecovering(ServerName serverName,
+SplitLogManagerCoordination.markRegionsRecovering(ServerName serverName,
   http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true";
 title="class or interface in java.util">Set userRegions)
-Create znodes 
/hbase/recovering-regions/[region_ids...]/[failed region server names ...] for
- all regions of the passed in region servers
+Mark regions in recovering state for distributed log 
replay
 
 
 
 void
-SplitLogManagerCoordination.markRegionsRecovering(ServerName serverName,
+ZKSplitLogManagerCoordination.markRegionsRecovering(ServerName serverName,
   http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true";
 title="class or interface in java.util">Set userRegions)
-Mark regions in recovering state for distributed log 
replay
+Create znodes 
/hbase/recovering-regions/[region_ids...]/[failed region server names ...] for
+ all regions of the passed in region servers
 
 
 
@@ -1120,12 +1120,12 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-BaseMasterObserver.postAssign(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAssign(ObserverContext ctx,
 HRegionInfo regionInfo) 
 
 
 void
-BaseMasterAndRegionObserver.postAssign(ObserverContext ctx,
+BaseMasterObserver.postAssign(ObserverContext ctx,
 HRegionInfo regionInfo) 
 
 
@@ -1138,13 +1138,13 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-BaseMasterObserver.postCreateTable(ObserverContext ctx,
+BaseMasterAndRegionObserver.postCreateTable(ObserverContext ctx,
   HTableDescriptor desc,
  

[03/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/util/class-use/Pair.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/class-use/Pair.html 
b/devapidocs/org/apache/hadoop/hbase/util/class-use/Pair.html
index 23cc622..ab52d21 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/class-use/Pair.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/class-use/Pair.html
@@ -319,10 +319,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 PairInteger,http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer>
-HBaseAdmin.getAlterStatus(byte[] tableName) 
-
-
-PairInteger,http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer>
 Admin.getAlterStatus(byte[] tableName)
 Deprecated. 
 Since 2.0.0. Will be removed in 3.0.0. Use Admin.getAlterStatus(TableName)
@@ -330,17 +326,21 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-
+
 PairInteger,http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer>
-HBaseAdmin.getAlterStatus(TableName tableName) 
+HBaseAdmin.getAlterStatus(byte[] tableName) 
 
-
+
 PairInteger,http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer>
 Admin.getAlterStatus(TableName tableName)
 Get the status of alter command - indicates how many 
regions have received the updated schema
  Asynchronous operation.
 
 
+
+PairInteger,http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer>
+HBaseAdmin.getAlterStatus(TableName tableName) 
+
 
 private PairList,http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List>
 HTable.getKeysAndRegionsInRange(byte[] startKey,
@@ -366,14 +366,14 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 Pair
-RegionLocator.getStartEndKeys()
+HRegionLocator.getStartEndKeys()
 Gets the starting and ending row keys for every region in 
the currently
  open table.
 
 
 
 Pair
-HRegionLocator.getStartEndKeys()
+RegionLocator.getStartEndKeys()
 Gets the starting and ending row keys for every region in 
the currently
  open table.
 
@@ -704,32 +704,32 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 Pair
-RpcServer.call(com.google.protobuf.BlockingService service,
+RpcServerInterface.call(com.google.protobuf.BlockingService service,
 com.google.protobuf.Descriptors.MethodDescriptor md,
 com.google.protobuf.Message param,
 CellScanner cellScanner,
 long receiveTime,
-MonitoredRPCHandler status)
-This is a server side method, which is invoked over 
RPC.
-
+MonitoredRPCHandler status) 
 
 
 Pair
-RpcServerInterface.call(com.google.protobuf.BlockingService service,
+RpcServer.call(com.google.protobuf.BlockingService service,
 com.google.protobuf.Descriptors.MethodDescriptor md,
 com.google.protobuf.Message param,
 CellScanner cellScanner,
 long receiveTime,
-MonitoredRPCHandler status) 
+MonitoredRPCHandler status)
+This is a server side method, which is invoked over 
RPC.
+
 
 
-protected Pair
-AsyncRpcClient.call(PayloadCarryingRpcController pcrc,
+protected abstract Pair
+AbstractRpcClient.call(PayloadCarryingRpcController pcrc,
 com.google.protobuf.Descriptors.MethodDescriptor md,
 com.google.protobuf.Message param,
 com.google.protobuf.Message returnType,
 User ticket,
-http://docs.oracle.com/javase/7/docs/api/java/net/InetSocketAddress.html?is-external=true";
 title="class or interface in java.net">I

[06/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/security/class-use/UserProvider.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/class-use/UserProvider.html 
b/devapidocs/org/apache/hadoop/hbase/security/class-use/UserProvider.html
index 7131c6c..c926d96 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/UserProvider.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/UserProvider.html
@@ -339,13 +339,13 @@ service.
 
 
 private UserProvider
-AccessController.userProvider
-Provider for mapping principal names to Users
-
+SecureBulkLoadEndpoint.userProvider 
 
 
 private UserProvider
-SecureBulkLoadEndpoint.userProvider 
+AccessController.userProvider
+Provider for mapping principal names to Users
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
index c1a7fe1..4eed6cf 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
@@ -152,9 +152,9 @@
 
 java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.security.SaslStatus
-org.apache.hadoop.hbase.security.AuthMethod
 org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection
+org.apache.hadoop.hbase.security.AuthMethod
+org.apache.hadoop.hbase.security.SaslStatus
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/security/visibility/class-use/Authorizations.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/visibility/class-use/Authorizations.html
 
b/devapidocs/org/apache/hadoop/hbase/security/visibility/class-use/Authorizations.html
index a33ad5e..20320be 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/security/visibility/class-use/Authorizations.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/visibility/class-use/Authorizations.html
@@ -114,18 +114,18 @@
 
 
 
-Query
-Query.setAuthorizations(Authorizations authorizations)
-Sets the authorizations to be used by this Query
-
+Get
+Get.setAuthorizations(Authorizations authorizations) 
 
 
 Scan
 Scan.setAuthorizations(Authorizations authorizations) 
 
 
-Get
-Get.setAuthorizations(Authorizations authorizations) 
+Query
+Query.setAuthorizations(Authorizations authorizations)
+Sets the authorizations to be used by this Query
+
 
 
 
@@ -148,31 +148,31 @@
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString>
-FeedUserAuthScanLabelGenerator.getLabels(User user,
-  Authorizations authorizations) 
-
-
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString>
 EnforcingScanLabelGenerator.getLabels(User user,
   Authorizations authorizations) 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString>
 ScanLabelGenerator.getLabels(User user,
   Authorizations authorizations)
 Helps to get a list of lables associated with an UGI
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString>
 DefinedSetFilterScanLabelGenerator.getLabels(User user,
   Authorizations authorizations) 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString>
 SimpleScanLabelG

[30/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
index a87b0d6..dd062e6 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
@@ -375,13 +375,13 @@ service.
 
 
 Result
-HTable.append(Append append)
+Table.append(Append append)
 Appends values to one or more columns within a single 
row.
 
 
 
 Result
-Table.append(Append append)
+HTable.append(Append append)
 Appends values to one or more columns within a single 
row.
 
 
@@ -398,8 +398,8 @@ service.
 
 
 
-Result
-RpcRetryingCallerWithReadReplicas.ReplicaRegionServerCallable.call(int callTimeout) 
+Result[]
+ScannerCallable.call(int callTimeout) 
 
 
 Result[]
@@ -410,8 +410,8 @@ service.
 ClientSmallScanner.SmallScannerCallable.call(int timeout) 
 
 
-Result[]
-ScannerCallable.call(int callTimeout) 
+Result
+RpcRetryingCallerWithReadReplicas.ReplicaRegionServerCallable.call(int callTimeout) 
 
 
 (package private) Result[]
@@ -474,13 +474,13 @@ service.
 
 
 Result
-HTable.get(Get get)
+Table.get(Get get)
 Extracts certain cells from a given row.
 
 
 
 Result
-Table.get(Get get)
+HTable.get(Get get)
 Extracts certain cells from a given row.
 
 
@@ -495,13 +495,13 @@ service.
 
 
 Result[]
-HTable.get(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List gets)
+Table.get(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List gets)
 Extracts certain cells from the given rows, in batch.
 
 
 
 Result[]
-Table.get(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List gets)
+HTable.get(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List gets)
 Extracts certain cells from the given rows, in batch.
 
 
@@ -511,13 +511,13 @@ service.
 
 
 Result
-HTable.increment(Increment increment)
+Table.increment(Increment increment)
 Increments one or more columns within a single row.
 
 
 
 Result
-Table.increment(Increment increment)
+HTable.increment(Increment increment)
 Increments one or more columns within a single row.
 
 
@@ -527,33 +527,33 @@ service.
 
 
 Result
-ClientSmallReversedScanner.next() 
+ClientSmallScanner.next() 
 
 
 Result
-ResultScanner.next()
-Grab the next row's worth of values.
-
+ClientAsyncPrefetchScanner.next() 
 
 
 Result
-ClientSmallScanner.next() 
+ClientSimpleScanner.next() 
 
 
 Result
-ClientSimpleScanner.next() 
+ResultScanner.next()
+Grab the next row's worth of values.
+
 
 
 Result
-ClientAsyncPrefetchScanner.next() 
+ClientSmallReversedScanner.next() 
 
 
 Result
-TableSnapshotScanner.next() 
+ClientSideRegionScanner.next() 
 
 
 Result
-ClientSideRegionScanner.next() 
+TableSnapshotScanner.next() 
 
 
 Result[]
@@ -908,24 +908,24 @@ service.
 
 
 org.apache.hadoop.mapred.RecordReader
-MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
-  org.apache.hadoop.mapred.JobConf job,
-  
org.apache.hadoop.mapred.Reporter reporter) 
-
-
-org.apache.hadoop.mapred.RecordReader
 TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
   
org.apache.hadoop.mapred.Reporter reporter)
 Builds a TableRecordReader.
 
 
-
+
 org.apache.hadoop.mapred.RecordReader
 TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
   
org.apache.hadoop.mapred.Reporter reporter) 
 
+
+org.apache.hadoop.mapred.RecordReader
+MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+  org.apache.hadoop.mapred.JobConf job,
+  
org.apache.hadoop.mapred.Reporter reporter) 
+
 
 
 
@@ -943,12 +943,10 @@ service.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-  Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+  Result values,
   org.apache.hadoop.mapred.OutputCollector output,
-  org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
+  org.apache.hadoop.mapred.Reporter reporter) 
 
 
 void
@@ -961,10 +959,12 @@ service.
 
 
 void
-Ro

[42/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
index 53022aa..fe81594 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
@@ -246,7 +246,7 @@
 
 
 DataBlockEncoder.EncodedSeeker
-CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
+DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
@@ -256,20 +256,20 @@
 
 
 DataBlockEncoder.EncodedSeeker
-PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
-HFileBlockDecodingContext decodingCtx) 
+DataBlockEncoder.createSeeker(CellComparator comparator,
+HFileBlockDecodingContext decodingCtx)
+Create a HFileBlock seeker which find KeyValues within a 
block.
+
 
 
 DataBlockEncoder.EncodedSeeker
-DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
+CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-DataBlockEncoder.createSeeker(CellComparator comparator,
-HFileBlockDecodingContext decodingCtx)
-Create a HFileBlock seeker which find KeyValues within a 
block.
-
+PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
+HFileBlockDecodingContext decodingCtx) 
 
 
 
@@ -298,31 +298,31 @@
 
 
 
-protected CellComparator
-HFileWriterImpl.comparator
-Key comparator.
-
-
-
 private CellComparator
 HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
 Needed doing lookup on blocks.
 
 
-
+
 protected CellComparator
 HFile.WriterFactory.comparator 
 
+
+private CellComparator
+HFileReaderImpl.comparator
+Key comparator
+
+
 
 protected CellComparator
-CompoundBloomFilterBase.comparator
-Comparator used to compare Bloom filter keys
+HFileWriterImpl.comparator
+Key comparator.
 
 
 
-private CellComparator
-HFileReaderImpl.comparator
-Key comparator
+protected CellComparator
+CompoundBloomFilterBase.comparator
+Comparator used to compare Bloom filter keys
 
 
 
@@ -500,44 +500,44 @@
 
 
 private CellComparator
-StoreFileWriter.Builder.comparator 
+ScanInfo.comparator 
 
 
 protected CellComparator
-StripeStoreFlusher.StripeFlushRequest.comparator 
+HRegion.RegionScannerImpl.comparator 
 
 
-protected CellComparator
-HRegion.RegionScannerImpl.comparator 
+private CellComparator
+HStore.comparator 
 
 
 private CellComparator
-ScanInfo.comparator 
+Segment.comparator 
 
 
 private CellComparator
-AbstractMemStore.comparator 
+StoreFileWriter.Builder.comparator 
 
 
 private CellComparator
-HStore.comparator 
+AbstractMemStore.comparator 
 
 
-private CellComparator
-Segment.comparator 
+protected CellComparator
+StripeStoreFlusher.StripeFlushRequest.comparator 
 
 
 protected CellComparator
 StripeMultiFileWriter.comparator 
 
 
-protected CellComparator
-KeyValueHeap.KVScannerComparator.kvComparator 
-
-
 private CellComparator
 DefaultStoreFileManager.kvComparator 
 
+
+protected CellComparator
+KeyValueHeap.KVScannerComparator.kvComparator 
+
 
 private CellComparator
 ScanQueryMatcher.rowComparator
@@ -565,27 +565,19 @@
 
 
 CellComparator
-KeyValueHeap.KVScannerComparator.getComparator() 
+ScanInfo.getComparator() 
 
 
-CellComparator
-Store.getComparator() 
+(package private) CellComparator
+StoreFileScanner.getComparator() 
 
 
 CellComparator
-ScanInfo.getComparator() 
-
-
-CellComparator
 StoreFileReader.getComparator() 
 
-
-(package private) CellComparator
-StoreFileScanner.getComparator() 
-
 
-protected CellComparator
-AbstractMemStore.getComparator() 
+CellComparator
+Store.getComparator() 
 
 
 CellComparator
@@ -597,6 +589,14 @@
 Returns the Cell comparator used by this segment
 
 
+
+protected CellComparator
+AbstractMemStore.getComparator() 
+
+
+CellComparator
+KeyValueHeap.KVScannerComparator.getComparator() 
+
 
 
 
@@ -630,29 +630,29 @@
 
 
 protected void
-DateTieredStoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
+StripeStoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
 Store store,
-CellComparator kvComparator) 
+CellComparator comparator) 
 
 
-protected abstract void
-StoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
+protected void
+DefaultStoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
 Store store,
-CellComparator kvComparator)
-Create the StoreEngine's components.
-
+C

[11/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.html
index 49b1146..8353b1f 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.html
@@ -602,13 +602,13 @@
 
 
 
-com.google.common.collect.ImmutableCollection
-StripeStoreFileManager.clearCompactedFiles() 
-
-
 http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
 DefaultStoreFileManager.clearCompactedFiles() 
 
+
+com.google.common.collect.ImmutableCollection
+StripeStoreFileManager.clearCompactedFiles() 
+
 
 com.google.common.collect.ImmutableCollection
 StoreFileManager.clearFiles()
@@ -617,25 +617,25 @@
 
 
 com.google.common.collect.ImmutableCollection
-StripeStoreFileManager.clearFiles() 
+DefaultStoreFileManager.clearFiles() 
 
 
 com.google.common.collect.ImmutableCollection
-DefaultStoreFileManager.clearFiles() 
+StripeStoreFileManager.clearFiles() 
 
 
+http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapList>
+HRegion.close()
+Close down this HRegion.
+
+
+
 http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
 Store.close()
 Close all the readers We don't need to worry about 
subsequent requests because the Region
  holds a write lock that will prevent any more reads or writes.
 
 
-
-http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapList>
-HRegion.close()
-Close down this HRegion.
-
-
 
 com.google.common.collect.ImmutableCollection
 HStore.close() 
@@ -666,16 +666,16 @@
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HMobStore.compact(CompactionContext compaction,
+HStore.compact(CompactionContext compaction,
   ThroughputController throughputController)
-The compaction in the store of mob.
+Compact the StoreFiles.
 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HStore.compact(CompactionContext compaction,
+HMobStore.compact(CompactionContext compaction,
   ThroughputController throughputController)
-Compact the StoreFiles.
+The compaction in the store of mob.
 
 
 
@@ -710,15 +710,15 @@
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
+DefaultStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey) 
+
+
+http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
 StripeStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey)
 See StoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue)
  for details on this methods.
 
 
-
-http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
-DefaultStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey) 
-
 
 http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
 StoreFileManager.getCompactedfiles()
@@ -728,11 +728,11 @@
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
-StripeStoreFileManager.getCompactedfiles() 
+DefaultStoreFileManager.getCompactedfiles() 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
-DefaultStoreFileManager.getCompactedfiles() 
+StripeStoreFileManager.getCompactedfiles() 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in java.util">ArrayListList>
@@ -748,13 +748,13 @@
 
 

[10/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileScanner.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileScanner.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileScanner.html
index 11f1f14..095938e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileScanner.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileScanner.html
@@ -246,14 +246,14 @@
 
 
 InternalScanner
-Compactor.InternalScannerFactory.createScanner(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
+StripeCompactor.StripeInternalScannerFactory.createScanner(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
   ScanType scanType,
   Compactor.FileDetails fd,
   long smallestReadPoint) 
 
 
 InternalScanner
-StripeCompactor.StripeInternalScannerFactory.createScanner(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
+Compactor.InternalScannerFactory.createScanner(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
   ScanType scanType,
   Compactor.FileDetails fd,
   long smallestReadPoint) 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileWriter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileWriter.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileWriter.html
index 2ede2d9..5d964e7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileWriter.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileWriter.html
@@ -403,17 +403,17 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-protected abstract http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
-AbstractMultiFileWriter.writers() 
-
-
 protected http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
 DateTieredMultiFileWriter.writers() 
 
-
+
 protected http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
 StripeMultiFileWriter.writers() 
 
+
+protected abstract http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
+AbstractMultiFileWriter.writers() 
+
 
 
 
@@ -431,13 +431,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 protected void
-AbstractMultiFileWriter.preCloseWriter(StoreFileWriter writer)
-Subclasses override this method to be called before we 
close the give writer.
-
+StripeMultiFileWriter.preCloseWriter(StoreFileWriter writer) 
 
 
 protected void
-StripeMultiFileWriter.preCloseWriter(StoreFileWriter writer) 
+AbstractMultiFileWriter.preCloseWriter(StoreFileWriter writer)
+Subclasses override this method to be called before we 
close the give writer.
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StripeStoreConfig.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StripeStoreConfig.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StripeStoreConfig.html
index 961cd25..064e800 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StripeStoreConfig.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StripeStoreConfig.html
@@ -100,11 +100,11 @@
 
 
 private StripeStoreConfig
-StripeStoreFileManager.config 
+StripeStoreEngine.config 
 
 
 private StripeStoreConfig
-StripeStoreEngine.config 
+StripeStoreFileManager.config 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/TimeRangeTracker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/

[21/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheStats.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheStats.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheStats.html
index 4143193..b0b221b 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheStats.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheStats.html
@@ -150,14 +150,14 @@
 
 
 CacheStats
-CombinedBlockCache.getStats() 
-
-
-CacheStats
 BlockCache.getStats()
 Get the statistics for this block cache.
 
 
+
+CacheStats
+CombinedBlockCache.getStats() 
+
 
 CacheStats
 LruBlockCache.getStats()

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
index 3693542..be720cb 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
@@ -168,13 +168,6 @@
 
 
 Cacheable
-CombinedBlockCache.getBlock(BlockCacheKey cacheKey,
-boolean caching,
-boolean repeat,
-boolean updateCacheMetrics) 
-
-
-Cacheable
 BlockCache.getBlock(BlockCacheKey cacheKey,
 boolean caching,
 boolean repeat,
@@ -182,14 +175,14 @@
 Fetch block from cache.
 
 
-
+
 Cacheable
-InclusiveCombinedBlockCache.getBlock(BlockCacheKey cacheKey,
+CombinedBlockCache.getBlock(BlockCacheKey cacheKey,
 boolean caching,
 boolean repeat,
 boolean updateCacheMetrics) 
 
-
+
 Cacheable
 LruBlockCache.getBlock(BlockCacheKey cacheKey,
 boolean caching,
@@ -198,6 +191,13 @@
 Get the buffer of the block with the specified name.
 
 
+
+Cacheable
+InclusiveCombinedBlockCache.getBlock(BlockCacheKey cacheKey,
+boolean caching,
+boolean repeat,
+boolean updateCacheMetrics) 
+
 
 Cacheable
 MemcachedBlockCache.getBlock(BlockCacheKey cacheKey,
@@ -245,16 +245,16 @@
 
 
 void
-CombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
-Cacheable buf) 
-
-
-void
 BlockCache.cacheBlock(BlockCacheKey cacheKey,
 Cacheable buf)
 Add block to cache (defaults to not in-memory).
 
 
+
+void
+CombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
+Cacheable buf) 
+
 
 void
 LruBlockCache.cacheBlock(BlockCacheKey cacheKey,
@@ -269,13 +269,6 @@
 
 
 void
-CombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
-Cacheable buf,
-boolean inMemory,
-boolean cacheDataInL1) 
-
-
-void
 BlockCache.cacheBlock(BlockCacheKey cacheKey,
 Cacheable buf,
 boolean inMemory,
@@ -283,14 +276,14 @@
 Add block to cache.
 
 
-
+
 void
-InclusiveCombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
+CombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
 Cacheable buf,
 boolean inMemory,
 boolean cacheDataInL1) 
 
-
+
 void
 LruBlockCache.cacheBlock(BlockCacheKey cacheKey,
 Cacheable buf,
@@ -299,6 +292,13 @@
 Cache the block with the specified name and buffer.
 
 
+
+void
+InclusiveCombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
+Cacheable buf,
+boolean inMemory,
+boolean cacheDataInL1) 
+
 
 void
 MemcachedBlockCache.cacheBlock(BlockCacheKey cacheKey,
@@ -313,17 +313,17 @@
 
 
 void
-CombinedBlockCache.returnBlock(BlockCacheKey cacheKey,
-  Cacheable block) 
-
-
-void
 BlockCache.returnBlock(BlockCacheKey cacheKey,
   Cacheable block)
 Called when the scanner using the block decides to return 
the block once its usage
  is over.
 
 
+
+void
+CombinedBlockCache.returnBlock(BlockCacheKey cacheKey,
+  Cacheable block) 
+
 
 void
 LruBlockCache.returnBlock(BlockCacheKey cacheKey,
@@ -411,24 +411,24 @@
 
 
 Cacheable
-FileMmapEngine.read(long offset,
-int length,
-CacheableDeserializer deserializer) 
-
-
-Cacheable
 IOEngine.read(long offset,
 int length,
 CacheableDeserializer deserializer)
 Transfers data from IOEngine to a Cacheable object.
 
 
-
+
 Cacheable
 ByteBufferIOEngine.read(long offset,
 int length,
 CacheableDeserializer deserializer) 
 
+
+Cacheable
+FileMmapEngine.read(long offset,
+int length,
+CacheableDeserializer deserializer) 
+
 
 Cacheable
 FileIOEngine.read(long 

[09/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/compactions/class-use/Compactor.FileDetails.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/compactions/class-use/Compactor.FileDetails.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/compactions/class-use/Compactor.FileDetails.html
index d1db97f..1f7e9b9 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/compactions/class-use/Compactor.FileDetails.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/compactions/class-use/Compactor.FileDetails.html
@@ -166,14 +166,14 @@
 
 
 InternalScanner
-Compactor.InternalScannerFactory.createScanner(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
+StripeCompactor.StripeInternalScannerFactory.createScanner(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
   ScanType scanType,
   Compactor.FileDetails fd,
   long smallestReadPoint) 
 
 
 InternalScanner
-StripeCompactor.StripeInternalScannerFactory.createScanner(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
+Compactor.InternalScannerFactory.createScanner(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
   ScanType scanType,
   Compactor.FileDetails fd,
   long smallestReadPoint) 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 3c4860b..4439ff9 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -641,24 +641,24 @@
 
 java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.regionserver.StoreScanner.StoreScannerCompactionRace
-org.apache.hadoop.hbase.regionserver.ScannerContext.NextState
+org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode
+org.apache.hadoop.hbase.regionserver.DeleteTracker.DeleteCompare
 org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactoryImpl.FactoryStorage
 org.apache.hadoop.hbase.regionserver.BloomType
-org.apache.hadoop.hbase.regionserver.Region.Operation
-org.apache.hadoop.hbase.regionserver.Region.FlushResult.Result
-org.apache.hadoop.hbase.regionserver.DefaultHeapMemoryTuner.StepDirection
 org.apache.hadoop.hbase.regionserver.MemStoreScanner.Type
+org.apache.hadoop.hbase.regionserver.RegionOpeningState
+org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
+org.apache.hadoop.hbase.regionserver.ScanType
 org.apache.hadoop.hbase.regionserver.RegionMergeTransaction.RegionMergeTransactionPhase
-org.apache.hadoop.hbase.regionserver.FlushType
-org.apache.hadoop.hbase.regionserver.DeleteTracker.DeleteResult
+org.apache.hadoop.hbase.regionserver.Region.FlushResult.Result
+org.apache.hadoop.hbase.regionserver.StoreScanner.StoreScannerCompactionRace
+org.apache.hadoop.hbase.regionserver.ScannerContext.NextState
+org.apache.hadoop.hbase.regionserver.Region.Operation
 org.apache.hadoop.hbase.regionserver.SplitTransaction.SplitTransactionPhase
 org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope
-org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
-org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode
-org.apache.hadoop.hbase.regionserver.ScanType
-org.apache.hadoop.hbase.regionserver.DeleteTracker.DeleteCompare
-org.apache.hadoop.hbase.regionserver.RegionOpeningState
+org.apache.hadoop.hbase.regionserver.DefaultHeapMemoryTuner.StepDirection
+org.apache.hadoop.hbase.regionserver.DeleteTracker.DeleteResult
+org.apache.hadoop.hbase.regionserver.FlushType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/class-use/ThroughputController.html
---

[19/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterRpcServices.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterRpcServices.html 
b/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterRpcServices.html
index 4bc8cf7..cdc1d40 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterRpcServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterRpcServices.html
@@ -131,13 +131,13 @@
 
 
 void
-RegionNormalizer.setMasterRpcServices(MasterRpcServices masterRpcServices)
-Set the master RPC service.
-
+SimpleRegionNormalizer.setMasterRpcServices(MasterRpcServices masterRpcServices) 
 
 
 void
-SimpleRegionNormalizer.setMasterRpcServices(MasterRpcServices masterRpcServices) 
+RegionNormalizer.setMasterRpcServices(MasterRpcServices masterRpcServices)
+Set the master RPC service.
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterServices.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterServices.html 
b/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterServices.html
index 8a292c6..933d03c 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterServices.html
@@ -242,19 +242,19 @@
 
 
 private MasterServices
-TableNamespaceManager.masterServices 
+MasterCoprocessorHost.masterServices 
 
 
 private MasterServices
-ClusterSchemaServiceImpl.masterServices 
+MasterCoprocessorHost.MasterEnvironment.masterServices 
 
 
 private MasterServices
-MasterCoprocessorHost.masterServices 
+ClusterSchemaServiceImpl.masterServices 
 
 
 private MasterServices
-MasterCoprocessorHost.MasterEnvironment.masterServices 
+TableNamespaceManager.masterServices 
 
 
 protected MasterServices
@@ -266,15 +266,15 @@
 
 
 private MasterServices
-MasterFileSystem.services 
+CatalogJanitor.services 
 
 
 private MasterServices
-ServerManager.services 
+MasterFileSystem.services 
 
 
 private MasterServices
-CatalogJanitor.services 
+ServerManager.services 
 
 
 
@@ -428,11 +428,11 @@
 
 
 private MasterServices
-StochasticLoadBalancer.LocalityCostFunction.services 
+RegionLocationFinder.services 
 
 
 private MasterServices
-RegionLocationFinder.services 
+StochasticLoadBalancer.LocalityCostFunction.services 
 
 
 
@@ -452,16 +452,16 @@
 StochasticLoadBalancer.setMasterServices(MasterServices masterServices) 
 
 
-(package private) void
-StochasticLoadBalancer.LocalityBasedCandidateGenerator.setServices(MasterServices services) 
+void
+RegionLocationFinder.setServices(MasterServices services) 
 
 
 (package private) void
-StochasticLoadBalancer.LocalityCostFunction.setServices(MasterServices srvc) 
+StochasticLoadBalancer.LocalityBasedCandidateGenerator.setServices(MasterServices services) 
 
 
-void
-RegionLocationFinder.setServices(MasterServices services) 
+(package private) void
+StochasticLoadBalancer.LocalityCostFunction.setServices(MasterServices srvc) 
 
 
 
@@ -493,13 +493,13 @@
 
 
 
-private MasterServices
-DispatchMergingRegionHandler.masterServices 
-
-
 protected MasterServices
 TableEventHandler.masterServices 
 
+
+private MasterServices
+DispatchMergingRegionHandler.masterServices 
+
 
 private MasterServices
 EnableTableHandler.services 
@@ -562,13 +562,13 @@
 
 
 void
-RegionNormalizer.setMasterServices(MasterServices masterServices)
+SimpleRegionNormalizer.setMasterServices(MasterServices masterServices)
 Set the master service.
 
 
 
 void
-SimpleRegionNormalizer.setMasterServices(MasterServices masterServices)
+RegionNormalizer.setMasterServices(MasterServices masterServices)
 Set the master service.
 
 
@@ -629,13 +629,13 @@
 
 
 
-protected MasterServices
-TakeSnapshotHandler.master 
-
-
 private MasterServices
 SnapshotManager.master 
 
+
+protected MasterServices
+TakeSnapshotHandler.master 
+
 
 private MasterServices
 MasterSnapshotVerifier.services 
@@ -739,17 +739,17 @@
 
 
 
+void
+MasterProcedureManagerHost.initialize(MasterServices master,
+MetricsMaster metricsMaster) 
+
+
 abstract void
 MasterProcedureManager.initialize(MasterServices master,
 MetricsMaster metricsMaster)
 Initialize a globally barriered procedure for master.
 
 
-
-void
-MasterProcedureManagerHost.initialize(MasterServices master,
-MetricsMaster metricsMaster) 
-
 
 
 
@@ -827,11 +827,11 @@
 
 
 private MasterServices
-RSGroupAdminServer.master 
+RSGroupAdminEndpoint.master 
 
 
 private MasterServices
-RSGroupAdminEndpoint.master 
+RSGroupAdminServer.master 
 
 
 private MasterServices

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/master/class-use/Metric

[01/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site c4be37af2 -> 31b3fd50a


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/util/class-use/RetryCounter.RetryConfig.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/util/class-use/RetryCounter.RetryConfig.html
 
b/devapidocs/org/apache/hadoop/hbase/util/class-use/RetryCounter.RetryConfig.html
index 2c14d06..509704f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/util/class-use/RetryCounter.RetryConfig.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/util/class-use/RetryCounter.RetryConfig.html
@@ -96,11 +96,11 @@
 
 
 private RetryCounter.RetryConfig
-RetryCounterFactory.retryConfig 
+RetryCounter.retryConfig 
 
 
 private RetryCounter.RetryConfig
-RetryCounter.retryConfig 
+RetryCounterFactory.retryConfig 
 
 
 



[14/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HeapMemoryManager.TunerContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HeapMemoryManager.TunerContext.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HeapMemoryManager.TunerContext.html
index 62fa6d9..3a79826 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HeapMemoryManager.TunerContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HeapMemoryManager.TunerContext.html
@@ -121,11 +121,11 @@
 
 
 HeapMemoryManager.TunerResult
-NoOpHeapMemoryTuner.tune(HeapMemoryManager.TunerContext context) 
+DefaultHeapMemoryTuner.tune(HeapMemoryManager.TunerContext context) 
 
 
 HeapMemoryManager.TunerResult
-DefaultHeapMemoryTuner.tune(HeapMemoryManager.TunerContext context) 
+NoOpHeapMemoryTuner.tune(HeapMemoryManager.TunerContext context) 
 
 
 HeapMemoryManager.TunerResult

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HeapMemoryManager.TunerResult.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HeapMemoryManager.TunerResult.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HeapMemoryManager.TunerResult.html
index ac5d1d8..4519a30 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HeapMemoryManager.TunerResult.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HeapMemoryManager.TunerResult.html
@@ -96,11 +96,11 @@
 
 
 private static HeapMemoryManager.TunerResult
-NoOpHeapMemoryTuner.NO_OP_TUNER_RESULT 
+DefaultHeapMemoryTuner.NO_OP_TUNER_RESULT 
 
 
 private static HeapMemoryManager.TunerResult
-DefaultHeapMemoryTuner.NO_OP_TUNER_RESULT 
+NoOpHeapMemoryTuner.NO_OP_TUNER_RESULT 
 
 
 private HeapMemoryManager.TunerResult
@@ -117,11 +117,11 @@
 
 
 HeapMemoryManager.TunerResult
-NoOpHeapMemoryTuner.tune(HeapMemoryManager.TunerContext context) 
+DefaultHeapMemoryTuner.tune(HeapMemoryManager.TunerContext context) 
 
 
 HeapMemoryManager.TunerResult
-DefaultHeapMemoryTuner.tune(HeapMemoryManager.TunerContext context) 
+NoOpHeapMemoryTuner.tune(HeapMemoryManager.TunerContext context) 
 
 
 HeapMemoryManager.TunerResult

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/InternalScanner.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/InternalScanner.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/InternalScanner.html
index 625d98c..de7aab8 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/InternalScanner.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/InternalScanner.html
@@ -800,14 +800,14 @@
 
 
 InternalScanner
-Compactor.InternalScannerFactory.createScanner(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
+StripeCompactor.StripeInternalScannerFactory.createScanner(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
   ScanType scanType,
   Compactor.FileDetails fd,
   long smallestReadPoint) 
 
 
 InternalScanner
-StripeCompactor.StripeInternalScannerFactory.createScanner(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
+Compactor.InternalScannerFactory.createScanner(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List scanners,
   ScanType scanType,
   Compactor.FileDetails fd,
   long smallestReadPoint) 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/KeyValueScanner.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/KeyValueScanner.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/KeyValueScanner.html
index 82df45b..8965540 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/KeyValueScanner.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/KeyValueScanner.html
@@ -434,19 +434,19 @@
 
 
 protected KeyValueScanner
-HMobStore.createScanner(Scan scan,
+HStore.createScanner(Scan scan,
   h

[25/51] [partial] hbase-site git commit: Published site at 57e1dbc8a65071df3ccd4e0c1f7d124d6a03f0ed.

2016-04-22 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
index 8c1e7ae..256b142 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
@@ -152,15 +152,15 @@
 
 
 private RegionCoprocessorEnvironment
-AggregateImplementation.env 
+MultiRowMutationEndpoint.env 
 
 
 private RegionCoprocessorEnvironment
-MultiRowMutationEndpoint.env 
+BaseRowProcessorEndpoint.env 
 
 
 private RegionCoprocessorEnvironment
-BaseRowProcessorEndpoint.env 
+AggregateImplementation.env 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
 
b/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
index ca4c0fe..6ecfef4 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
@@ -297,14 +297,6 @@
 
 
 void
-ProcedureCoordinatorRpcs.sendAbortToMembers(Procedure procName,
-ForeignException cause)
-Notify the members that the coordinator has aborted the 
procedure and that it should release
- barrier resources.
-
-
-
-void
 ZKProcedureCoordinatorRpcs.sendAbortToMembers(Procedure proc,
 ForeignException ee)
 This is the abort message being sent by the coordinator to 
member
@@ -313,6 +305,14 @@
  coordinator.
 
 
+
+void
+ProcedureCoordinatorRpcs.sendAbortToMembers(Procedure procName,
+ForeignException cause)
+Notify the members that the coordinator has aborted the 
procedure and that it should release
+ barrier resources.
+
+
 
 void
 ZKProcedureMemberRpcs.sendMemberAborted(Subprocedure sub,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31b3fd50/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
index 4ae36d5..c59d0dd 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
@@ -148,25 +148,25 @@
 ClusterId.parseFrom(byte[] bytes) 
 
 
-static HRegionInfo
-HRegionInfo.parseFrom(byte[] bytes) 
+static HColumnDescriptor
+HColumnDescriptor.parseFrom(byte[] bytes) 
 
 
 static HTableDescriptor
 HTableDescriptor.parseFrom(byte[] bytes) 
 
 
-static HColumnDescriptor
-HColumnDescriptor.parseFrom(byte[] bytes) 
+static HRegionInfo
+HRegionInfo.parseFrom(byte[] bytes) 
 
 
-static SplitLogTask
-SplitLogTask.parseFrom(byte[] data) 
-
-
 static TableDescriptor
 TableDescriptor.parseFrom(byte[] bytes) 
 
+
+static SplitLogTask
+SplitLogTask.parseFrom(byte[] data) 
+
 
 static HRegionInfo
 HRegionInfo.parseFrom(byte[] bytes,
@@ -257,145 +257,145 @@
 ByteArrayComparable.parseFrom(byte[] pbBytes) 
 
 
-static FamilyFilter
-FamilyFilter.parseFrom(byte[] pbBytes) 
+static BinaryPrefixComparator
+BinaryPrefixComparator.parseFrom(byte[] pbBytes) 
 
 
-static ColumnPaginationFilter
-ColumnPaginationFilter.parseFrom(byte[] pbBytes) 
+static FuzzyRowFilter
+FuzzyRowFilter.parseFrom(byte[] pbBytes) 
 
 
-static ValueFilter
-ValueFilter.parseFrom(byte[] pbBytes) 
+static BitComparator
+BitComparator.parseFrom(byte[] pbBytes) 
 
 
-static KeyOnlyFilter
-KeyOnlyFilter.parseFrom(byte[] pbBytes) 
-
-
 static MultipleColumnPrefixFilter
 MultipleColumnPrefixFilter.parseFrom(byte[] pbBytes) 
 
-
-static FirstKeyOnlyFilter
-FirstKeyOnlyFilter.parseFrom(byte[] pbBytes) 
-
 
-static ColumnCountGetFilter
-ColumnCountGetFilter.parseFrom(byte[] pbBytes) 
+static RowFilter
+RowFilter.parseFrom(byte[] pbBytes) 
 
 
-static ColumnPrefixFilter
-ColumnPrefixFilter.parseFrom(byte[] pbBytes) 
+static FamilyFilter
+FamilyFilter.parseFrom(byte[] pbBytes) 
 
 
-static TimestampsFilter
-TimestampsFilter.parseFrom(byte[] pbBytes) 
+static SkipFilter
+SkipFilter.parseFrom(byte[] pbBytes) 
 
 
-static FilterList
-FilterList.parseFrom(byte[] pbBytes) 
-
-
 static PrefixFilter
 PrefixFilter.parseFrom(byte[] pbBytes) 
 
+
+static Fi

hbase git commit: HBASE-15690 Add utility to get current username

2016-04-22 Thread eclark
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14850 f17439a58 -> 613799661


HBASE-15690 Add utility to get current username

Summary:
Add a class to fine the username of the current process.
It will only call out once and is multithread safe

Test Plan: Unit Test

Differential Revision: https://reviews.facebook.net/D57081


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/61379966
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/61379966
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/61379966

Branch: refs/heads/HBASE-14850
Commit: 6137996618d9cb52db27a559c27aecd3d3673e51
Parents: f17439a
Author: Elliott Clark 
Authored: Thu Apr 21 21:17:11 2016 -0700
Committer: Elliott Clark 
Committed: Fri Apr 22 11:13:00 2016 -0700

--
 hbase-native-client/utils/BUCK  | 40 +
 hbase-native-client/utils/user-util-test.cc | 34 +++
 hbase-native-client/utils/user-util.cc  | 55 
 hbase-native-client/utils/user-util.h   | 37 
 4 files changed, 166 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/61379966/hbase-native-client/utils/BUCK
--
diff --git a/hbase-native-client/utils/BUCK b/hbase-native-client/utils/BUCK
new file mode 100644
index 000..2b65b12
--- /dev/null
+++ b/hbase-native-client/utils/BUCK
@@ -0,0 +1,40 @@
+##
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+cxx_library(name="utils",
+exported_headers=[
+"user-util.h",
+],
+srcs=[
+"user-util.cc",
+],
+deps=[
+  '//third-party:folly',
+],
+visibility=[
+'PUBLIC',
+],
+tests=[
+  ":user-util-test"
+],)
+cxx_test(name="user-util-test",
+ srcs=[
+ "user-util-test.cc",
+ ],
+ deps=[
+ ":utils",
+ ],)

http://git-wip-us.apache.org/repos/asf/hbase/blob/61379966/hbase-native-client/utils/user-util-test.cc
--
diff --git a/hbase-native-client/utils/user-util-test.cc 
b/hbase-native-client/utils/user-util-test.cc
new file mode 100644
index 000..2a7434f
--- /dev/null
+++ b/hbase-native-client/utils/user-util-test.cc
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include 
+#include 
+#include 
+
+#include "utils/user-util.h"
+
+using namespace std;
+using namespace hbase;
+
+TEST(TestUserUtil, TestGetSomething) {
+  UserUtil u_util;
+  string name = u_util.user_name();
+
+  ASSERT_GT(name.length(), 0);
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/61379966/hbase-native-client/utils/user-util.cc
--
diff --git a/hbase-native-client/utils/user-util.cc 
b/hbase-native-client/utils/user-util.cc
new file mode 100644
index 000..c4427e3
--- /dev/null
+++ b/hbase-native-client/utils/user-util.cc
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with t

[1/2] hbase git commit: HBASE-15688 Use MasterServices directly instead of casting to HMaster when possible

2016-04-22 Thread mbertozzi
Repository: hbase
Updated Branches:
  refs/heads/master 57e1dbc8a -> 1ecb10ce0


HBASE-15688 Use MasterServices directly instead of casting to HMaster when 
possible


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1ecb10ce
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1ecb10ce
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1ecb10ce

Branch: refs/heads/master
Commit: 1ecb10ce071a4b3aa6cd52bd346bf3c49437ed8d
Parents: bfca2a4
Author: Matteo Bertozzi 
Authored: Fri Apr 22 09:31:56 2016 -0700
Committer: Matteo Bertozzi 
Committed: Fri Apr 22 10:15:58 2016 -0700

--
 .../hadoop/hbase/master/MasterServices.java |   9 +-
 .../hadoop/hbase/master/ServerManager.java  |   2 +-
 .../master/procedure/MasterProcedureEnv.java|  10 +-
 .../hbase/master/MockNoopMasterServices.java| 329 +++
 .../hadoop/hbase/master/TestCatalogJanitor.java | 247 +-
 5 files changed, 343 insertions(+), 254 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1ecb10ce/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
index d6802fe..d095183 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
@@ -101,7 +101,7 @@ public interface MasterServices extends Server {
* @return Master's instance of {@link MasterQuotaManager}
*/
   MasterQuotaManager getMasterQuotaManager();
-  
+
   /**
* @return Master's instance of {@link RegionNormalizer}
*/
@@ -288,6 +288,11 @@ public interface MasterServices extends Server {
   ) throws IOException;
 
   /**
+   * @return true if master is the active one
+   */
+  boolean isActiveMaster();
+
+  /**
* @return true if master is initialized
*/
   boolean isInitialized();
@@ -297,7 +302,7 @@ public interface MasterServices extends Server {
* @param procId ID of the procedure
* @param mayInterruptIfRunning if the proc completed at least one step, 
should it be aborted?
* @return true if aborted, false if procedure already completed or does not 
exist
-   * @throws IOException 
+   * @throws IOException
*/
   public boolean abortProcedure(final long procId, final boolean 
mayInterruptIfRunning)
   throws IOException;

http://git-wip-us.apache.org/repos/asf/hbase/blob/1ecb10ce/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
index dabef71..d69c7aa 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
@@ -430,7 +430,7 @@ public class ServerManager {
 }
 // remove dead server with same hostname and port of newly checking in rs 
after master
 // initialization.See HBASE-5916 for more information.
-if ((this.services == null || ((HMaster) this.services).isInitialized())
+if ((this.services == null || this.services.isInitialized())
 && this.deadservers.cleanPreviousInstance(serverName)) {
   // This server has now become alive after we marked it as dead.
   // We removed it's previous entry from the dead list to reflect it.

http://git-wip-us.apache.org/repos/asf/hbase/blob/1ecb10ce/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java
index 090b8cc..3b5eced 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java
@@ -47,9 +47,9 @@ public class MasterProcedureEnv {
 
   @InterfaceAudience.Private
   public static class WALStoreLeaseRecovery implements 
WALProcedureStore.LeaseRecovery {
-private final HMaster master;
+private final MasterServices master;
 
-public WALStoreLeaseRecovery(final HMaster master) {
+public WALStoreLeaseRecovery(final MasterServices master) {
   this.master = master;
 }
 
@@ -70,9 +70,9 @@ publi

[2/2] hbase git commit: HBASE-15579 Procedure v2 - Remove synchronized around nonce in Procedure submit

2016-04-22 Thread mbertozzi
HBASE-15579 Procedure v2 - Remove synchronized around nonce in Procedure submit


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bfca2a46
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bfca2a46
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bfca2a46

Branch: refs/heads/master
Commit: bfca2a460694bb2abc720a582318bee4ddc29c0f
Parents: 57e1dbc
Author: Matteo Bertozzi 
Authored: Fri Apr 22 10:13:13 2016 -0700
Committer: Matteo Bertozzi 
Committed: Fri Apr 22 10:15:58 2016 -0700

--
 .../hbase/procedure2/ProcedureExecutor.java | 43 
 .../store/wal/TestWALProcedureStore.java| 26 
 2 files changed, 42 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bfca2a46/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index 3f0ba37..f43b65f 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -635,34 +635,23 @@ public class ProcedureExecutor {
 Preconditions.checkArgument(lastProcId.get() >= 0);
 Preconditions.checkArgument(!proc.hasParent());
 
-Long currentProcId;
-
-// The following part of the code has to be synchronized to prevent 
multiple request
-// with the same nonce to execute at the same time.
-synchronized (this) {
-  // Check whether the proc exists.  If exist, just return the proc id.
-  // This is to prevent the same proc to submit multiple times (it could 
happen
-  // when client could not talk to server and resubmit the same request).
-  NonceKey noncekey = null;
-  if (nonce != HConstants.NO_NONCE) {
-noncekey = new NonceKey(nonceGroup, nonce);
-currentProcId = nonceKeysToProcIdsMap.get(noncekey);
-if (currentProcId != null) {
-  // Found the proc
-  return currentProcId;
-}
+// Initialize the Procedure ID
+long currentProcId = nextProcId();
+proc.setProcId(currentProcId);
+
+// Check whether the proc exists.  If exist, just return the proc id.
+// This is to prevent the same proc to submit multiple times (it could 
happen
+// when client could not talk to server and resubmit the same request).
+if (nonce != HConstants.NO_NONCE) {
+  NonceKey noncekey = new NonceKey(nonceGroup, nonce);
+  proc.setNonceKey(noncekey);
+
+  Long oldProcId = nonceKeysToProcIdsMap.putIfAbsent(noncekey, 
currentProcId);
+  if (oldProcId != null) {
+// Found the proc
+return oldProcId.longValue();
   }
-
-  // Initialize the Procedure ID
-  currentProcId = nextProcId();
-  proc.setProcId(currentProcId);
-
-  // This is new procedure. Set the noncekey and insert into the map.
-  if (noncekey != null) {
-proc.setNonceKey(noncekey);
-nonceKeysToProcIdsMap.put(noncekey, currentProcId);
-  }
-} // end of synchronized (this)
+}
 
 // Commit the transaction
 store.insert(proc, null);

http://git-wip-us.apache.org/repos/asf/hbase/blob/bfca2a46/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java
--
diff --git 
a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java
 
b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java
index b9a439a..88c85ba 100644
--- 
a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java
+++ 
b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java
@@ -208,6 +208,32 @@ public class TestWALProcedureStore {
   }
 
   @Test
+  public void testProcIdHoles() throws Exception {
+// Insert
+for (int i = 0; i < 100; i += 2) {
+  procStore.insert(new TestProcedure(i), null);
+  if (i > 0 && (i % 10) == 0) {
+LoadCounter loader = new LoadCounter();
+storeRestart(loader);
+assertEquals(0, loader.getCorruptedCount());
+assertEquals((i / 2) + 1, loader.getLoadedCount());
+  }
+}
+assertEquals(10, procStore.getActiveLogs().size());
+
+// Delete
+for (int i = 0; i < 100; i += 2) {
+  procStore.delete(i);
+}
+assertEquals(1, procStore.getActiveLogs().size());
+
+LoadCounter loader = new LoadCo

hbase git commit: HBASE-14123 HBase Backup/Restore Phase 2 - drop deleted files

2016-04-22 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/HBASE-7912 c61f7c342 -> 182d66f8e


HBASE-14123 HBase Backup/Restore Phase 2 - drop deleted files


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/182d66f8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/182d66f8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/182d66f8

Branch: refs/heads/HBASE-7912
Commit: 182d66f8e8f7795ff4b7532690a7828f228dee64
Parents: c61f7c3
Author: tedyu 
Authored: Fri Apr 22 09:30:21 2016 -0700
Committer: tedyu 
Committed: Fri Apr 22 09:30:21 2016 -0700

--
 .../hadoop/hbase/backup/impl/BackupContext.java | 402 --
 .../hadoop/hbase/backup/impl/BackupStatus.java  | 105 ---
 .../backup/impl/FullTableBackupProcedure.java   | 745 ---
 .../impl/IncrementalTableBackupProcedure.java   | 325 
 4 files changed, 1577 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/182d66f8/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupContext.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupContext.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupContext.java
deleted file mode 100644
index 06e66dc..000
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupContext.java
+++ /dev/null
@@ -1,402 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.backup.impl;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.backup.BackupType;
-import org.apache.hadoop.hbase.backup.HBackupFileSystem;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.BackupProtos;
-import 
org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupContext.Builder;
-import 
org.apache.hadoop.hbase.protobuf.generated.BackupProtos.TableBackupStatus;
-
-/**
- * An object to encapsulate the information for each backup request
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public class BackupContext {
-
-  public Map getBackupStatusMap() {
-return backupStatusMap;
-  }
-
-  public void setBackupStatusMap(Map backupStatusMap) 
{
-this.backupStatusMap = backupStatusMap;
-  }
-
-  public HashMap> getTableSetTimestampMap() {
-return tableSetTimestampMap;
-  }
-
-  public void setTableSetTimestampMap(
-  HashMap> tableSetTimestampMap) {
-this.tableSetTimestampMap = tableSetTimestampMap;
-  }
-
-  public String getHlogTargetDir() {
-return hlogTargetDir;
-  }
-
-  public void setType(BackupType type) {
-this.type = type;
-  }
-
-  public void setTargetRootDir(String targetRootDir) {
-this.targetRootDir = targetRootDir;
-  }
-
-  public void setTotalBytesCopied(long totalBytesCopied) {
-this.totalBytesCopied = totalBytesCopied;
-  }
-
-  // backup status flag
-  public static enum BackupState {
-RUNNING, COMPLETE, FAILED, CANCELLED;
-  }
-
-  public void setCancelled(boolean cancelled) {
-this.state = BackupState.CANCELLED;;
-  }
-
-  // backup phase
-  // for overall backup (for table list, some table may go online, while some 
may go offline)
-  protected static enum BackupPhase {
-SNAPSHOTCOPY, INCREMENTAL_COPY, STORE_MANIFEST;
-  }
-
-  // backup id: a timestamp when we request the backup
-  private String backupId;
-
-  // backup type, full or incremental
-  private BackupType type;
-
-  // target root directory for storing the backup files
-  private String targetRootDir;
-
-  // overall backup state
-  private BackupState state;
-
-  // overall backup phase
-  

[4/6] hbase git commit: HBASE-14123 HBase Backup/Restore Phase 2 (Vladimir Rodionov)

2016-04-22 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase/blob/c61f7c34/hbase-protocol/src/main/protobuf/Backup.proto
--
diff --git a/hbase-protocol/src/main/protobuf/Backup.proto 
b/hbase-protocol/src/main/protobuf/Backup.proto
index c17ad06..7d1ec4b 100644
--- a/hbase-protocol/src/main/protobuf/Backup.proto
+++ b/hbase-protocol/src/main/protobuf/Backup.proto
@@ -77,11 +77,8 @@ message BackupManifest {
   repeated TableName table_list = 4;
   required uint64 start_ts = 5;
   required uint64 complete_ts = 6;
-  required int64 total_bytes = 7;
-  optional int64 log_bytes  = 8;
-  repeated TableServerTimestamp tst_map = 9;
-  repeated BackupImage dependent_backup_image = 10;
-  required bool compacted = 11; 
+  repeated TableServerTimestamp tst_map = 7;
+  repeated BackupImage dependent_backup_image = 8;
 }
 
 message TableBackupStatus {
@@ -90,7 +87,7 @@ message TableBackupStatus {
   optional string snapshot = 3;
 }
 
-message BackupContext {
+message BackupInfo {
   required string backup_id = 1;
   required BackupType type = 2;
   required string target_root_dir = 3;
@@ -100,9 +97,10 @@ message BackupContext {
   repeated TableBackupStatus table_backup_status = 7;
   optional uint64  start_ts = 8;
   optional uint64  end_ts = 9;
-  optional int64  total_bytes_copied = 10;
-  optional string hlog_target_dir = 11;
-  optional uint32 progress = 12; 
+  optional uint32 progress = 10; 
+  optional string job_id = 11;
+  required uint32 workers_number = 12;
+  required uint64 bandwidth = 13;
   
   enum BackupState {
 WAITING = 0;
@@ -123,7 +121,7 @@ message BackupContext {
 }
 
 message BackupProcContext {
-  required BackupContext ctx = 1;
+  required BackupInfo ctx = 1;
   repeated ServerTimestamp server_timestamp = 2;
 }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/c61f7c34/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupClient.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupClient.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupClient.java
new file mode 100644
index 000..7b0b454
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupClient.java
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.backup;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.hbase.backup.util.BackupSet;
+
+public interface BackupClient extends Configurable{
+
+  /**
+   * Describe backup image command
+   * @param backupId - backup id
+   * @return backup info
+   * @throws IOException
+   */
+  public BackupInfo getBackupInfo(String backupId) throws IOException;
+
+  /**
+   * Show backup progress command
+   * @param backupId - backup id (may be null)
+   * @return backup progress (0-100%), -1 if no active sessions
+   *  or session not found
+   * @throws IOException
+   */
+  public int getProgress(String backupId) throws IOException;
+
+  /**
+   * Delete backup image command
+   * @param backupIds - backup id
+   * @return total number of deleted sessions
+   * @throws IOException
+   */
+  public int deleteBackups(String[] backupIds) throws IOException;
+
+//  /**
+//  TODO: Phase 3
+//   * Cancel current active backup command
+//   * @param backupId - backup id
+//   * @throws IOException
+//   */
+//  public void cancelBackup(String backupId) throws IOException;
+
+  /**
+   * Show backup history command
+   * @param n - last n backup sessions
+   * @throws IOException
+   */
+  public List getHistory(int n) throws IOException;
+
+  /**
+   * Backup sets list command - list all backup sets. Backup set is 
+   * a named group of tables. 
+   * @throws IOException
+   */
+  public List listBackupSets() throws IOException;
+
+  /**
+   * Backup set describe command. Shows list of tables in
+   * this particular backup set.
+   * @param name set name
+   * @return backup set description or null
+   * @throws IOException
+   */
+  public BackupSet getBackupSet(String name) throws IOExceptio

[5/6] hbase git commit: HBASE-14123 HBase Backup/Restore Phase 2 (Vladimir Rodionov)

2016-04-22 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase/blob/c61f7c34/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/BackupProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/BackupProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/BackupProtos.java
index a1a1a78..4699c81 100644
--- 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/BackupProtos.java
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/BackupProtos.java
@@ -4412,85 +4412,55 @@ public final class BackupProtos {
  */
 long getCompleteTs();
 
-// required int64 total_bytes = 7;
+// repeated .hbase.pb.TableServerTimestamp tst_map = 7;
 /**
- * required int64 total_bytes = 7;
- */
-boolean hasTotalBytes();
-/**
- * required int64 total_bytes = 7;
- */
-long getTotalBytes();
-
-// optional int64 log_bytes = 8;
-/**
- * optional int64 log_bytes = 8;
- */
-boolean hasLogBytes();
-/**
- * optional int64 log_bytes = 8;
- */
-long getLogBytes();
-
-// repeated .hbase.pb.TableServerTimestamp tst_map = 9;
-/**
- * repeated .hbase.pb.TableServerTimestamp tst_map = 9;
+ * repeated .hbase.pb.TableServerTimestamp tst_map = 7;
  */
 
java.util.List
 
 getTstMapList();
 /**
- * repeated .hbase.pb.TableServerTimestamp tst_map = 9;
+ * repeated .hbase.pb.TableServerTimestamp tst_map = 7;
  */
 
org.apache.hadoop.hbase.protobuf.generated.BackupProtos.TableServerTimestamp 
getTstMap(int index);
 /**
- * repeated .hbase.pb.TableServerTimestamp tst_map = 9;
+ * repeated .hbase.pb.TableServerTimestamp tst_map = 7;
  */
 int getTstMapCount();
 /**
- * repeated .hbase.pb.TableServerTimestamp tst_map = 9;
+ * repeated .hbase.pb.TableServerTimestamp tst_map = 7;
  */
 java.util.List
 
 getTstMapOrBuilderList();
 /**
- * repeated .hbase.pb.TableServerTimestamp tst_map = 9;
+ * repeated .hbase.pb.TableServerTimestamp tst_map = 7;
  */
 
org.apache.hadoop.hbase.protobuf.generated.BackupProtos.TableServerTimestampOrBuilder
 getTstMapOrBuilder(
 int index);
 
-// repeated .hbase.pb.BackupImage dependent_backup_image = 10;
+// repeated .hbase.pb.BackupImage dependent_backup_image = 8;
 /**
- * repeated .hbase.pb.BackupImage dependent_backup_image = 10;
+ * repeated .hbase.pb.BackupImage dependent_backup_image = 8;
  */
 
java.util.List
 
 getDependentBackupImageList();
 /**
- * repeated .hbase.pb.BackupImage dependent_backup_image = 10;
+ * repeated .hbase.pb.BackupImage dependent_backup_image = 8;
  */
 org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage 
getDependentBackupImage(int index);
 /**
- * repeated .hbase.pb.BackupImage dependent_backup_image = 10;
+ * repeated .hbase.pb.BackupImage dependent_backup_image = 8;
  */
 int getDependentBackupImageCount();
 /**
- * repeated .hbase.pb.BackupImage dependent_backup_image = 10;
+ * repeated .hbase.pb.BackupImage dependent_backup_image = 8;
  */
 java.util.List 
 getDependentBackupImageOrBuilderList();
 /**
- * repeated .hbase.pb.BackupImage dependent_backup_image = 10;
+ * repeated .hbase.pb.BackupImage dependent_backup_image = 8;
  */
 
org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImageOrBuilder 
getDependentBackupImageOrBuilder(
 int index);
-
-// required bool compacted = 11;
-/**
- * required bool compacted = 11;
- */
-boolean hasCompacted();
-/**
- * required bool compacted = 11;
- */
-boolean getCompacted();
   }
   /**
* Protobuf type {@code hbase.pb.BackupManifest}
@@ -4582,37 +4552,22 @@ public final class BackupProtos {
   completeTs_ = input.readUInt64();
   break;
 }
-case 56: {
-  bitField0_ |= 0x0020;
-  totalBytes_ = input.readInt64();
-  break;
-}
-case 64: {
-  bitField0_ |= 0x0040;
-  logBytes_ = input.readInt64();
-  break;
-}
-case 74: {
-  if (!((mutable_bitField0_ & 0x0100) == 0x0100)) {
+case 58: {
+  if (!((mutable_bitField0_ & 0x0040) == 0x0040)) {
 tstMap_ = new 
java.util.ArrayList();
-mutable_bitField0_ |= 0x0100;
+mutable_bitField0_ |= 0x0040;
   }
   
tstMap_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.BackupProtos.TableServerTimestamp.PARSER,
 extensionRegistry));
   break;
 }
-case 82: {
-  if (!((m

[2/6] hbase git commit: HBASE-14123 HBase Backup/Restore Phase 2 (Vladimir Rodionov)

2016-04-22 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase/blob/c61f7c34/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/RestoreClientImpl.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/RestoreClientImpl.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/RestoreClientImpl.java
index c0c5220..f16d213 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/RestoreClientImpl.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/RestoreClientImpl.java
@@ -22,11 +22,12 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map.Entry;
-import java.util.Set;
 import java.util.TreeSet;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -52,7 +53,6 @@ public final class RestoreClientImpl implements RestoreClient 
{
 
   private static final Log LOG = LogFactory.getLog(RestoreClientImpl.class);
   private Configuration conf;
-  private Set lastRestoreImagesSet;
 
   public RestoreClientImpl() {
   }
@@ -111,11 +111,10 @@ public final class RestoreClientImpl implements 
RestoreClient {
   checkTargetTables(tTableArray, isOverwrite);
 
   // start restore process
-  Set restoreImageSet =
-  restoreStage(backupManifestMap, sTableArray, tTableArray, 
autoRestore);
+  
+  restoreStage(backupManifestMap, sTableArray, tTableArray, autoRestore);
 
   LOG.info("Restore for " + Arrays.asList(sTableArray) + " are 
successful!");
-  lastRestoreImagesSet = restoreImageSet;
 
 } catch (IOException e) {
   LOG.error("ERROR: restore failed with error: " + e.getMessage());
@@ -126,13 +125,6 @@ public final class RestoreClientImpl implements 
RestoreClient {
 return false;
   }
 
-  /**
-   * Get last restore image set. The value is globally set for the latest 
finished restore.
-   * @return the last restore image set
-   */
-  public Set getLastRestoreImagesSet() {
-return lastRestoreImagesSet;
-  }
 
   private  boolean validate(HashMap 
backupManifestMap)
   throws IOException {
@@ -147,10 +139,6 @@ public final class RestoreClientImpl implements 
RestoreClient {
 imageSet.addAll(depList);
   }
 
-  // todo merge
-  LOG.debug("merge will be implemented in future jira");
-  // BackupUtil.clearMergedImages(table, imageSet, conf);
-
   LOG.info("Dependent image(s) from old to new:");
   for (BackupImage image : imageSet) {
 String imageDir =
@@ -164,6 +152,7 @@ public final class RestoreClientImpl implements 
RestoreClient {
 LOG.info("Backup image: " + image.getBackupId() + " for '" + table + 
"' is available");
   }
 }
+
 return isValid;
   }
 
@@ -189,7 +178,7 @@ public final class RestoreClientImpl implements 
RestoreClient {
   }
 } else {
   LOG.info("HBase table " + tableName
-  + " does not exist. It will be create during backup process");
+  + " does not exist. It will be created during restore process");
 }
   }
 }
@@ -223,54 +212,57 @@ public final class RestoreClientImpl implements 
RestoreClient {
* @return set of BackupImages restored
* @throws IOException exception
*/
-  private Set restoreStage(
+  private void restoreStage(
 HashMap backupManifestMap, TableName[] 
sTableArray,
 TableName[] tTableArray, boolean autoRestore) throws IOException {
 TreeSet restoreImageSet = new TreeSet();
-
-for (int i = 0; i < sTableArray.length; i++) {
-  restoreImageSet.clear();
-  TableName table = sTableArray[i];
-  BackupManifest manifest = backupManifestMap.get(table);
-  if (autoRestore) {
-// Get the image list of this backup for restore in time order from old
-// to new.
-TreeSet restoreList =
-new TreeSet(manifest.getDependentListByTable(table));
-LOG.debug("need to clear merged Image. to be implemented in future 
jira");
-
-for (BackupImage image : restoreList) {
+try {
+  for (int i = 0; i < sTableArray.length; i++) {
+TableName table = sTableArray[i];
+BackupManifest manifest = backupManifestMap.get(table);
+if (autoRestore) {
+  // Get the image list of this backup for restore in time order from 
old
+  // to new.
+  List list = new ArrayList();
+  list.add(manifest.getBackupImage());
+  List depList = manifest.getDependentListByTable(table);
+  list.addAll(depList);
+  TreeSet restoreList = new TreeSet(list);
+  LOG.debug("need to clear merged Image. to be implemented in future 
jira");
+  restoreImages(restoreList.iterator(),

[3/6] hbase git commit: HBASE-14123 HBase Backup/Restore Phase 2 (Vladimir Rodionov)

2016-04-22 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase/blob/c61f7c34/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java
index b4d47d3..ba09c8d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.hbase.backup.impl;
 
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
-
 import java.io.Closeable;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -27,10 +25,7 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Set;
-import java.util.concurrent.CancellationException;
-import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Future;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
@@ -42,18 +37,23 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.BackupInfo;
 import org.apache.hadoop.hbase.backup.BackupType;
 import org.apache.hadoop.hbase.backup.HBackupFileSystem;
-import org.apache.hadoop.hbase.backup.impl.BackupContext.BackupState;
+import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
 import org.apache.hadoop.hbase.backup.impl.BackupManifest.BackupImage;
-import org.apache.hadoop.hbase.backup.impl.BackupUtil.BackupCompleteData;
+import org.apache.hadoop.hbase.backup.master.BackupController;
 import org.apache.hadoop.hbase.backup.master.BackupLogCleaner;
+import org.apache.hadoop.hbase.backup.master.LogRollMasterProcedureManager;
+import 
org.apache.hadoop.hbase.backup.regionserver.LogRollRegionServerProcedureManager;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+
 /**
  * Handles backup requests on server-side, creates backup context records in 
hbase:backup
  * to keep track backup. The timestamps kept in hbase:backup table will be 
used for future
@@ -65,7 +65,7 @@ public class BackupManager implements Closeable {
   private static final Log LOG = LogFactory.getLog(BackupManager.class);
 
   private Configuration conf = null;
-  private BackupContext backupContext = null;
+  private BackupInfo backupContext = null;
 
   private ExecutorService pool = null;
 
@@ -86,12 +86,21 @@ public class BackupManager implements Closeable {
   HConstants.BACKUP_ENABLE_KEY + " setting.");
 }
 this.conf = conf;
-this.conn = ConnectionFactory.createConnection(conf); // TODO: get 
Connection from elsewhere?
+this.conn = ConnectionFactory.createConnection(conf);
 this.systemTable = new BackupSystemTable(conn);
+ 
 Runtime.getRuntime().addShutdownHook(new ExitHandler());
+
   }
 
   /**
+   * Return backup context
+   */
+  protected BackupInfo getBackupContext()
+  {
+return backupContext;
+  }
+  /**
* This method modifies the master's configuration in order to inject 
backup-related features
* @param conf configuration
*/
@@ -99,16 +108,61 @@ public class BackupManager implements Closeable {
 if (!isBackupEnabled(conf)) {
   return;
 }
+// Add WAL archive cleaner plug-in
 String plugins = conf.get(HConstants.HBASE_MASTER_LOGCLEANER_PLUGINS);
 String cleanerClass = BackupLogCleaner.class.getCanonicalName();
 if (!plugins.contains(cleanerClass)) {
   conf.set(HConstants.HBASE_MASTER_LOGCLEANER_PLUGINS, plugins + "," + 
cleanerClass);
+}
+
+String classes = conf.get("hbase.procedure.master.classes");
+String masterProcedureClass = 
LogRollMasterProcedureManager.class.getName();
+if(classes == null){
+  conf.set("hbase.procedure.master.classes", masterProcedureClass);
+} else if(!classes.contains(masterProcedureClass)){
+  conf.set("hbase.procedure.master.classes", classes 
+","+masterProcedureClass);
+}
+ 
+// Set Master Observer - Backup Controller
+classes = conf.get("hbase.coprocessor.master.classes");
+String observerClass = BackupController.class.getName();
+if(classes == null){
+  conf.set("hbase.coprocessor.master.classes", observerClass);
+} else if(!classes.contains(observerClass)){
+  conf.set("hbase.coprocessor.mas

[1/6] hbase git commit: HBASE-14123 HBase Backup/Restore Phase 2 (Vladimir Rodionov)

2016-04-22 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/HBASE-7912 0ca86556e -> c61f7c342


http://git-wip-us.apache.org/repos/asf/hbase/blob/c61f7c34/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java
index 02fcbba..736b8a5 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java
@@ -27,22 +27,24 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-import org.apache.hadoop.hbase.wal.WALFactory;
-import org.apache.hadoop.hbase.wal.WALKey;
+import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.wal.WAL.Entry;
 import org.apache.hadoop.hbase.wal.WAL.Reader;
+import org.apache.hadoop.hbase.wal.WALFactory;
+import org.apache.hadoop.hbase.wal.WALKey;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Simple {@link InputFormat} for {@link org.apache.hadoop.hbase.wal.WAL} 
files.
@@ -231,21 +233,31 @@ public class WALInputFormat extends InputFormat {
   List getSplits(final JobContext context, final String startKey, 
final String endKey)
   throws IOException, InterruptedException {
 Configuration conf = context.getConfiguration();
-Path inputDir = new 
Path(conf.get("mapreduce.input.fileinputformat.inputdir"));
+
+Path[] inputPaths = getInputPaths(conf);
 
 long startTime = conf.getLong(startKey, Long.MIN_VALUE);
 long endTime = conf.getLong(endKey, Long.MAX_VALUE);
 
-FileSystem fs = inputDir.getFileSystem(conf);
-List files = getFiles(fs, inputDir, startTime, endTime);
-
-List splits = new ArrayList(files.size());
-for (FileStatus file : files) {
+FileSystem fs = FileSystem.get(conf);
+
+List allFiles = new ArrayList();
+for(Path inputPath: inputPaths){
+  List files = getFiles(fs, inputPath, startTime, endTime);
+  allFiles.addAll(files);
+}
+List splits = new ArrayList(allFiles.size());
+for (FileStatus file : allFiles) {
   splits.add(new WALSplit(file.getPath().toString(), file.getLen(), 
startTime, endTime));
 }
 return splits;
   }
 
+  private Path[] getInputPaths(Configuration conf) {
+String inpDirs = conf.get("mapreduce.input.fileinputformat.inputdir");
+return StringUtils.stringToPath(inpDirs.split(","));
+  }
+
   private List getFiles(FileSystem fs, Path dir, long startTime, 
long endTime)
   throws IOException {
 List result = new ArrayList();

http://git-wip-us.apache.org/repos/asf/hbase/blob/c61f7c34/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
index 2ceeda5..4cdbad3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
@@ -47,6 +47,8 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.wal.WALKey;
+import org.apache.hadoop.mapreduce.Counter;
+import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
@@ -69,9 +71,9 @@ import org.apache.hadoop.util.ToolRunner;
 public class WALPlayer extends Configured implements Tool {
   private static final Log LOG = LogFactory.getLog(WALPlayer.class);
   final static String NAME = "WALPlayer";
-  final static String BULK_OUTPUT_CONF_KEY = "wal.bulk.output";
-  final static String TABLES_KEY = "wal.input.tables";
-  final static String TABLE_MAP_KEY = "wal.input.tablesmap";
+  public final static String BULK_OUTPUT_CONF_KEY = "wal.bulk.output";
+  public final static String TABLES_KEY = "wal.in

[6/6] hbase git commit: HBASE-14123 HBase Backup/Restore Phase 2 (Vladimir Rodionov)

2016-04-22 Thread tedyu
HBASE-14123 HBase Backup/Restore Phase 2 (Vladimir Rodionov)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c61f7c34
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c61f7c34
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c61f7c34

Branch: refs/heads/HBASE-7912
Commit: c61f7c3424566d4479a7741a7e140e82502f6f9f
Parents: 0ca8655
Author: tedyu 
Authored: Fri Apr 22 09:14:49 2016 -0700
Committer: tedyu 
Committed: Fri Apr 22 09:14:49 2016 -0700

--
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |8 +-
 .../hbase/protobuf/generated/BackupProtos.java  | 1214 --
 hbase-protocol/src/main/protobuf/Backup.proto   |   18 +-
 .../hadoop/hbase/backup/BackupClient.java   |  108 ++
 .../hadoop/hbase/backup/BackupDriver.java   |   51 +-
 .../apache/hadoop/hbase/backup/BackupInfo.java  |  483 +++
 .../hbase/backup/BackupRestoreFactory.java  |   23 +-
 .../hadoop/hbase/backup/BackupStatus.java   |  103 ++
 .../hadoop/hbase/backup/HBackupFileSystem.java  |   20 +-
 .../hadoop/hbase/backup/RestoreDriver.java  |   60 +-
 .../hbase/backup/impl/BackupClientImpl.java |  231 
 .../hbase/backup/impl/BackupCommands.java   |  450 ++-
 .../hbase/backup/impl/BackupCopyService.java|   21 +-
 .../hbase/backup/impl/BackupException.java  |9 +-
 .../hadoop/hbase/backup/impl/BackupManager.java |  155 ++-
 .../hbase/backup/impl/BackupManifest.java   |   68 +-
 .../backup/impl/BackupRestoreConstants.java |3 +-
 .../hbase/backup/impl/BackupSystemTable.java|  436 +--
 .../backup/impl/BackupSystemTableHelper.java|  270 ++--
 .../hadoop/hbase/backup/impl/BackupUtil.java|  184 ++-
 .../backup/impl/IncrementalBackupManager.java   |   75 +-
 .../backup/impl/IncrementalRestoreService.java  |   10 +-
 .../hbase/backup/impl/RestoreClientImpl.java|  164 ++-
 .../hadoop/hbase/backup/impl/RestoreUtil.java   |   17 +-
 .../mapreduce/MapReduceBackupCopyService.java   |   50 +-
 .../mapreduce/MapReduceRestoreService.java  |  100 +-
 .../hbase/backup/master/BackupController.java   |   56 +
 .../hbase/backup/master/BackupLogCleaner.java   |9 +-
 .../backup/master/FullTableBackupProcedure.java |  751 +++
 .../master/IncrementalTableBackupProcedure.java |  331 +
 .../master/LogRollMasterProcedureManager.java   |   10 +-
 .../regionserver/LogRollBackupSubprocedure.java |   10 +-
 .../LogRollRegionServerProcedureManager.java|6 +-
 .../hadoop/hbase/backup/util/BackupSet.java |   62 +
 .../hadoop/hbase/mapreduce/WALInputFormat.java  |   30 +-
 .../hadoop/hbase/mapreduce/WALPlayer.java   |   36 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |   15 +-
 .../hbase/regionserver/HRegionServer.java   |3 +-
 .../hadoop/hbase/backup/TestBackupBase.java |   79 +-
 .../hbase/backup/TestBackupBoundaryTests.java   |2 -
 .../hadoop/hbase/backup/TestBackupDelete.java   |   96 ++
 .../hadoop/hbase/backup/TestBackupDescribe.java |   97 ++
 .../hbase/backup/TestBackupLogCleaner.java  |   15 +-
 .../hbase/backup/TestBackupShowHistory.java |   92 ++
 .../hbase/backup/TestBackupStatusProgress.java  |   98 ++
 .../hbase/backup/TestBackupSystemTable.java |  271 +++-
 .../hadoop/hbase/backup/TestFullBackup.java |   73 ++
 .../hadoop/hbase/backup/TestFullBackupSet.java  |   85 ++
 .../hadoop/hbase/backup/TestFullRestore.java|  166 +++
 .../hbase/backup/TestIncrementalBackup.java |7 +-
 .../backup/TestIncrementalBackupNoDataLoss.java |  124 ++
 .../hadoop/hbase/backup/TestRemoteBackup.java   |4 +
 .../hadoop/hbase/backup/TestRemoteRestore.java  |2 -
 .../hbase/backup/TestRestoreBoundaryTests.java  |1 -
 54 files changed, 5455 insertions(+), 1407 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c61f7c34/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index f19da53..3cc846c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -217,6 +217,7 @@ public class HBaseAdmin implements Admin {
   // want to wait a long time.
   private final int retryLongerMultiplier;
   private final int syncWaitTimeout;
+  private final long backupWaitTimeout;
   private boolean aborted;
   private int operationTimeout;
 
@@ -243,7 +244,8 @@ public class HBaseAdmin implements Admin {
 HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);
 this.syncWaitTimeout = this.conf.getInt(
   "hbase.client

hbase git commit: HBASE-15392 Single Cell Get reads two HFileBlocks

2016-04-22 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 01165ffa8 -> fe632c214


HBASE-15392 Single Cell Get reads two HFileBlocks

M 
hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
moreRowsMayExistAfterCell Exploit the fact a Scan is a Get Scan. Also save 
compares
if no non-default stopRow.

M 
hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
optimize Add doc on what is being optimized. Also, if a Get Scan, do not
optimize else we'll keep going after our row is DONE.
Another place to make use of the Get Scan fact is when we are DONE.. if
Get Scan, we can close out the scan.

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fe632c21
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fe632c21
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fe632c21

Branch: refs/heads/branch-1.3
Commit: fe632c214b250cce6aee61b140150bc56f109ce2
Parents: 01165ff
Author: stack 
Authored: Thu Apr 21 12:33:37 2016 -0700
Committer: stack 
Committed: Fri Apr 22 09:07:39 2016 -0700

--
 .../java/org/apache/hadoop/hbase/KeyValue.java  |  5 ++
 .../hbase/io/hfile/CombinedBlockCache.java  | 13 ++-
 .../hbase/regionserver/KeyValueScanner.java | 14 +--
 .../hbase/regionserver/ScanQueryMatcher.java| 68 ---
 .../hadoop/hbase/regionserver/StoreScanner.java | 89 +---
 .../hbase/util/CollectionBackedScanner.java |  3 +-
 .../hbase/regionserver/KeyValueScanFixture.java |  3 +-
 .../regionserver/TestKeyValueScanFixture.java   |  3 +-
 8 files changed, 133 insertions(+), 65 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/fe632c21/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index aa65f44..2d0f618 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -2035,6 +2035,11 @@ public class KeyValue implements Cell, HeapSize, 
Cloneable, SettableSequenceId,
   right.getRowArray(), right.getRowOffset(), right.getRowLength());
 }
 
+public int compareRows(Cell left, byte[] right, int roffset, int rlength) {
+  return compareRows(left.getRowArray(), left.getRowOffset(), 
left.getRowLength(), right,
+  roffset, rlength);
+}
+
 /**
  * Get the b[],o,l for left and right rowkey portions and compare.
  * @param left

http://git-wip-us.apache.org/repos/asf/hbase/blob/fe632c21/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java
index a1d3f98..4a1c2c7 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java
@@ -61,8 +61,8 @@ public class CombinedBlockCache implements 
ResizableBlockCache, HeapSize {
   @Override
   public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean 
inMemory,
   final boolean cacheDataInL1) {
-boolean isMetaBlock = buf.getBlockType().getCategory() != 
BlockCategory.DATA;
-if (isMetaBlock || cacheDataInL1) {
+boolean metaBlock = buf.getBlockType().getCategory() != BlockCategory.DATA;
+if (metaBlock || cacheDataInL1) {
   lruCache.cacheBlock(cacheKey, buf, inMemory, cacheDataInL1);
 } else {
   l2Cache.cacheBlock(cacheKey, buf, inMemory, false);
@@ -79,12 +79,9 @@ public class CombinedBlockCache implements 
ResizableBlockCache, HeapSize {
   boolean repeat, boolean updateCacheMetrics) {
 // TODO: is there a hole here, or just awkwardness since in the lruCache 
getBlock
 // we end up calling l2Cache.getBlock.
-if (lruCache.containsBlock(cacheKey)) {
-  return lruCache.getBlock(cacheKey, caching, repeat, updateCacheMetrics);
-}
-Cacheable result = l2Cache.getBlock(cacheKey, caching, repeat, 
updateCacheMetrics);
-
-return result;
+return lruCache.containsBlock(cacheKey)?
+lruCache.getBlock(cacheKey, caching, repeat, updateCacheMetrics):
+l2Cache.getBlock(cacheKey, caching, repeat, updateCacheMetrics);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/fe632c21/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java
-

hbase git commit: HBASE-15392 Single Cell Get reads two HFileBlocks

2016-04-22 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1 5e552e57a -> 27446a5c4


HBASE-15392 Single Cell Get reads two HFileBlocks

M 
hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
moreRowsMayExistAfterCell Exploit the fact a Scan is a Get Scan. Also save 
compares
if no non-default stopRow.

M 
hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
optimize Add doc on what is being optimized. Also, if a Get Scan, do not
optimize else we'll keep going after our row is DONE.
Another place to make use of the Get Scan fact is when we are DONE.. if
Get Scan, we can close out the scan.

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/27446a5c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/27446a5c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/27446a5c

Branch: refs/heads/branch-1
Commit: 27446a5c4a4bb1b8d6228b5a304c650c9cc6f49d
Parents: 5e552e5
Author: stack 
Authored: Thu Apr 21 12:33:37 2016 -0700
Committer: stack 
Committed: Fri Apr 22 09:07:09 2016 -0700

--
 .../java/org/apache/hadoop/hbase/KeyValue.java  |  5 ++
 .../hbase/io/hfile/CombinedBlockCache.java  | 13 ++-
 .../hbase/regionserver/KeyValueScanner.java | 14 +--
 .../hbase/regionserver/ScanQueryMatcher.java| 68 ---
 .../hadoop/hbase/regionserver/StoreScanner.java | 89 +---
 .../hbase/util/CollectionBackedScanner.java |  3 +-
 .../hbase/regionserver/KeyValueScanFixture.java |  3 +-
 .../regionserver/TestKeyValueScanFixture.java   |  3 +-
 8 files changed, 133 insertions(+), 65 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/27446a5c/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index aa65f44..2d0f618 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -2035,6 +2035,11 @@ public class KeyValue implements Cell, HeapSize, 
Cloneable, SettableSequenceId,
   right.getRowArray(), right.getRowOffset(), right.getRowLength());
 }
 
+public int compareRows(Cell left, byte[] right, int roffset, int rlength) {
+  return compareRows(left.getRowArray(), left.getRowOffset(), 
left.getRowLength(), right,
+  roffset, rlength);
+}
+
 /**
  * Get the b[],o,l for left and right rowkey portions and compare.
  * @param left

http://git-wip-us.apache.org/repos/asf/hbase/blob/27446a5c/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java
index a1d3f98..4a1c2c7 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java
@@ -61,8 +61,8 @@ public class CombinedBlockCache implements 
ResizableBlockCache, HeapSize {
   @Override
   public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean 
inMemory,
   final boolean cacheDataInL1) {
-boolean isMetaBlock = buf.getBlockType().getCategory() != 
BlockCategory.DATA;
-if (isMetaBlock || cacheDataInL1) {
+boolean metaBlock = buf.getBlockType().getCategory() != BlockCategory.DATA;
+if (metaBlock || cacheDataInL1) {
   lruCache.cacheBlock(cacheKey, buf, inMemory, cacheDataInL1);
 } else {
   l2Cache.cacheBlock(cacheKey, buf, inMemory, false);
@@ -79,12 +79,9 @@ public class CombinedBlockCache implements 
ResizableBlockCache, HeapSize {
   boolean repeat, boolean updateCacheMetrics) {
 // TODO: is there a hole here, or just awkwardness since in the lruCache 
getBlock
 // we end up calling l2Cache.getBlock.
-if (lruCache.containsBlock(cacheKey)) {
-  return lruCache.getBlock(cacheKey, caching, repeat, updateCacheMetrics);
-}
-Cacheable result = l2Cache.getBlock(cacheKey, caching, repeat, 
updateCacheMetrics);
-
-return result;
+return lruCache.containsBlock(cacheKey)?
+lruCache.getBlock(cacheKey, caching, repeat, updateCacheMetrics):
+l2Cache.getBlock(cacheKey, caching, repeat, updateCacheMetrics);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/27446a5c/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java
-