hbase git commit: HBASE-17460 enable_table_replication can not perform cyclic replication of a table (NITIN VERMA)

2017-02-17 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master b392de3e3 -> 0e05537de


HBASE-17460 enable_table_replication can not perform cyclic replication of a table (NITIN VERMA)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0e05537d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0e05537d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0e05537d

Branch: refs/heads/master
Commit: 0e05537dec32d31e1d025ddf60276df01ea0e28b
Parents: b392de3
Author: tedyu 
Authored: Fri Feb 17 17:56:53 2017 -0800
Committer: tedyu 
Committed: Fri Feb 17 17:56:53 2017 -0800

--
 .../apache/hadoop/hbase/HColumnDescriptor.java  |   2 +-
 .../apache/hadoop/hbase/HTableDescriptor.java   | 102 ++-
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |  31 --
 .../client/replication/ReplicationAdmin.java|   8 +-
 .../TestReplicationAdminWithClusters.java   |   1 +
 5 files changed, 129 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0e05537d/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 1597a06..6d1ae3f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -1076,7 +1076,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
   public int compareTo(HColumnDescriptor o) {
     int result = Bytes.compareTo(this.name, o.getName());
     if (result == 0) {
-      // punt on comparison for ordering, just calculate difference
+      // punt on comparison for ordering, just calculate difference.
       result = this.values.hashCode() - o.values.hashCode();
       if (result < 0)
         result = -1;

http://git-wip-us.apache.org/repos/asf/hbase/blob/0e05537d/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index 60b85fe..a81c82a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -40,9 +40,9 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -1042,6 +1042,106 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
   }
 
   /**
+   * Detects whether replication has been already enabled on any of the column families of this
+   * table descriptor.
+   * @return true if any of the column families has replication enabled.
+   */
+  public boolean isReplicationEnabled() {
+    // Go through each Column-Family descriptor and check if the
+    // Replication has been enabled already.
+    // Return 'true' if replication has been enabled on any CF,
+    // otherwise return 'false'.
+    //
+    boolean result = false;
+    Iterator<HColumnDescriptor> it = this.families.values().iterator();
+
+    while (it.hasNext()) {
+      HColumnDescriptor tempHcd = it.next();
+      if (tempHcd.getScope() != HConstants.REPLICATION_SCOPE_LOCAL) {
+        result = true;
+        break;
+      }
+    }
+
+    return result;
+  }
+
+  /**
+   * Compare the contents of the descriptor with another one passed as a parameter for replication
+   * purpose. The REPLICATION_SCOPE field is ignored during comparison.
+   * @param obj descriptor on source cluster which needs to be replicated.
+   * @return true if the contents of the two descriptors match (ignoring just REPLICATION_SCOPE).
+   * @see java.lang.Object#equals(java.lang.Object)
+   */
+  public boolean compareForReplication(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null) {
+      return false;
+    }
+    if (!(obj instanceof HTableDescriptor)) {
+      return false;
+    }
+
+    boolean result = false;
+
+    // Create a copy of peer HTD as we need to change its replication
+    // scope to match with the local HTD.
+    HTableDescriptor peerHtdCopy = new HTableDescri

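The diff is truncated at this point in the archive. As a rough sketch only, not the committed HBASE-17460 code: the replication-scope-agnostic comparison described in the javadoc above could be finished along these lines, assuming the peer descriptor is copied, each family's REPLICATION_SCOPE is aligned with the local descriptor, and the rest is delegated to equals().

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;

// Hypothetical helper, not the committed code.
public final class ReplicationCompareSketch {
  public static boolean compareForReplication(HTableDescriptor local, HTableDescriptor peer) {
    // Copy the peer HTD so the caller's descriptor is not mutated.
    HTableDescriptor peerCopy = new HTableDescriptor(peer);
    for (HColumnDescriptor hcd : peerCopy.getColumnFamilies()) {
      HColumnDescriptor localHcd = local.getFamily(hcd.getName());
      if (localHcd != null) {
        // Neutralize REPLICATION_SCOPE differences before comparing.
        hcd.setScope(localHcd.getScope());
      }
    }
    return local.equals(peerCopy);
  }
}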
[15/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
index 43ee139..65946ce 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
(Generated Javadoc diff, truncated: the "Uses of HColumnDescriptor" table gains rows for the new AsyncAdmin.addColumnFamily(TableName, HColumnDescriptor) and AsyncAdmin.modifyColumnFamily(TableName, HColumnDescriptor) methods and their AsyncHBaseAdmin implementations, all returning CompletableFuture<Void>, and the MasterObserver/BaseMasterObserver/BaseMasterAndRegionObserver rows for postAddColumn, postAddColumnFamily and postAddColumnHandler are renumbered and reordered.)

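The new asynchronous admin calls above return a CompletableFuture instead of blocking. A minimal usage sketch, assuming an AsyncAdmin has already been obtained (for example from an AsyncConnection); the table and family names are illustrative.

import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncAdmin;

public class AddFamilyExample {
  // Adds a column family without blocking the calling thread.
  public static CompletableFuture<Void> addFamily(AsyncAdmin admin) {
    TableName table = TableName.valueOf("t1");              // illustrative table name
    HColumnDescriptor family = new HColumnDescriptor("cf"); // illustrative family name
    return admin.addColumnFamily(table, family)
        .whenComplete((ignored, err) -> {
          if (err != null) {
            System.err.println("addColumnFamily failed: " + err);
          }
        });
  }
}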
[47/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/ServerName.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/ServerName.html b/apidocs/org/apache/hadoop/hbase/ServerName.html
index 0031263..bd0b5b0 100644
--- a/apidocs/org/apache/hadoop/hbase/ServerName.html
+++ b/apidocs/org/apache/hadoop/hbase/ServerName.html
(Generated Javadoc diff, truncated: the ServerName page gains a "Deprecated Methods" tab; the class description is reworded from "Instance of an HBase ServerName" to "Name of a particular incarnation of an HBase Server", now points to getAddress() for obtaining the hostname + port, and fixes the "distingushes" typo; getHostAndPort() and getServerName(String, long) are deprecated since 2.0 in favor of getAddress() and valueOf(String, long), and getHostPort(), which returned com.google.common.net.HostAndPort, is replaced by getAddress() returning Address.)

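A small sketch of the non-deprecated path described above; the hostname, port and startcode are made-up values, and it is assumed that Address exposes getHostname() and getPort().

import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.net.Address;

public class ServerNameExample {
  public static void main(String[] args) {
    // hostname, port, startcode -> "www.example.org,1234,1212121212"
    ServerName sn = ServerName.valueOf("www.example.org", 1234, 1212121212L);
    System.out.println(sn);

    // Preferred over the deprecated getHostAndPort(): the endpoint as an Address.
    Address addr = sn.getAddress();
    System.out.println(addr.getHostname() + ":" + addr.getPort());
  }
}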
[43/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/client/CompactionState.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/CompactionState.html b/apidocs/org/apache/hadoop/hbase/client/CompactionState.html
index e24c2cc..26af7f1 100644
--- a/apidocs/org/apache/hadoop/hbase/client/CompactionState.html
+++ b/apidocs/org/apache/hadoop/hbase/client/CompactionState.html
(Generated Javadoc diff: only the auto-generated anchors of values() and valueOf(String) move; no API change.)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/client/Durability.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Durability.html b/apidocs/org/apache/hadoop/hbase/client/Durability.html
index 58db6a9..7efcff6 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Durability.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Durability.html
(Generated Javadoc diff: the same anchor-only churn for values() and valueOf(String); no API change.)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/client/Result.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Result.html b/apidocs/org/apache/hadoop/hbase/client/Result.html
index 23da111..f3966fc 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Result.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Result.html
(Generated Javadoc diff, truncated: the Result page gains a "Deprecated Methods" tab and one extra row in its generated method table; the class description "Single row result of a Get or Scan query" is unchanged.)
[49/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 37446bb..e946682 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,24 +5,24 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
 /Producer (Apache HBase Team)
-/CreationDate (D:20170205144805+00'00')
-/ModDate (D:20170205144805+00'00')
+/CreationDate (D:20170217144817+00'00')
+/ModDate (D:20170217144817+00'00')
 >>
 endobj
(Remainder of the regenerated PDF diff omitted: raw /Outlines, /PageLabels, /Count and /Kids object references with no readable content.)

[41/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html b/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
index 65b5bc7..7cc4b0d 100644
--- a/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
+++ b/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
(Generated Javadoc diff, truncated: the deprecation notes on ReplicationAdmin's peer-management methods now name their replacements: addPeer(String, ReplicationPeerConfig) points to Admin.addReplicationPeer(String, ReplicationPeerConfig), disablePeer(String) to Admin.disableReplicationPeer(String), enablePeer(String) to Admin.enableReplicationPeer(String), getPeerConfig(String) to Admin.getReplicationPeerConfig(String), listPeerConfigs() to Admin.listReplicationPeers(), and removePeer(String) to Admin.removeReplicationPeer(String); appendPeerTableCFs, getPeersCount, getPeerState and removePeerTableCFs keep only the Deprecated marker in their summary rows.)
[35/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/src-html/org/apache/hadoop/hbase/ServerName.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/ServerName.html b/apidocs/src-html/org/apache/hadoop/hbase/ServerName.html
index fcef767..f54b50e 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/ServerName.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/ServerName.html
(Source cross-reference diff, truncated: in ServerName.java the import of com.google.common.net.HostAndPort is replaced by org.apache.hadoop.hbase.net.Address, and the class javadoc is rewritten as on the apidocs page above, now pointing to getAddress() for the hostname + port and fixing the "distingushes" spelling.)

[37/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
index d529a3c..cb1bcc7 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
(Generated class-use diff, truncated: the rows listing the decode(PositionedByteRange) implementations of the DataType hierarchy (RawInteger, RawString, RawFloat, RawByte, RawShort, RawLong, RawBytes, OrderedInt8/16/32/64, OrderedFloat32/64, OrderedBlob, OrderedBlobVar, OrderedString, OrderedNumeric, FixedLengthWrapper, TerminatedWrapper, Struct) are reordered; no API change.)

[24/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index cd29658..c83492c 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
(Generated master-index diff, truncated. Notable entries: acquireBufferQuota(long) moves from ReplicationSource.ReplicationSourceWorkerThread to ReplicationSourceWALReaderThread; new entries appear for AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.action(...), MetricsThriftServerSource.ACTIVE_WORKER_COUNT_KEY, MetricsThriftServerSourceImpl.activeWorkerCountGauge, AsyncAdmin.addColumnFamily(TableName, HColumnDescriptor) and its AsyncHBaseAdmin implementation, AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer, and addEntry(WAL.Entry); the MasterProcedureScheduler entries for add(Procedure, boolean) on NamespaceQueue/QueueImpl/QueueInterface collapse into a single Queue entry and addBack(Procedure) on QueueImpl is dropped.)

[48/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apache_hbase_reference_guide.pdfmarks
--
diff --git a/apache_hbase_reference_guide.pdfmarks b/apache_hbase_reference_guide.pdfmarks
index 9cf49c9..f8eba53 100644
--- a/apache_hbase_reference_guide.pdfmarks
+++ b/apache_hbase_reference_guide.pdfmarks
@@ -2,8 +2,8 @@
   /Author (Apache HBase Team)
   /Subject ()
   /Keywords ()
-  /ModDate (D:20170205144937)
-  /CreationDate (D:20170205144937)
+  /ModDate (D:20170217144957)
+  /CreationDate (D:20170217144957)
   /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
   /Producer ()
   /DOCINFO pdfmark

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/allclasses-frame.html
--
diff --git a/apidocs/allclasses-frame.html b/apidocs/allclasses-frame.html
index 6fe32c0..efd05d0 100644
--- a/apidocs/allclasses-frame.html
+++ b/apidocs/allclasses-frame.html
@@ -13,6 +13,7 @@
 
 
 AccessDeniedException
+Address
 Admin
 Append
 AsyncAdmin

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/allclasses-noframe.html
--
diff --git a/apidocs/allclasses-noframe.html b/apidocs/allclasses-noframe.html
index a28ed3e..c64c1a8 100644
--- a/apidocs/allclasses-noframe.html
+++ b/apidocs/allclasses-noframe.html
@@ -13,6 +13,7 @@
 
 
 AccessDeniedException
+Address
 Admin
 Append
 AsyncAdmin

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/deprecated-list.html
--
diff --git a/apidocs/deprecated-list.html b/apidocs/deprecated-list.html
index 7f473e4..eabb570 100644
--- a/apidocs/deprecated-list.html
+++ b/apidocs/deprecated-list.html
(Generated deprecated-list diff, truncated. New entries cover the ReplicationAdmin peer-management deprecations (addPeer, appendPeerTableCFs(String, Map), disablePeer, enablePeer, getPeerConfig, getPeersCount, getPeerState), each pointing to its Admin replacement where one exists, and the new ServerName deprecations: getHostAndPort() and getServerNameLessStartCode(String) point to getAddress(), getServerName(String, long) points to valueOf(String, long), and getServerStartcodeFromServerName(String) advises using a ServerName instance to pull out the start code. Existing entries such as HTableDescriptor.getName(), ImmutableBytesWritable.getSize() and SnapshotDescription.getTable() are renumbered around the additions.)

[29/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/book.html
--
diff --git a/book.html b/book.html
index af2676f..ef9d462 100644
--- a/book.html
+++ b/book.html
(Rendered reference-guide diff, truncated. Three changes are visible:
 - a new chapter "142. RegionServer Grouping" is added to the table of contents, shifting every later chapter and appendix entry up by one (143. Getting Involved through 165. Tracing from HBase Shell);
 - in "74.2. Testing MOB" the utility name is corrected from org.apache.hadoop.hbase.IntegrationTestIngestMOB to org.apache.hadoop.hbase.IntegrationTestIngestWithMOB and the example -threshold value changes from 102400 to 1024;
 - the manual MOB compaction instructions drop the dedicated compact_mob/major_compact_mob shell commands and the Admin.compactMob/Admin.majorCompactMob references in favor of the regular compact/major_compact commands taking the table name, a column family and a 'MOB' compaction type, and the whole "74.4.2. MOB Sweeper" subsection, including the hbase.mob.sweep.tool.compaction.* settings, is removed.)

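For context on the MOB settings that chapter discusses, a minimal sketch of declaring a MOB-enabled column family; the table and family names are illustrative, and setMobEnabled is assumed alongside the setMobThreshold call shown in the chapter's own example.

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;

public class MobFamilyExample {
  public static HTableDescriptor mobTable() {
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("t1"));
    HColumnDescriptor hcd = new HColumnDescriptor("c1");
    hcd.setMobEnabled(true);      // store oversized cells as MOB files
    hcd.setMobThreshold(102400L); // cells above ~100 KB take the MOB path
    htd.addFamily(hcd);
    return htd;
  }
}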
[39/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.html b/apidocs/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.html
index dc731df..a079ecc 100644
--- a/apidocs/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.html
+++ b/apidocs/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.html
(Generated Javadoc diff: only the anchors of createKey(), createValue(), getPos(), getProgress() and next(...) move; no API change.)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html b/apidocs/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html
index 8f856d3..985b067 100644
--- a/apidocs/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html
+++ b/apidocs/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html
(Generated Javadoc diff: anchor-only changes to buildDependencyClasspath(Configuration) and the addDependencyJars overloads, including the already-deprecated addDependencyJars(Configuration, Class...); no API change.)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.html b/apidocs/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.html
index ccdf2ba..10fe6a3 100644
--- a/apidocs/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.html
+++ b/apidocs/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.html
(Generated Javadoc diff, truncated: anchor-only change to getCurrentKey(); no API change.)

[23/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/index.html
--
diff --git a/devapidocs/index.html b/devapidocs/index.html
index fd64b07..eba7792 100644
--- a/devapidocs/index.html
+++ b/devapidocs/index.html
@@ -6,11 +6,12 @@
 
 Apache HBase 2.0.0-SNAPSHOT API
 

[32/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
index e237805..b2f1221 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
@@ -202,360 +202,393 @@
 194   * Add a new remote slave cluster for 
replication.
 195   * @param id a short name that 
identifies the cluster
 196   * @param peerConfig configuration for 
the replication slave cluster
-197   */
-198  public void addPeer(String id, 
ReplicationPeerConfig peerConfig) throws ReplicationException,
-199  IOException {
-200
checkNamespacesAndTableCfsConfigConflict(peerConfig.getNamespaces(),
-201  peerConfig.getTableCFsMap());
-202this.admin.addReplicationPeer(id, 
peerConfig);
-203  }
-204
-205  /**
-206   *  @deprecated as release of 2.0.0, 
and it will be removed in 3.0.0
-207   * */
-208  @Deprecated
-209  public static Map> parseTableCFsFromConfig(String tableCFsConfig) {
-210return 
ReplicationSerDeHelper.parseTableCFsFromConfig(tableCFsConfig);
-211  }
-212
-213  public void updatePeerConfig(String id, 
ReplicationPeerConfig peerConfig) throws IOException {
-214
this.admin.updateReplicationPeerConfig(id, peerConfig);
+197   * @deprecated use
+198   * {@link 
org.apache.hadoop.hbase.client.Admin#addReplicationPeer(String, 
ReplicationPeerConfig)}
+199   * instead
+200   */
+201  @Deprecated
+202  public void addPeer(String id, 
ReplicationPeerConfig peerConfig) throws ReplicationException,
+203  IOException {
+204
checkNamespacesAndTableCfsConfigConflict(peerConfig.getNamespaces(),
+205  peerConfig.getTableCFsMap());
+206this.admin.addReplicationPeer(id, 
peerConfig);
+207  }
+208
+209  /**
+210   *  @deprecated as release of 2.0.0, 
and it will be removed in 3.0.0
+211   * */
+212  @Deprecated
+213  public static Map> parseTableCFsFromConfig(String tableCFsConfig) {
+214return 
ReplicationSerDeHelper.parseTableCFsFromConfig(tableCFsConfig);
 215  }
 216
 217  /**
-218   * Removes a peer cluster and stops the 
replication to it.
-219   * @param id a short name that 
identifies the cluster
-220   */
-221  public void removePeer(String id) 
throws IOException {
-222
this.admin.removeReplicationPeer(id);
-223  }
-224
-225  /**
-226   * Restart the replication stream to 
the specified peer.
-227   * @param id a short name that 
identifies the cluster
-228   */
-229  public void enablePeer(String id) 
throws IOException {
-230
this.admin.enableReplicationPeer(id);
-231  }
-232
-233  /**
-234   * Stop the replication stream to the 
specified peer.
-235   * @param id a short name that 
identifies the cluster
-236   */
-237  public void disablePeer(String id) 
throws IOException {
-238
this.admin.disableReplicationPeer(id);
-239  }
-240
-241  /**
-242   * Get the number of slave clusters the 
local cluster has.
-243   * @return number of slave clusters
-244   * @throws IOException
-245   */
-246  public int getPeersCount() throws 
IOException {
-247return 
this.admin.listReplicationPeers().size();
-248  }
-249
-250  public Map listPeerConfigs() throws IOException {
-251
List peers = 
this.admin.listReplicationPeers();
-252Map result = new TreeMap();
-253for (ReplicationPeerDescription peer 
: peers) {
-254  result.put(peer.getPeerId(), 
peer.getPeerConfig());
-255}
-256return result;
+218   * @deprecated use
+219   * {@link 
org.apache.hadoop.hbase.client.Admin#updateReplicationPeerConfig(String, 
ReplicationPeerConfig)}
+220   * instead
+221   */
+222  @Deprecated
+223  public void updatePeerConfig(String id, 
ReplicationPeerConfig peerConfig) throws IOException {
+224
this.admin.updateReplicationPeerConfig(id, peerConfig);
+225  }
+226
+227  /**
+228   * Removes a peer cluster and stops the 
replication to it.
+229   * @param id a short name that 
identifies the cluster
+230   * @deprecated use {@link 
org.apache.hadoop.hbase.client.Admin#removeReplicationPeer(String)} instead
+231   */
+232  @Deprecated
+233  public void removePeer(String id) 
throws IOException {
+234
this.admin.removeReplicationPeer(id);
+235  }
+236
+237  /**
+238   * Restart the replication stream to 
the specified peer.
+239   * @param id a short name that 
identifies the cluster
+240   * @deprecated use {@link 
org.apache.hadoop.hbase.client.Admin#enableReplicationPeer(String)}
+241   * in
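
The deprecation notes in this hunk all point to the corresponding methods on org.apache.hadoop.hbase.client.Admin. A minimal migration sketch (the peer id and cluster key below are placeholders, not values from this commit) could look like this:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

public class ReplicationPeerMigrationSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      // Placeholder peer configuration for the remote (slave) cluster.
      ReplicationPeerConfig peerConfig = new ReplicationPeerConfig();
      peerConfig.setClusterKey("remote-zk1,remote-zk2,remote-zk3:2181:/hbase");

      // Replaces the deprecated ReplicationAdmin#addPeer / #enablePeer calls.
      admin.addReplicationPeer("1", peerConfig);
      admin.enableReplicationPeer("1");
    }
  }
}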

[16/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/CoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/CoprocessorEnvironment.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CoprocessorEnvironment.html
index 78e0360..83928fa 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CoprocessorEnvironment.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CoprocessorEnvironment.html
@@ -377,11 +377,11 @@
 
 
 void
-BaseMasterObserver.start(CoprocessorEnvironment ctx) 
+BaseMasterAndRegionObserver.start(CoprocessorEnvironment ctx) 
 
 
 void
-BaseRegionServerObserver.start(CoprocessorEnvironment env) 
+BaseRegionObserver.start(CoprocessorEnvironment e) 
 
 
 void
@@ -393,11 +393,7 @@
 
 
 void
-MultiRowMutationEndpoint.start(CoprocessorEnvironment env)
-Stores a reference to the coprocessor environment provided 
by the
- RegionCoprocessorHost 
from the region where this
- coprocessor is loaded.
-
+BaseRegionServerObserver.start(CoprocessorEnvironment env) 
 
 
 void
@@ -405,11 +401,15 @@
 
 
 void
-BaseMasterAndRegionObserver.start(CoprocessorEnvironment ctx) 
+MultiRowMutationEndpoint.start(CoprocessorEnvironment env)
+Stores a reference to the coprocessor environment provided 
by the
+ RegionCoprocessorHost 
from the region where this
+ coprocessor is loaded.
+
 
 
 void
-BaseRegionObserver.start(CoprocessorEnvironment e) 
+BaseMasterObserver.start(CoprocessorEnvironment ctx) 
 
 
 void
@@ -421,11 +421,11 @@
 
 
 void
-BaseMasterObserver.stop(CoprocessorEnvironment ctx) 
+BaseMasterAndRegionObserver.stop(CoprocessorEnvironment ctx) 
 
 
 void
-BaseRegionServerObserver.stop(CoprocessorEnvironment env) 
+BaseRegionObserver.stop(CoprocessorEnvironment e) 
 
 
 void
@@ -433,7 +433,7 @@
 
 
 void
-MultiRowMutationEndpoint.stop(CoprocessorEnvironment env) 
+BaseRegionServerObserver.stop(CoprocessorEnvironment env) 
 
 
 void
@@ -441,11 +441,11 @@
 
 
 void
-BaseMasterAndRegionObserver.stop(CoprocessorEnvironment ctx) 
+MultiRowMutationEndpoint.stop(CoprocessorEnvironment env) 
 
 
 void
-BaseRegionObserver.stop(CoprocessorEnvironment e) 
+BaseMasterObserver.stop(CoprocessorEnvironment ctx) 
 
 
 void
@@ -467,11 +467,15 @@
 
 
 void
-ZooKeeperScanPolicyObserver.start(CoprocessorEnvironment e) 
+RowCountEndpoint.start(CoprocessorEnvironment env)
+Stores a reference to the coprocessor environment provided 
by the
+ RegionCoprocessorHost 
from the region where this
+ coprocessor is loaded.
+
 
 
 void
-ExampleRegionObserverWithMetrics.start(CoprocessorEnvironment env) 
+ZooKeeperScanPolicyObserver.start(CoprocessorEnvironment e) 
 
 
 void
@@ -479,11 +483,7 @@
 
 
 void
-RowCountEndpoint.start(CoprocessorEnvironment env)
-Stores a reference to the coprocessor environment provided 
by the
- RegionCoprocessorHost 
from the region where this
- coprocessor is loaded.
-
+ExampleRegionObserverWithMetrics.start(CoprocessorEnvironment env) 
 
 
 void
@@ -491,11 +491,11 @@
 
 
 void
-ZooKeeperScanPolicyObserver.stop(CoprocessorEnvironment e) 
+RowCountEndpoint.stop(CoprocessorEnvironment env) 
 
 
 void
-ExampleRegionObserverWithMetrics.stop(CoprocessorEnvironment e) 
+ZooKeeperScanPolicyObserver.stop(CoprocessorEnvironment e) 
 
 
 void
@@ -503,7 +503,7 @@
 
 
 void
-RowCountEndpoint.stop(CoprocessorEnvironment env) 
+ExampleRegionObserverWithMetrics.stop(CoprocessorEnvironment e) 
 
 
 
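The start/stop rows above are the Coprocessor lifecycle hooks. For orientation, an illustrative observer (a hypothetical class, not part of this commit) that keeps a reference to its environment, in the same spirit as the MultiRowMutationEndpoint.start description quoted in this table:

import java.io.IOException;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;

public class EnvCachingObserver extends BaseRegionObserver {
  private CoprocessorEnvironment env;

  @Override
  public void start(CoprocessorEnvironment e) throws IOException {
    this.env = e;    // remember the environment handed to us at load time
  }

  @Override
  public void stop(CoprocessorEnvironment e) throws IOException {
    this.env = null; // release the reference when the coprocessor is unloaded
  }
}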

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/DoNotRetryIOException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/DoNotRetryIOException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/DoNotRetryIOException.html
index 3f986c0..43eadfd 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/DoNotRetryIOException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/DoNotRetryIOException.html
@@ -130,14 +130,18 @@
  
 
 
-org.apache.hadoop.hbase.security
+org.apache.hadoop.hbase.rsgroup
  
 
 
-org.apache.hadoop.hbase.security.visibility
+org.apache.hadoop.hbase.security
  
 
 
+org.apache.hadoop.hbase.security.visibility
+ 
+
+
 org.apache.hadoop.hbase.snapshot
  
 
@@ -275,8 +279,9 @@
 
 
 private void
-ClientScanner.handleScanError(DoNotRetryIOException e,
-   
org.apache.commons.lang.mutable.MutableBoolean retryAfterOutOfOrderException) 
+ClientScanner.handleScanError(DoNotRetryIOException e,
+   
org.apache.commons.lang.mutable.MutableBoolean retryAfterOutOfOrderException,
+   int retriesLeft) 
 
 
 
@@ -313,8 +318,9 @@
 
 
 private void
-ClientScanner.handleScanError(DoNotRetryIOException e,
-   
org.apache.commons.lang.mutable.MutableBoolean retryAfterOutOfOrderException) 
+ClientScanner.handleScanError(DoNotRetryIOException e,
+   
org.apache.comm

[46/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 356df7a..4f61fb9 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -1149,7 +1149,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Result.create(Cell[] cells,
   http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true";
 title="class or interface in java.lang">Boolean exists,
   boolean stale,
-  boolean partial) 
+  boolean mayHaveMoreCellsInRow) 
 
 
 
@@ -1185,15 +1185,15 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
   boolean partial) 
 
 
+Append
+Append.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
+
 Mutation
 Mutation.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map)
 Method for setting the put's familyMap
 
 
-
-Append
-Append.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
 
 Put
 Put.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
@@ -1222,30 +1222,38 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 Cell
-ColumnRangeFilter.getNextCellHint(Cell cell) 
+ColumnPrefixFilter.getNextCellHint(Cell cell) 
 
 
+Cell
+MultipleColumnPrefixFilter.getNextCellHint(Cell cell) 
+
+
 abstract Cell
 Filter.getNextCellHint(Cell currentCell)
 If the filter returns the match code SEEK_NEXT_USING_HINT, 
then it should also tell which is
  the next key it must seek to.
 
 
+
+Cell
+FilterList.getNextCellHint(Cell currentCell) 
+
 
 Cell
-ColumnPaginationFilter.getNextCellHint(Cell cell) 
+MultiRowRangeFilter.getNextCellHint(Cell currentKV) 
 
 
 Cell
-MultipleColumnPrefixFilter.getNextCellHint(Cell cell) 
+FuzzyRowFilter.getNextCellHint(Cell currentCell) 
 
 
 Cell
-MultiRowRangeFilter.getNextCellHint(Cell currentKV) 
+ColumnRangeFilter.getNextCellHint(Cell cell) 
 
 
 Cell
-FuzzyRowFilter.getNextCellHint(Cell currentCell) 
+ColumnPaginationFilter.getNextCellHint(Cell cell) 
 
 
 Cell
@@ -1255,11 +1263,11 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 Cell
-FilterList.getNextCellHint(Cell currentCell) 
+KeyOnlyFilter.transformCell(Cell cell) 
 
 
 Cell
-ColumnPrefixFilter.getNextCellHint(Cell cell) 
+WhileMatchFilter.transformCell(Cell v) 
 
 
 Cell
@@ -1273,14 +1281,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 Cell
-KeyOnlyFilter.transformCell(Cell cell) 
-
-
-Cell
-WhileMatchFilter.transformCell(Cell v) 
-
-
-Cell
 FilterList.transformCell(Cell c) 
 
 
@@ -1318,224 +1318,232 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterColumn(Cell cell) 
+ColumnPrefixFilter.filterColumn(Cell cell) 
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterColumn(Cell cell) 
+MultipleColumnPrefixFilter.filterColumn(Cell cell) 
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterKeyValue(Cell v) 
+PrefixFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cell kv) 
+PageFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-RandomRowFilter.filterKeyValue(Cell v) 
+KeyOnlyFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cell v) 
+WhileMatchFilter.filterKeyValue(Cell v) 
 
 
-abstract Filter.ReturnCode
-Filter.filterKeyValue(Cell v)
-A way to filter based on the column family, column 
qualifier and/or the column value.
-
+Filter.ReturnCode
+QualifierFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterKeyValue(Cell ignored) 
+FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell v)
+Deprecated. 
+ 
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filt
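
The table above is only an index of Filter.ReturnCode implementations, so a small hypothetical subclass may help show the filterKeyValue contract in use. This is a sketch only; a production filter would also need toByteArray/parseFrom for RPC serialization, which is omitted here.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.util.Bytes;

// Illustrative custom filter: skips any cell whose qualifier starts with "tmp_".
public class SkipTempQualifierFilter extends FilterBase {
  private static final byte[] PREFIX = Bytes.toBytes("tmp_");

  @Override
  public ReturnCode filterKeyValue(Cell cell) {
    byte[] qualifier = CellUtil.cloneQualifier(cell);
    if (Bytes.startsWith(qualifier, PREFIX)) {
      return ReturnCode.SKIP;    // drop this cell, keep scanning the row
    }
    return ReturnCode.INCLUDE;   // keep everything else
  }
}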

[19/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
index e2300eb..b9de745 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
@@ -366,11 +366,11 @@
 
 
 private Abortable
-SimpleRpcScheduler.abortable 
+RpcExecutor.abortable 
 
 
 private Abortable
-RpcExecutor.abortable 
+SimpleRpcScheduler.abortable 
 
 
 
@@ -611,11 +611,9 @@
 
 
 RpcScheduler
-RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+SimpleRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
   PriorityFunction priority,
-  Abortable server)
-Constructs a RpcScheduler.
-
+  Abortable server) 
 
 
 RpcScheduler
@@ -625,24 +623,26 @@
 
 
 RpcScheduler
-SimpleRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
   PriorityFunction priority,
-  Abortable server) 
+  Abortable server)
+Constructs a RpcScheduler.
+
 
 
 EntityLock
-HRegionServer.regionLock(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List regionInfos,
-  http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String description,
-  Abortable abort) 
-
-
-EntityLock
 RegionServerServices.regionLock(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List regionInfos,
   http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String description,
   Abortable abort)
 Master based locks on namespaces/tables/regions.
 
 
+
+EntityLock
+HRegionServer.regionLock(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List regionInfos,
+  http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String description,
+  Abortable abort) 
+
 
 
 



[26/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/constant-values.html
--
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 7a7bfb8..61a5c16 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -3756,28 +3756,28 @@
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 date
-"Sun Feb  5 14:39:15 UTC 2017"
+"Fri Feb 17 14:38:32 UTC 2017"
 
 
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 revision
-"26a94844f533b95db1f0a58d6a7cc3dc4a7a7098"
+"7763dd6688254d37ad611f5d290db47c83cf93d3"
 
 
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 srcChecksum
-"57ddabc214fb1ca2473372238917e103"
+"a08ec2b58a0d3951d83d356fe07e492c"
 
 
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 url
-"git://asf912.gq1.ygridcore.net/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase"
+"git://priapus.apache.org/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase"
 
 
 
@@ -8975,104 +8975,6 @@
 "authorizationSuccesses"
 
 
-
-
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-EXCEPTIONS_BUSY_NAME
-"exceptions.RegionTooBusyException"
-
-
-
-
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-EXCEPTIONS_CALL_QUEUE_TOO_BIG
-"exceptions.callQueueTooBig"
-
-
-
-
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-EXCEPTIONS_CALL_QUEUE_TOO_BIG_DESC
-"Call queue is full"
-
-
-
-
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-EXCEPTIONS_DESC
-"Exceptions caused by requests"
-
-
-
-
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-EXCEPTIONS_MOVED_NAME
-"exceptions.RegionMovedException"
-
-
-
-
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-EXCEPTIONS_MULTI_TOO_LARGE_DESC
-"A response to a multi request was too large and the 
rest of the requests will have to be retried."
-
-
-
-
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-EXCEPTIONS_MULTI_TOO_LARGE_NAME
-"exceptions.multiResponseTooLarge"
-
-
-
-
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-EXCEPTIONS_NAME
-"exceptions"
-
-
-
-
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-EXCEPTIONS_NSRE_NAME
-"exceptions.NotServingRegionException"
-
-
-
-
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-EXCEPTIONS_OOO_NAME
-"exceptions.OutOfOrderScannerNextException"
-
-
-
-
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-EXCEPTIONS_SANITY_NAME
-"exceptions.FailedSanityCheckException"
-
-
-
-
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-EXCEPTIONS_SCANNER_RESET_NAME
-"exceptions.ScannerResetException"
-
-
-
-
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-EXCEPTIONS_TYPE_DESC
-"Number of requests that resulted in the specified 
type of Exception"
-
-
-
-
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-EXCEPTIONS_UNKNOWN_NAME
-"exceptions.UnknownScannerException"
-
-
 
 
 public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
@@ -13174,6 +13076,116 @@
 
 
 
+org.apache.hadoop.hbase.metrics.ExceptionTrackingSource 
+
+Modifier and Type
+Constant Field
+Value
+
+
+
+
+
+public static fin

[34/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
index c00f30a..07dc9f6 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
@@ -29,1958 +29,1984 @@
 021import java.io.Closeable;
 022import java.io.IOException;
 023import java.util.ArrayList;
-024import java.util.List;
-025import java.util.Map;
-026import java.util.concurrent.Future;
-027import java.util.regex.Pattern;
-028
-029import 
org.apache.hadoop.conf.Configuration;
-030import 
org.apache.hadoop.hbase.Abortable;
-031import 
org.apache.hadoop.hbase.ClusterStatus;
-032import 
org.apache.hadoop.hbase.HColumnDescriptor;
-033import 
org.apache.hadoop.hbase.HRegionInfo;
-034import 
org.apache.hadoop.hbase.HTableDescriptor;
-035import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-036import 
org.apache.hadoop.hbase.NamespaceNotFoundException;
-037import 
org.apache.hadoop.hbase.ProcedureInfo;
-038import 
org.apache.hadoop.hbase.RegionLoad;
-039import 
org.apache.hadoop.hbase.ServerName;
-040import 
org.apache.hadoop.hbase.TableExistsException;
-041import 
org.apache.hadoop.hbase.TableName;
-042import 
org.apache.hadoop.hbase.TableNotFoundException;
-043import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-044import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-045import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-046import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-047import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-048import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-049import 
org.apache.hadoop.hbase.quotas.QuotaRetriever;
-050import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-051import 
org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
-052import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-053import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-054import 
org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
-055import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-056import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-057import 
org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
-058import 
org.apache.hadoop.hbase.util.Pair;
-059
-060/**
-061 * The administrative API for HBase. 
Obtain an instance from an {@link Connection#getAdmin()} and
-062 * call {@link #close()} afterwards.
-063 * 

Admin can be used to create, drop, list, enable and disable tables, add and drop table
-064 * column families and other administrative operations.
-065 *
-066 * @see ConnectionFactory
-067 * @see Connection
-068 * @see Table
-069 * @since 0.99.0
-070 */
-071@InterfaceAudience.Public
-072@InterfaceStability.Evolving
-073public interface Admin extends Abortable, Closeable {
-074 int getOperationTimeout();
-075
-076 @Override
-077 void abort(String why, Throwable e);
-078
-079 @Override
-080 boolean isAborted();
-081
-082 /**
-083 * @return Connection used by this object.
-084 */
-085 Connection getConnection();
-086
-087 /**
-088 * @param tableName Table to check.
-089 * @return True if table exists already.
-090 * @throws IOException
-091 */
-092 boolean tableExists(final TableName tableName) throws IOException;
-093
-094 /**
-095 * List all the userspace tables.
-096 *
-097 * @return - returns an array of HTableDescriptors
-098 * @throws IOException if a remote or network exception occurs
-099 */
-100 HTableDescriptor[] listTables() throws IOException;
-101
-102 /**
-103 * List all the userspace tables matching the given pattern.
-104 *
-105 * @param pattern The compiled regular expression to match against
-106 * @return - returns an array of HTableDescriptors
-107 * @throws IOException if a remote or network exception occurs
-108 * @see #listTables()
-109 */
-110 HTableDescriptor[] listTables(Pattern pattern) throws IOException;
-111
-112 /**
-113 * List all the userspace tables matching the given regular expression.
-114 *
-115 * @param regex The regular expression to match against
-116 * @return - returns an array of HTableDescriptors
-117 * @throws IOException if a remote or network exception occurs
-118 * @see #listTables(java.util.regex.Pattern)
-119 */
-120 HTableDescriptor[] listTables(String regex) throws IOException;
-121
-122 /**
-123 * List all the tables matching the given pattern.
-124 *
-125 * @param pattern The compiled regular expression to match against
-126 * @param includeSysTables False to match only against userspace tables
-127 * @return - returns an array of HTableDescriptors
-128 * @throws IOException if a remote or ne
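
To make the Admin javadoc shown in the removed lines above concrete, here is a minimal usage sketch based only on the methods visible in that excerpt; the table name is a placeholder.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class AdminUsageSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Obtain Admin from a Connection and close both afterwards, as the javadoc describes.
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      TableName name = TableName.valueOf("example_table"); // placeholder table name
      if (admin.tableExists(name)) {
        for (HTableDescriptor descriptor : admin.listTables()) {
          System.out.println(descriptor.getTableName());
        }
      }
    }
  }
}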


[51/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/ce958bce
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/ce958bce
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/ce958bce

Branch: refs/heads/asf-site
Commit: ce958bce81bbfd1487ee8b3841eef735545c5577
Parents: 2f960d3
Author: jenkins 
Authored: Fri Feb 17 15:01:46 2017 +
Committer: Michael Stack 
Committed: Fri Feb 17 15:35:07 2017 -0800

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 27815 ++---
 apache_hbase_reference_guide.pdfmarks   | 4 +-
 apidocs/allclasses-frame.html   | 1 +
 apidocs/allclasses-noframe.html | 1 +
 apidocs/deprecated-list.html|   142 +-
 apidocs/index-all.html  |   181 +-
 apidocs/index.html  |11 +-
 .../apache/hadoop/hbase/KeepDeletedCells.html   | 4 +-
 .../org/apache/hadoop/hbase/ProcedureState.html | 4 +-
 apidocs/org/apache/hadoop/hbase/ServerName.html |   154 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |   200 +-
 .../hbase/class-use/HColumnDescriptor.html  |20 +-
 .../hadoop/hbase/class-use/HRegionInfo.html | 9 +-
 .../hbase/class-use/HTableDescriptor.html   |24 +-
 .../hadoop/hbase/class-use/ServerName.html  |23 +-
 .../hadoop/hbase/class-use/TableName.html   |   187 +-
 .../org/apache/hadoop/hbase/client/Admin.html   |   764 +-
 .../hadoop/hbase/client/CompactionState.html| 4 +-
 .../apache/hadoop/hbase/client/Durability.html  | 4 +-
 .../org/apache/hadoop/hbase/client/Result.html  |   148 +-
 .../hadoop/hbase/client/SnapshotType.html   | 4 +-
 .../hadoop/hbase/client/class-use/Append.html   | 8 +-
 .../hbase/client/class-use/Consistency.html | 8 +-
 .../hadoop/hbase/client/class-use/Delete.html   |44 +-
 .../hbase/client/class-use/Durability.html  |16 +-
 .../hadoop/hbase/client/class-use/Get.html  |36 +-
 .../hbase/client/class-use/Increment.html   | 8 +-
 .../hbase/client/class-use/IsolationLevel.html  | 8 +-
 .../hadoop/hbase/client/class-use/Mutation.html | 8 +-
 .../hadoop/hbase/client/class-use/Put.html  |48 +-
 .../hadoop/hbase/client/class-use/Result.html   |32 +-
 .../hbase/client/class-use/ResultScanner.html   |26 +-
 .../hadoop/hbase/client/class-use/Row.html  | 4 +-
 .../hbase/client/class-use/RowMutations.html|16 +-
 .../hadoop/hbase/client/class-use/Scan.html |10 +-
 .../hadoop/hbase/client/package-tree.html   |12 +-
 .../client/replication/ReplicationAdmin.html|   169 +-
 .../hbase/filter/CompareFilter.CompareOp.html   | 4 +-
 .../filter/class-use/ByteArrayComparable.html   | 8 +-
 .../class-use/CompareFilter.CompareOp.html  |44 +-
 .../filter/class-use/Filter.ReturnCode.html |66 +-
 .../hadoop/hbase/filter/class-use/Filter.html   |46 +-
 .../hadoop/hbase/filter/package-tree.html   | 2 +-
 .../io/class-use/ImmutableBytesWritable.html|46 +-
 .../hadoop/hbase/io/class-use/TimeRange.html|20 +-
 .../hbase/io/crypto/class-use/Cipher.html   |16 +-
 .../hbase/io/encoding/DataBlockEncoding.html| 4 +-
 .../hbase/mapred/TableRecordReaderImpl.html |10 +-
 .../hbase/mapreduce/TableMapReduceUtil.html | 6 +-
 .../hbase/mapreduce/TableRecordReaderImpl.html  |10 +-
 .../hadoop/hbase/namespace/package-summary.html | 4 +-
 .../hadoop/hbase/namespace/package-tree.html| 4 +-
 .../hadoop/hbase/nio/package-summary.html   | 4 +-
 .../apache/hadoop/hbase/nio/package-tree.html   | 4 +-
 .../apache/hadoop/hbase/package-summary.html| 2 +-
 .../org/apache/hadoop/hbase/package-use.html| 8 +-
 .../apache/hadoop/hbase/quotas/QuotaScope.html  | 4 +-
 .../apache/hadoop/hbase/quotas/QuotaType.html   | 4 +-
 .../hadoop/hbase/quotas/ThrottleType.html   | 4 +-
 .../hbase/quotas/ThrottlingException.Type.html  | 4 +-
 .../hadoop/hbase/quotas/package-tree.html   | 2 +-
 .../hadoop/hbase/regionserver/BloomType.html| 4 +-
 .../class-use/ReplicationException.html |44 +-
 .../class-use/ReplicationPeerConfig.html|28 +-
 .../hadoop/hbase/replication/package-use.html   | 7 +-
 .../hadoop/hbase/rsgroup/RSGroupInfo.html   |56 +-
 .../hadoop/hbase/util/class-use/ByteRange.html  |   126 +-
 .../hadoop/hbase/util/class-use/Order.html  |44 +-
 .../hadoop/hbase/util/class-use/Pair.html   |16 +
 .../util/class-use/PositionedByteRange.html |   238 +-
 apidocs/overview-frame.html | 1 +
 apidocs/overview-summary.html 

[08/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/SettableSequenceId.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/SettableSequenceId.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/SettableSequenceId.html
index e6c1206..6a991e9 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/SettableSequenceId.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/SettableSequenceId.html
@@ -134,92 +134,105 @@
 
 
 
+class 
+ByteBufferKeyValue
+This Cell is an implementation of ByteBufferCell where the data 
resides in
+ off heap/ on heap ByteBuffer
+
+
+
 private static class 
 CellUtil.EmptyByteBufferCell 
 
-
+
 private static class 
 CellUtil.EmptyCell 
 
-
+
 private static class 
 CellUtil.FirstOnRowByteBufferCell 
 
-
+
 private static class 
 CellUtil.FirstOnRowCell 
 
-
+
 private static class 
 CellUtil.FirstOnRowColByteBufferCell 
 
-
+
 private static class 
 CellUtil.FirstOnRowColCell 
 
-
+
 private static class 
 CellUtil.FirstOnRowColTSByteBufferCell 
 
-
+
 private static class 
 CellUtil.FirstOnRowColTSCell 
 
-
+
 private static class 
 CellUtil.FirstOnRowDeleteFamilyCell 
 
-
+
 private static class 
 CellUtil.LastOnRowByteBufferCell 
 
-
+
 private static class 
 CellUtil.LastOnRowCell 
 
-
+
 private static class 
 CellUtil.LastOnRowColByteBufferCell 
 
-
+
 private static class 
 CellUtil.LastOnRowColCell 
 
-
+
 private static class 
 CellUtil.TagRewriteByteBufferCell 
 
-
+
 private static class 
 CellUtil.TagRewriteCell
 This can be used when a Cell has to change with 
addition/removal of one or more tags.
 
 
-
+
 private static class 
 CellUtil.ValueAndTagRewriteByteBufferCell 
 
-
+
 private static class 
 CellUtil.ValueAndTagRewriteCell 
 
-
+
 class 
 IndividualBytesFieldCell 
 
-
+
 class 
 KeyValue
 An HBase Key/Value.
 
 
-
+
 static class 
 KeyValue.KeyOnlyKeyValue
 A simple form of KeyValue that creates a keyvalue with only 
the key part of the byte[]
  Mainly used in places where we need to compare two cells.
 
 
+
+class 
+NoTagsByteBufferKeyValue
+An extension of the ByteBufferKeyValue where the tags 
length is always 0
+
+
 
 class 
 NoTagsKeyValue
@@ -228,18 +241,11 @@
 
 
 class 
-OffheapKeyValue
-This Cell is an implementation of ByteBufferCell where the data 
resides in off heap
- memory.
-
-
-
-class 
 SizeCachedKeyValue
 This class is an extension to KeyValue where rowLen and 
keyLen are cached.
 
 
-
+
 class 
 SizeCachedNoTagsKeyValue
 This class is an extension to ContentSizeCachedKeyValue 
where there are no tags in Cell.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/SettableTimestamp.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/SettableTimestamp.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/SettableTimestamp.html
index 347889c..67afe44 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/SettableTimestamp.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/SettableTimestamp.html
@@ -126,40 +126,53 @@
 
 
 
+class 
+ByteBufferKeyValue
+This Cell is an implementation of ByteBufferCell where the data 
resides in
+ off heap/ on heap ByteBuffer
+
+
+
 private static class 
 CellUtil.TagRewriteByteBufferCell 
 
-
+
 private static class 
 CellUtil.TagRewriteCell
 This can be used when a Cell has to change with 
addition/removal of one or more tags.
 
 
-
+
 private static class 
 CellUtil.ValueAndTagRewriteByteBufferCell 
 
-
+
 private static class 
 CellUtil.ValueAndTagRewriteCell 
 
-
+
 class 
 IndividualBytesFieldCell 
 
-
+
 class 
 KeyValue
 An HBase Key/Value.
 
 
-
+
 static class 
 KeyValue.KeyOnlyKeyValue
 A simple form of KeyValue that creates a keyvalue with only 
the key part of the byte[]
  Mainly used in places where we need to compare two cells.
 
 
+
+class 
+NoTagsByteBufferKeyValue
+An extension of the ByteBufferKeyValue where the tags 
length is always 0
+
+
 
 class 
 NoTagsKeyValue
@@ -168,18 +181,11 @@
 
 
 class 
-OffheapKeyValue
-This Cell is an implementation of ByteBufferCell where the data 
resides in off heap
- memory.
-
-
-
-class 
 SizeCachedKeyValue
 This class is an extension to KeyValue where rowLen and 
keyLen are cached.
 
 
-
+
 class 
 SizeCachedNoTagsKeyValue
 This class is an extension to ContentSizeCachedKeyValue 
where there are no tags in Cell.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/Stoppable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Stoppable.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Stoppable.html
index aab238a..5729c7c 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Stoppable.html
+++ b/devapidocs/org/apache/hadoop/hbase/cl

[21/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/OffheapTag.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/OffheapTag.html 
b/devapidocs/org/apache/hadoop/hbase/OffheapTag.html
deleted file mode 100644
index d5a8af6..000
--- a/devapidocs/org/apache/hadoop/hbase/OffheapTag.html
+++ /dev/null
@@ -1,489 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd";>
-
-
-
-
-
-OffheapTag (Apache HBase 2.0.0-SNAPSHOT API)
-
-
-
-
-
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-
-
-JavaScript is disabled on your browser.
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-Prev Class
-Next Class
-
-
-Frames
-No Frames
-
-
-All Classes
-
-
-
-
-
-
-
-Summary: 
-Nested | 
-Field | 
-Constr | 
-Method
-
-
-Detail: 
-Field | 
-Constr | 
-Method
-
-
-
-
-
-
-
-
-org.apache.hadoop.hbase
-Class OffheapTag
-
-
-
-http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">java.lang.Object
-
-
-org.apache.hadoop.hbase.OffheapTag
-
-
-
-
-
-
-
-All Implemented Interfaces:
-Tag
-
-
-
-@InterfaceAudience.Private
- @InterfaceStability.Evolving
-public class OffheapTag
-extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
-implements Tag
-This is a Tag implementation in which value is 
backed by an off heap
- http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-
-
-
-
-
-
-
-
-
-
-
-Field Summary
-
-Fields 
-
-Modifier and Type
-Field and Description
-
-
-private http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-buffer 
-
-
-private int
-length 
-
-
-private int
-offset 
-
-
-private byte
-type 
-
-
-
-
-
-
-Fields inherited from interface org.apache.hadoop.hbase.Tag
-INFRASTRUCTURE_SIZE,
 MAX_TAG_LENGTH,
 TAG_LENGTH_SIZE,
 TYPE_LENGTH_SIZE
-
-
-
-
-
-
-
-
-Constructor Summary
-
-Constructors 
-
-Constructor and Description
-
-
-OffheapTag(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer buffer,
-  int offset,
-  int length) 
-
-
-
-
-
-
-
-
-
-Method Summary
-
-All Methods Instance Methods Concrete Methods 
-
-Modifier and Type
-Method and Description
-
-
-byte
-getType() 
-
-
-byte[]
-getValueArray() 
-
-
-http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-getValueByteBuffer() 
-
-
-int
-getValueLength() 
-
-
-int
-getValueOffset() 
-
-
-boolean
-hasArray()
-Tells whether or not this Tag is backed by a byte 
array.
-
-
-
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-toString() 
-
-
-
-
-
-
-Methods inherited from class java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
-http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--";
 title="class or interface in java.lang">clone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-";
 title="class or interface in java.lang">equals, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--";
 title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--";
 title="class or interface in java.lang">getClass, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--";
 title="class or interface in java.lang">hashCode, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--";
 title="class or interface in java.lang">notify, http://docs.oracle.com/javase/8/docs/api/java/lang
 /Object.html?is-external=true#notifyAll--" title="class or interface in 
java.lang">notifyAll, http://docs.oracle.com/javase/8/docs/api/java/lang/Obj

[36/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/overview-frame.html
--
diff --git a/apidocs/overview-frame.html b/apidocs/overview-frame.html
index adc0b67..7811c0c 100644
--- a/apidocs/overview-frame.html
+++ b/apidocs/overview-frame.html
@@ -49,6 +49,7 @@
 org.apache.hadoop.hbase.mob
 org.apache.hadoop.hbase.mob.compactions
 org.apache.hadoop.hbase.namespace
+org.apache.hadoop.hbase.net
 org.apache.hadoop.hbase.nio
 org.apache.hadoop.hbase.quotas
 org.apache.hadoop.hbase.regionserver

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/overview-summary.html
--
diff --git a/apidocs/overview-summary.html b/apidocs/overview-summary.html
index 60dafb3..366fd0b 100644
--- a/apidocs/overview-summary.html
+++ b/apidocs/overview-summary.html
@@ -250,78 +250,82 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  
 
 
-org.apache.hadoop.hbase.nio
+org.apache.hadoop.hbase.net
  
 
 
-org.apache.hadoop.hbase.quotas
+org.apache.hadoop.hbase.nio
  
 
 
-org.apache.hadoop.hbase.regionserver
+org.apache.hadoop.hbase.quotas
  
 
 
-org.apache.hadoop.hbase.regionserver.querymatcher
+org.apache.hadoop.hbase.regionserver
  
 
 
-org.apache.hadoop.hbase.regionserver.throttle
+org.apache.hadoop.hbase.regionserver.querymatcher
  
 
 
-org.apache.hadoop.hbase.regionserver.wal
+org.apache.hadoop.hbase.regionserver.throttle
  
 
 
+org.apache.hadoop.hbase.regionserver.wal
+ 
+
+
 org.apache.hadoop.hbase.replication
 
 Multi Cluster Replication
 
 
-
+
 org.apache.hadoop.hbase.rest
 
 HBase REST
 
 
-
+
 org.apache.hadoop.hbase.rest.client
  
 
-
+
 org.apache.hadoop.hbase.rsgroup
  
 
-
+
 org.apache.hadoop.hbase.security
  
 
-
+
 org.apache.hadoop.hbase.shaded.com.google.protobuf
  
 
-
+
 org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler
  
 
-
+
 org.apache.hadoop.hbase.shaded.protobuf
  
 
-
+
 org.apache.hadoop.hbase.snapshot
  
 
-
+
 org.apache.hadoop.hbase.spark
  
 
-
+
 org.apache.hadoop.hbase.spark.example.hbasecontext
  
 
-
+
 org.apache.hadoop.hbase.types
 
 
@@ -329,23 +333,23 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  extensible data type API.
 
 
-
+
 org.apache.hadoop.hbase.util
  
 
-
+
 org.apache.hadoop.hbase.util.hbck
  
 
-
+
 org.apache.hadoop.hbase.wal
  
 
-
+
 org.apache.hadoop.hbase.zookeeper
  
 
-
+
 org.apache.hbase.archetypes.exemplars.client
 
 This package provides fully-functional exemplar Java code 
demonstrating
@@ -353,7 +357,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  archetype with hbase-client dependency.
 
 
-
+
 org.apache.hbase.archetypes.exemplars.shaded_client
 
 This package provides fully-functional exemplar Java code 
demonstrating

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/overview-tree.html
--
diff --git a/apidocs/overview-tree.html b/apidocs/overview-tree.html
index 144d9d9..9e87724 100644
--- a/apidocs/overview-tree.html
+++ b/apidocs/overview-tree.html
@@ -109,6 +109,7 @@
 org.apache.hadoop.hbase.mob,
 
 org.apache.hadoop.hbase.mob.compactions,
 
 org.apache.hadoop.hbase.namespace,
 
+org.apache.hadoop.hbase.net,
 
 org.apache.hadoop.hbase.nio,
 
 org.apache.hadoop.hbase.quotas,
 
 org.apache.hadoop.hbase.regionserver,
 
@@ -164,6 +165,7 @@
 org.apache.hadoop.hbase.snapshot.SnapshotInfo
 
 
+org.apache.hadoop.hbase.net.Address 
(implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable)
 org.apache.hadoop.hbase.AuthUtil
 org.apache.hadoop.hbase.util.Base64
 org.apache.hadoop.hbase.client.BufferedMutatorParams (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Cloneable.html?is-external=true";
 title="class or interface in java.lang">Cloneable)
@@ -875,29 +877,29 @@
 org.apache.hadoop.hbase.KeepDeletedCells
 org.apache.hadoop.hbase.MemoryCompactionPolicy
 org.apache.hadoop.hbase.ProcedureState
-org.apache.hadoop.hbase.io.encoding.DataBlockEncoding
-org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
 org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
 org.apache.hadoop.hbase.filter.Filter.ReturnCode
 org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
 org.apache.hadoop.hbase.filter.FilterList.Operator
+org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
 org.apache.hadoop.hbase.util.Order
-org.apache.hadoop.hbase.quotas.ThrottlingException.Type
-org.apache.hadoop.hbase.quotas.QuotaScope
-org.apache.hadoop.hbase.quotas.ThrottleType
-org.apache.hadoop.hbase.quotas.QuotaType
-org.apache.hadoop.hbase.regionserver.BloomType
+org.apache.hadoop.hbase.io.encoding.DataBlockEncoding
+org.apache.hadoop.hbase.client.CompactType
+org.apache.hadoop.hbase.client

[20/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/ServerName.html
index 14e3afd..947c9e6 100644
--- a/devapidocs/org/apache/hadoop/hbase/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/ServerName.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":9,"i5":10,"i6":10,"i7":10,"i8":9,"i9":9,"i10":9,"i11":9,"i12":10,"i13":10,"i14":10,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":10,"i23":10,"i24":9,"i25":9,"i26":9};
-var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":42,"i4":10,"i5":41,"i6":10,"i7":10,"i8":41,"i9":41,"i10":41,"i11":41,"i12":10,"i13":10,"i14":10,"i15":9,"i16":9,"i17":41,"i18":41,"i19":9,"i20":41,"i21":9,"i22":10,"i23":10,"i24":9,"i25":9,"i26":9};
+var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -119,12 +119,12 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
  @InterfaceStability.Evolving
-public class ServerName
+public class ServerName
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable
-Instance of an HBase ServerName.
- A server name is used uniquely identifying a server instance in a cluster and 
is made
- of the combination of hostname, port, and startcode.  The startcode 
distingushes restarted
+Name of a particular incarnation of an HBase Server.
+ A ServerName is used uniquely 
identifying a server instance in a cluster and is made
+ of the combination of hostname, port, and startcode.  The startcode 
distinguishes restarted
  servers on same hostname and port (startcode is usually timestamp of server 
startup). The
  toString()
 format of ServerName is safe to use in the  filesystem and as znode name
  up in ZooKeeper.  Its format is:
@@ -133,10 +133,14 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
  For example, if hostname is www.example.org, port is 
1234,
  and the startcode for the regionserver is 1212121212, then
  the toString()
 would be www.example.org,1234,1212121212.
- 
+
  You can obtain a versioned serialized form of this class by calling
- getVersionedBytes().
  To deserialize, call parseVersionedServerName(byte[])
- 
+ getVersionedBytes().
  To deserialize, call
+ parseVersionedServerName(byte[]).
+
+ Use getAddress()
 to obtain the Server hostname + port
+ (Endpoint/Socket Address).
+
  Immutable.
 
 See Also:
@@ -161,33 +165,25 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 Field and Description
 
 
+private Address
+address 
+
+
 private byte[]
 bytes
 Cached versioned bytes of this ServerName instance.
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 EMPTY_SERVER_LIST 
 
-
-private 
com.google.common.net.HostAndPort
-hostAndPort 
-
 
-private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-hostnameOnly 
-
-
 static int
 NON_STARTCODE
 What to use if no startcode supplied.
 
 
-
-private int
-port 
-
 
 private static long
 serialVersionUID 
@@ -244,15 +240,20 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 private 
-ServerName(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String serverName) 
+ServerName(Address address,
+  long startcode) 
 
 
+private 
+ServerName(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String serverName) 
+
+
 protected 
 ServerName(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String hostname,
   int port,
   long startcode) 
 
-
+
 private 
 ServerName(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String hostAndPort,
   long startCode) 
@@ -267,7 +268,7 @@ implements http://docs.oracle.com/javase/8/docs/api/
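
A small illustration of the ServerName contract described in the javadoc above; ServerName.valueOf is assumed here as the usual factory method and is not itself part of this diff.

import org.apache.hadoop.hbase.ServerName;

public class ServerNameSketch {
  public static void main(String[] args) {
    // Build a ServerName from hostname, port, and startcode.
    ServerName sn = ServerName.valueOf("www.example.org", 1234, 1212121212L);

    // toString() yields the filesystem/znode-safe form: www.example.org,1234,1212121212
    System.out.println(sn);

    // Round-trip through the versioned byte form mentioned in the javadoc.
    ServerName parsed = ServerName.parseVersionedServerName(sn.getVersionedBytes());
    System.out.println(parsed.getHostname() + ":" + parsed.getPort());
  }
}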

[18/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 5707f1a..84fea06 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -281,92 +281,105 @@ service.
 
 
 
+class 
+ByteBufferKeyValue
+This Cell is an implementation of ByteBufferCell where the data 
resides in
+ off heap/ on heap ByteBuffer
+
+
+
 private static class 
 CellUtil.EmptyByteBufferCell 
 
-
+
 private static class 
 CellUtil.EmptyCell 
 
-
+
 private static class 
 CellUtil.FirstOnRowByteBufferCell 
 
-
+
 private static class 
 CellUtil.FirstOnRowCell 
 
-
+
 private static class 
 CellUtil.FirstOnRowColByteBufferCell 
 
-
+
 private static class 
 CellUtil.FirstOnRowColCell 
 
-
+
 private static class 
 CellUtil.FirstOnRowColTSByteBufferCell 
 
-
+
 private static class 
 CellUtil.FirstOnRowColTSCell 
 
-
+
 private static class 
 CellUtil.FirstOnRowDeleteFamilyCell 
 
-
+
 private static class 
 CellUtil.LastOnRowByteBufferCell 
 
-
+
 private static class 
 CellUtil.LastOnRowCell 
 
-
+
 private static class 
 CellUtil.LastOnRowColByteBufferCell 
 
-
+
 private static class 
 CellUtil.LastOnRowColCell 
 
-
+
 private static class 
 CellUtil.TagRewriteByteBufferCell 
 
-
+
 private static class 
 CellUtil.TagRewriteCell
 This can be used when a Cell has to change with 
addition/removal of one or more tags.
 
 
-
+
 private static class 
 CellUtil.ValueAndTagRewriteByteBufferCell 
 
-
+
 private static class 
 CellUtil.ValueAndTagRewriteCell 
 
-
+
 class 
 IndividualBytesFieldCell 
 
-
+
 class 
 KeyValue
 An HBase Key/Value.
 
 
-
+
 static class 
 KeyValue.KeyOnlyKeyValue
 A simple form of KeyValue that creates a keyvalue with only 
the key part of the byte[]
  Mainly used in places where we need to compare two cells.
 
 
+
+class 
+NoTagsByteBufferKeyValue
+An extension of the ByteBufferKeyValue where the tags 
length is always 0
+
+
 
 class 
 NoTagsKeyValue
@@ -375,18 +388,11 @@ service.
 
 
 class 
-OffheapKeyValue
-This Cell is an implementation of ByteBufferCell where the data 
resides in off heap
- memory.
-
-
-
-class 
 SizeCachedKeyValue
 This class is an extension to KeyValue where rowLen and 
keyLen are cached.
 
 
-
+
 class 
 SizeCachedNoTagsKeyValue
 This class is an extension to ContentSizeCachedKeyValue 
where there are no tags in Cell.
@@ -625,41 +631,45 @@ service.
 
 
 Cell
-ExtendedCell.deepClone()
-Does a deep copy of the contents to a new memory area and 
returns it as a new cell.
-
+NoTagsByteBufferKeyValue.deepClone() 
 
 
 Cell
-NoTagsKeyValue.deepClone() 
+CellUtil.TagRewriteCell.deepClone() 
 
 
 Cell
-IndividualBytesFieldCell.deepClone() 
+CellUtil.TagRewriteByteBufferCell.deepClone() 
 
 
 Cell
-OffheapKeyValue.deepClone() 
+CellUtil.ValueAndTagRewriteCell.deepClone() 
 
 
 Cell
-KeyValue.deepClone() 
+CellUtil.ValueAndTagRewriteByteBufferCell.deepClone() 
 
 
 Cell
-CellUtil.TagRewriteCell.deepClone() 
+KeyValue.deepClone() 
 
 
 Cell
-CellUtil.TagRewriteByteBufferCell.deepClone() 
+IndividualBytesFieldCell.deepClone() 
 
 
 Cell
-CellUtil.ValueAndTagRewriteCell.deepClone() 
+NoTagsKeyValue.deepClone() 
 
 
 Cell
-CellUtil.ValueAndTagRewriteByteBufferCell.deepClone() 
+ByteBufferKeyValue.deepClone() 
+
+
+Cell
+ExtendedCell.deepClone()
+Does a deep copy of the contents to a new memory area and 
returns it as a new cell.
+
 
 
 
@@ -758,16 +768,6 @@ service.
 
 
 int
-CellComparator.compare(Cell a,
-   Cell b) 
-
-
-int
-CellComparator.RowComparator.compare(Cell a,
-   Cell b) 
-
-
-int
 KeyValue.MetaComparator.compare(Cell left,
Cell right)
 Deprecated. 
@@ -792,6 +792,16 @@ service.
  
 
 
+int
+CellComparator.compare(Cell a,
+   Cell b) 
+
+
+int
+CellComparator.RowComparator.compare(Cell a,
+   Cell b) 
+
+
 private int
 CellComparator.compare(Cell a,
Cell b,
@@ -967,37 +977,37 @@ service.
 
 
 int
+KeyValue.KVComparator.compareRows(Cell left,
+   Cell right)
+Deprecated. 
+ 
+
+
+int
 CellComparator.compareRows(Cell left,
Cell right)
 Compares the rows of the left and right cell.
 
 
-
+
 int
 CellComparator.MetaCellComparator.compareRows(Cell left,
Cell right) 
 
-
+
 int
-KeyValue.KVComparator.compareRows(Cell left,
-   Cell right)
+KeyValue.KVComparator.compareTimestamps(Cell left,
+ Cell right)
 Deprecated. 
  
 
-
+
 static int
 CellComparator.compareTimestamps(Cell left,
  Cell right)
 Compares cell's timestamps in DESCENDING order.
 
 
-
-int
-KeyValue.KVComparator.compareTimestamps(Cell left,
- Cell right)
-Deprecated. 
- 
-
 
 static int
 CellComparator.compareValue(Cell cell,
@@ -1517,32 +1527,32 @@ service.
 
 
 
-boolean
-KeyValue.

[03/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/client/Admin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Admin.html 
b/devapidocs/org/apache/hadoop/hbase/client/Admin.html
index 94d14fa..56df122 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Admin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Admin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":38,"i4":6,"i5":6,"i6":18,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":38,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":18,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":18,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":38,"i67":6,"i68":6,"i69":6,"i70":6,"i71":6,"i72":6,"i73":6,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":18,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":18,"i108":18,"i109":18,"i110":6,"i111":6,"i112":6,"i113":6,"i114":6,"i115":6,"i116":6,"i117":6,"i118":6,"i119":6,"i
 
120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6,"i127":6,"i128":6,"i129":6,"i130":6,"i131":6,"i132":6,"i133":38,"i134":6,"i135":6,"i136":38,"i137":6,"i138":6,"i139":6,"i140":6,"i141":6,"i142":6,"i143":6,"i144":6,"i145":6,"i146":6,"i147":18,"i148":6,"i149":6,"i150":6,"i151":6,"i152":6,"i153":6,"i154":6,"i155":6,"i156":6,"i157":6,"i158":6,"i159":6,"i160":6,"i161":6,"i162":6,"i163":6,"i164":6,"i165":6,"i166":6,"i167":6,"i168":6,"i169":6,"i170":6,"i171":6,"i172":6,"i173":6,"i174":6,"i175":6,"i176":6,"i177":6,"i178":6,"i179":18};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":38,"i4":6,"i5":6,"i6":18,"i7":18,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":38,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":18,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":18,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":38,"i68":6,"i69":6,"i70":6,"i71":6,"i72":6,"i73":6,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":18,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":18,"i109":18,"i110":18,"i111":6,"i112":6,"i113":6,"i114":6,"i115":6,"i116":6,"i117":6,"i118":6,"i119":6,"
 
i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6,"i127":6,"i128":6,"i129":6,"i130":6,"i131":6,"i132":6,"i133":6,"i134":38,"i135":6,"i136":6,"i137":38,"i138":6,"i139":6,"i140":6,"i141":6,"i142":6,"i143":6,"i144":6,"i145":6,"i146":6,"i147":6,"i148":18,"i149":18,"i150":6,"i151":6,"i152":6,"i153":6,"i154":6,"i155":6,"i156":6,"i157":6,"i158":6,"i159":6,"i160":6,"i161":6,"i162":6,"i163":6,"i164":6,"i165":6,"i166":6,"i167":6,"i168":6,"i169":6,"i170":6,"i171":6,"i172":6,"i173":6,"i174":6,"i175":6,"i176":6,"i177":6,"i178":6,"i179":6,"i180":6,"i181":18};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],16:["t5","Default 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -111,7 +111,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
  @InterfaceStability.Evolving
-public interface Admin
+public interface Admin
 extends Abortable, http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true";
 title="class or interface in java.io">Closeable
 The administrative API for HBase. Obtain an instance from 
an Connection.getAdmin()
 and
  call close()
 afterwards.
@@ -198,36 +198,43 @@ extends 
+default void
+appendReplicationPeerTableCFs(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String id,
+ http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map<TableName,? extends Collection<String>> tableCfs)
+Append the replicable table-cf config of the specified 
peer
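
The hunk above adds a default appendReplicationPeerTableCFs(String, Map) to the
Admin interface, appending to the replicable table-cf config of a peer. A short
sketch of calling it the way the interface description suggests, obtaining Admin
from a Connection and closing both afterwards; the peer id "1", table "t1" and
family "cf" are placeholders, and the checked exceptions are glossed over with a
broad throws clause:

import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class AppendPeerTableCfsSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Obtain an Admin from a Connection; both are closed by try-with-resources.
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      // Add table t1, column family cf, to what peer "1" is allowed to replicate.
      Map<TableName, List<String>> tableCfs =
          Collections.singletonMap(TableName.valueOf("t1"), Collections.singletonList("cf"));
      admin.appendReplicationPeerTableCFs("1", tableCfs);
    }
  }
}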

[44/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/client/Admin.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Admin.html 
b/apidocs/org/apache/hadoop/hbase/client/Admin.html
index 6d71c8d..cb43e4e 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Admin.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Admin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":38,"i4":6,"i5":6,"i6":18,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":38,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":18,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":18,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":38,"i67":6,"i68":6,"i69":6,"i70":6,"i71":6,"i72":6,"i73":6,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":18,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":18,"i108":18,"i109":18,"i110":6,"i111":6,"i112":6,"i113":6,"i114":6,"i115":6,"i116":6,"i117":6,"i118":6,"i119":6,"i
 
120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6,"i127":6,"i128":6,"i129":6,"i130":6,"i131":6,"i132":6,"i133":38,"i134":6,"i135":6,"i136":38,"i137":6,"i138":6,"i139":6,"i140":6,"i141":6,"i142":6,"i143":6,"i144":6,"i145":6,"i146":6,"i147":18,"i148":6,"i149":6,"i150":6,"i151":6,"i152":6,"i153":6,"i154":6,"i155":6,"i156":6,"i157":6,"i158":6,"i159":6,"i160":6,"i161":6,"i162":6,"i163":6,"i164":6,"i165":6,"i166":6,"i167":6,"i168":6,"i169":6,"i170":6,"i171":6,"i172":6,"i173":6,"i174":6,"i175":6,"i176":6,"i177":6,"i178":6,"i179":18};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":38,"i4":6,"i5":6,"i6":18,"i7":18,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":38,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":18,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":18,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":38,"i68":6,"i69":6,"i70":6,"i71":6,"i72":6,"i73":6,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":18,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":18,"i109":18,"i110":18,"i111":6,"i112":6,"i113":6,"i114":6,"i115":6,"i116":6,"i117":6,"i118":6,"i119":6,"
 
i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6,"i127":6,"i128":6,"i129":6,"i130":6,"i131":6,"i132":6,"i133":6,"i134":38,"i135":6,"i136":6,"i137":38,"i138":6,"i139":6,"i140":6,"i141":6,"i142":6,"i143":6,"i144":6,"i145":6,"i146":6,"i147":6,"i148":18,"i149":18,"i150":6,"i151":6,"i152":6,"i153":6,"i154":6,"i155":6,"i156":6,"i157":6,"i158":6,"i159":6,"i160":6,"i161":6,"i162":6,"i163":6,"i164":6,"i165":6,"i166":6,"i167":6,"i168":6,"i169":6,"i170":6,"i171":6,"i172":6,"i173":6,"i174":6,"i175":6,"i176":6,"i177":6,"i178":6,"i179":6,"i180":6,"i181":18};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],16:["t5","Default 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -107,7 +107,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
  @InterfaceStability.Evolving
-public interface Admin
+public interface Admin
 extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true";
 title="class or interface in java.io">Closeable
 The administrative API for HBase. Obtain an instance from 
an Connection.getAdmin()
 and
  call close()
 afterwards.
@@ -194,36 +194,43 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 
+default void
+appendReplicationPeerTableCFs(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String id,
+ http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapCollectionStr

[01/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 2f960d3f3 -> c6ddb98fc


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html
index 35de701..748b6ce 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder
+public class AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder
 extends AsyncRpcRetryingCallerFactory.BuilderBase
 
 
@@ -278,7 +278,7 @@ extends 
 
 tableName
-private TableName tableName
+private TableName tableName
 
 
 
@@ -287,7 +287,7 @@ extends 
 
 row
-private byte[] row
+private byte[] row
 
 
 
@@ -296,7 +296,7 @@ extends 
 
 callable
-private AsyncSingleRequestRpcRetryingCaller.Callable callable
+private AsyncSingleRequestRpcRetryingCaller.Callable callable
 
 
 
@@ -305,7 +305,7 @@ extends 
 
 operationTimeoutNs
-private long operationTimeoutNs
+private long operationTimeoutNs
 
 
 
@@ -314,7 +314,7 @@ extends 
 
 rpcTimeoutNs
-private long rpcTimeoutNs
+private long rpcTimeoutNs
 
 
 
@@ -323,7 +323,7 @@ extends 
 
 locateType
-private RegionLocateType locateType
+private RegionLocateType locateType
 
 
 
@@ -340,7 +340,7 @@ extends 
 
 SingleRequestCallerBuilder
-public SingleRequestCallerBuilder()
+public SingleRequestCallerBuilder()
 
 
 
@@ -357,7 +357,7 @@ extends 
 
 table
-public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder table(TableName tableName)
+public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder table(TableName tableName)
 
 
 
@@ -366,7 +366,7 @@ extends 
 
 row
-public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder row(byte[] row)
+public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder row(byte[] row)
 
 
 
@@ -375,7 +375,7 @@ extends 
 
 action
-public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder action(AsyncSingleRequestRpcRetryingCaller.Callable callable)
+public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder action(AsyncSingleRequestRpcRetryingCaller.Callable callable)
 
 
 
@@ -384,7 +384,7 @@ extends 
 
 operationTimeout
-public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder operationTimeout(long operationTimeout,
+public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder operationTimeout(long operationTimeout,

 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true";
 title="class or interface in 
java.util.concurrent">TimeUnit unit)
 
 
@@ -394,7 +394,7 @@ extends 
 
 rpcTimeout
-public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder rpcTimeout(long rpcTimeout,
+public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder rpcTimeout(long rpcTimeout,
   
http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true";
 title="class or interface in 
java.util.concurrent">TimeUnit unit)
 
 
@@ -404,7 +404,7 @@ extends 
 
 locateType
-public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder locateType(RegionLocateType locateType)
+public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder locateType(RegionLocateType locateType)
 
 
 
@@ -413,7 +413,7 @@ extends 
 
 pause
-public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder pause(long pause,
+public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder pause(long pause,
  http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true";
 title="class or interface in 
java.util.concurrent">TimeUnit unit)
 
 
@@ -423,7 +423,7 @@ extends 
 
 maxAttempts
-public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder maxAttempts(int maxAttempts)
+public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder maxAttempts(int maxAttempts)
 
 
 
@@ -432,7 +432,7 @@ extends 
 
 startLogErrorsCnt
-public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder startLogErrorsCnt(int startLogErrorsCnt)
+public AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder startLogErrorsCnt(int startLogErrorsCnt)
 
 
 
@@ -441,7 +441,7 @@ extends 
 
 build
-public AsyncSingleRequestRpcRetryingCalle
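
The SingleRequestCallerBuilder entries above only re-render generic types, but
they list the builder's whole fluent surface: table, row, action, locateType,
operationTimeout, rpcTimeout, pause, maxAttempts, startLogErrorsCnt and build.
A rough sketch of how those setters chain. Assumptions: the builder's <T> type
parameter, the RegionLocateType.CURRENT constant, and that the sketch sits
inside org.apache.hadoop.hbase.client, since these are private async-client
types; how the builder instance is obtained from the factory is left out.

// Sketch only: assumes package-level access to the private async client types.
package org.apache.hadoop.hbase.client;

import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;

class SingleCallerBuilderSketch {

  // Chains the setters shown above and builds the retrying caller.
  static <T> AsyncSingleRequestRpcRetryingCaller<T> configure(
      AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder<T> builder,
      AsyncSingleRequestRpcRetryingCaller.Callable<T> rpc) {
    return builder
        .table(TableName.valueOf("t1"))          // placeholder table name
        .row(Bytes.toBytes("row-1"))             // row used to locate the target region
        .action(rpc)                             // the RPC to run on each attempt
        .locateType(RegionLocateType.CURRENT)    // locate the region containing the row
        .operationTimeout(30, TimeUnit.SECONDS)  // overall budget across all retries
        .rpcTimeout(10, TimeUnit.SECONDS)        // budget for one RPC attempt
        .pause(100, TimeUnit.MILLISECONDS)       // back-off between attempts
        .maxAttempts(3)                          // stop retrying after three tries
        .startLogErrorsCnt(2)                    // only log failures from this attempt on
        .build();
  }
}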

[13/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
index a3f46d5..0962a0c 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
@@ -273,30 +273,30 @@ service.
 HTableMultiplexer.FlushWorker.addr 
 
 
-HRegionLocation
-AsyncClientScanner.OpenScannerResponse.loc 
-
-
 private HRegionLocation
 AsyncScanSingleRegionRpcRetryingCaller.loc 
 
-
+
 HRegionLocation
 AsyncBatchRpcRetryingCaller.RegionRequest.loc 
 
-
+
 private HRegionLocation
 AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.loc 
 
+
+HRegionLocation
+AsyncClientScanner.OpenScannerResponse.loc 
+
 
 protected HRegionLocation
-RegionServerCallable.location
-Some subclasses want to set their own location.
-
+RegionAdminServiceCallable.location 
 
 
 protected HRegionLocation
-RegionAdminServiceCallable.location 
+RegionServerCallable.location
+Some subclasses want to set their own location.
+
 
 
 
@@ -340,11 +340,11 @@ service.
 
 
 protected HRegionLocation
-MultiServerCallable.getLocation() 
+RegionServerCallable.getLocation() 
 
 
 protected HRegionLocation
-RegionServerCallable.getLocation() 
+MultiServerCallable.getLocation() 
 
 
 HRegionLocation
@@ -352,44 +352,44 @@ service.
 
 
 HRegionLocation
-RegionLocator.getRegionLocation(byte[] row)
+HRegionLocator.getRegionLocation(byte[] row)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-HRegionLocator.getRegionLocation(byte[] row)
+RegionLocator.getRegionLocation(byte[] row)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-RegionLocator.getRegionLocation(byte[] row,
+HRegionLocator.getRegionLocation(byte[] row,
  boolean reload)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-HRegionLocator.getRegionLocation(byte[] row,
+RegionLocator.getRegionLocation(byte[] row,
  boolean reload)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-ConnectionImplementation.getRegionLocation(TableName tableName,
- byte[] row,
- boolean reload) 
-
-
-HRegionLocation
 ClusterConnection.getRegionLocation(TableName tableName,
  byte[] row,
  boolean reload)
 Find region location hosting passed row
 
 
+
+HRegionLocation
+ConnectionImplementation.getRegionLocation(TableName tableName,
+ byte[] row,
+ boolean reload) 
+
 
 private HRegionLocation
 AsyncRequestFutureImpl.getReplicaLocationOrFail(Action action) 
@@ -403,20 +403,15 @@ service.
 
 
 HRegionLocation
-ConnectionImplementation.locateRegion(byte[] regionName) 
-
-
-HRegionLocation
 ClusterConnection.locateRegion(byte[] regionName)
 Gets the location of the region of regionName.
 
 
-
+
 HRegionLocation
-ConnectionImplementation.locateRegion(TableName tableName,
-byte[] row) 
+ConnectionImplementation.locateRegion(byte[] regionName) 
 
-
+
 HRegionLocation
 ClusterConnection.locateRegion(TableName tableName,
 byte[] row)
@@ -424,6 +419,11 @@ service.
  lives in.
 
 
+
+HRegionLocation
+ConnectionImplementation.locateRegion(TableName tableName,
+byte[] row) 
+
 
 private HRegionLocation
 AsyncNonMetaRegionLocator.locateRowBeforeInCache(AsyncNonMetaRegionLocator.TableCache tableCache,
@@ -438,17 +438,17 @@ service.
 
 
 HRegionLocation
-ConnectionImplementation.relocateRegion(TableName tableName,
-  byte[] row) 
-
-
-HRegionLocation
 ClusterConnection.relocateRegion(TableName tableName,
   byte[] row)
 Find the location of the region of tableName that 
row
  lives in, ignoring any value that might be in the cache.
 
 
+
+HRegionLocation
+ConnectionImplementation.relocateRegion(TableName tableName,
+  byte[] row) 
+
 
 
 
@@ -460,13 +460,13 @@ service.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-RegionLocator.getAllRegionLocations()
-Retrieves all of the regions associated with this 
table.
-
+HRegionLocator.getAllRegionLocations() 
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HRegionLocator.getAllRegionLocations() 
+RegionLocator.getAllRegionLocations()
+Retrieves all of the regions associated with this 
table.
+
 
 
 private PairList,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-extern
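
Most of the reshuffled rows above are the public RegionLocator surface:
getRegionLocation(byte[]) finds the region serving a row (with an optional
reload flag that bypasses the cache) and getAllRegionLocations() retrieves
every region of the table. A small sketch of both calls against a placeholder
table "t1", assuming a reachable cluster:

import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.util.Bytes;

public class RegionLocatorSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         RegionLocator locator = connection.getRegionLocator(TableName.valueOf("t1"))) {
      // Which region currently serves this row? Pass reload=true to skip the cache.
      HRegionLocation loc = locator.getRegionLocation(Bytes.toBytes("row-1"), true);
      System.out.println("row-1 is served by " + loc.getServerName());

      // All regions of the table, in one call.
      List<HRegionLocation> all = locator.getAllRegionLocations();
      System.out.println("t1 has " + all.size() + " regions");
    }
  }
}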

[14/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
index 5ab276d..31886b5 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
@@ -915,7 +915,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HRegionInfo
-MultiServerCallable.getHRegionInfo() 
+ScannerCallableWithReplicas.getHRegionInfo() 
 
 
 HRegionInfo
@@ -923,11 +923,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HRegionInfo
-ScannerCallableWithReplicas.getHRegionInfo() 
+ScannerCallable.getHRegionInfo() 
 
 
 HRegionInfo
-ScannerCallable.getHRegionInfo() 
+MultiServerCallable.getHRegionInfo() 
 
 
 private HRegionInfo
@@ -1007,6 +1007,18 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
HRegionInfo hri) 
 
 
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+AsyncAdmin.closeRegion(ServerName sn,
+   HRegionInfo hri)
+Close a region.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+AsyncHBaseAdmin.closeRegion(ServerName sn,
+   HRegionInfo hri) 
+
+
 private void
 HBaseAdmin.compact(ServerName sn,
HRegionInfo hri,
@@ -1259,22 +1271,28 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
+BaseMasterAndRegionObserver.postAssign(ObserverContext ctx,
+  HRegionInfo regionInfo) 
+
+
+void
 MasterObserver.postAssign(ObserverContext ctx,
   HRegionInfo regionInfo)
 Called after the region assignment has been requested.
 
 
-
+
 void
 BaseMasterObserver.postAssign(ObserverContext ctx,
   HRegionInfo regionInfo) 
 
-
+
 void
-BaseMasterAndRegionObserver.postAssign(ObserverContext ctx,
-  HRegionInfo regionInfo) 
+BaseMasterAndRegionObserver.postCompletedCreateTableAction(ObserverContext ctx,
+  HTableDescriptor desc,
+  HRegionInfo[] regions) 
 
-
+
 void
 MasterObserver.postCompletedCreateTableAction(ObserverContext ctx,
   HTableDescriptor desc,
@@ -1282,17 +1300,23 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 Called after the createTable operation has been 
requested.
 
 
-
+
 void
 BaseMasterObserver.postCompletedCreateTableAction(ObserverContext ctx,
   HTableDescriptor desc,
   HRegionInfo[] regions) 
 
+
+void
+BaseMasterAndRegionObserver.postCompletedMergeRegionsAction(ObserverContext c,
+   HRegionInfo[] regionsToMerge,
+   HRegionInfo mergedRegion) 
+
 
 void
-BaseMasterAndRegionObserver.postCompletedCreateTableAction(ObserverContext ctx,
-  HTableDescriptor desc,
-  HRegionInfo[] regions) 
+BaseMasterAndRegionObserver.postCompletedMergeRegionsAction(ObserverContext c,
+   HRegionInfo[] regionsToMerge,
+   HRegionInfo mergedRegion) 
 
 
 void
@@ -1324,37 +1348,31 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-BaseMasterAndRegionObserver.postCompletedMergeRegionsAction(ObserverContext c,
-   HRegionInfo[] regionsToMerge,
-   HRegionInfo mergedRegion) 
+BaseMasterAndRegionObserver.postCompletedSplitRegionAction(ObserverContext c,
+  HRegionInfo regionInfoA,
+  HRegionInfo regionInfoB) 
 
 
 void
-BaseMasterAndRegionObserver.postCompletedMergeRegionsAction(ObserverContext c,
-   HRegionInfo[] regionsToMerge,
-   HRegionInfo mergedRegion) 
-
-
-void
 MasterObs
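
The MasterObserver hooks shuffled above, postAssign (called after a region
assignment has been requested) and postCompletedCreateTableAction (called after
a createTable operation has been requested), are usually picked up by extending
one of the Base*Observer classes so only the interesting hooks need overriding.
A minimal sketch assuming BaseMasterObserver and the hook signatures shown
above; the logging is purely illustrative:

import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;

public class AssignmentLoggingObserver extends BaseMasterObserver {
  private static final Log LOG = LogFactory.getLog(AssignmentLoggingObserver.class);

  @Override
  public void postAssign(ObserverContext<MasterCoprocessorEnvironment> ctx,
      HRegionInfo regionInfo) throws IOException {
    // Called after the region assignment has been requested.
    LOG.info("Assignment requested for " + regionInfo.getRegionNameAsString());
  }

  @Override
  public void postCompletedCreateTableAction(ObserverContext<MasterCoprocessorEnvironment> ctx,
      HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
    // Called after the createTable operation has been requested.
    LOG.info("Created " + desc.getTableName() + " with " + regions.length + " regions");
  }
}

Such an observer would be registered by listing the class under
hbase.coprocessor.master.classes in hbase-site.xml.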

[38/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
index eb84a60..72f56a8 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
@@ -250,18 +250,18 @@
 
 
 ByteRange
-ByteRange.deepCopy()
-Create a new ByteRange with new backing byte[] 
containing a copy
- of the content from this range's window.
-
+SimpleByteRange.deepCopy() 
 
 
 ByteRange
-SimpleByteRange.deepCopy() 
+SimpleMutableByteRange.deepCopy() 
 
 
 ByteRange
-SimpleMutableByteRange.deepCopy() 
+ByteRange.deepCopy()
+Create a new ByteRange with new backing byte[] 
containing a copy
+ of the content from this range's window.
+
 
 
 ByteRange
@@ -281,6 +281,16 @@
 
 
 ByteRange
+SimpleByteRange.put(int index,
+   byte val) 
+
+
+ByteRange
+SimpleMutableByteRange.put(int index,
+   byte val) 
+
+
+ByteRange
 ByteRange.put(int index,
byte val)
 Store val at index.
@@ -288,13 +298,13 @@
 
 
 ByteRange
-SimpleByteRange.put(int index,
-   byte val) 
+SimpleByteRange.put(int index,
+   byte[] val) 
 
 
 ByteRange
-SimpleMutableByteRange.put(int index,
-   byte val) 
+SimpleMutableByteRange.put(int index,
+   byte[] val) 
 
 
 ByteRange
@@ -305,13 +315,17 @@
 
 
 ByteRange
-SimpleByteRange.put(int index,
-   byte[] val) 
+SimpleByteRange.put(int index,
+   byte[] val,
+   int offset,
+   int length) 
 
 
 ByteRange
-SimpleMutableByteRange.put(int index,
-   byte[] val) 
+SimpleMutableByteRange.put(int index,
+   byte[] val,
+   int offset,
+   int length) 
 
 
 ByteRange
@@ -325,17 +339,13 @@
 
 
 ByteRange
-SimpleByteRange.put(int index,
-   byte[] val,
-   int offset,
-   int length) 
+SimpleByteRange.putInt(int index,
+  int val) 
 
 
 ByteRange
-SimpleMutableByteRange.put(int index,
-   byte[] val,
-   int offset,
-   int length) 
+SimpleMutableByteRange.putInt(int index,
+  int val) 
 
 
 ByteRange
@@ -346,13 +356,13 @@
 
 
 ByteRange
-SimpleByteRange.putInt(int index,
-  int val) 
+SimpleByteRange.putLong(int index,
+   long val) 
 
 
 ByteRange
-SimpleMutableByteRange.putInt(int index,
-  int val) 
+SimpleMutableByteRange.putLong(int index,
+   long val) 
 
 
 ByteRange
@@ -363,13 +373,13 @@
 
 
 ByteRange
-SimpleByteRange.putLong(int index,
-   long val) 
+SimpleByteRange.putShort(int index,
+short val) 
 
 
 ByteRange
-SimpleMutableByteRange.putLong(int index,
-   long val) 
+SimpleMutableByteRange.putShort(int index,
+short val) 
 
 
 ByteRange
@@ -380,25 +390,21 @@
 
 
 ByteRange
-SimpleByteRange.putShort(int index,
-short val) 
+SimpleByteRange.set(byte[] bytes) 
 
 
 ByteRange
-SimpleMutableByteRange.putShort(int index,
-short val) 
-
-
-ByteRange
 ByteRange.set(byte[] bytes)
 Reuse this ByteRange over a new byte[].
 
 
-
+
 ByteRange
-SimpleByteRange.set(byte[] bytes) 
+SimpleByteRange.set(byte[] bytes,
+   int offset,
+   int length) 
 
-
+
 ByteRange
 ByteRange.set(byte[] bytes,
int offset,
@@ -406,22 +412,16 @@
 Reuse this ByteRange over a new byte[].
 
 
-
+
 ByteRange
-SimpleByteRange.set(byte[] bytes,
-   int offset,
-   int length) 
+SimpleByteRange.set(int capacity) 
 
-
+
 ByteRange
 ByteRange.set(int capacity)
 Reuse this ByteRange over a new byte[].
 
 
-
-ByteRange
-SimpleByteRange.set(int capacity) 
-
 
 ByteRange
 ByteRange.setLength(int length)
@@ -436,17 +436,27 @@
 
 
 ByteRange
+SimpleByteRange.shallowCopy() 
+
+
+ByteRange
+SimpleMutableByteRange.shallowCopy() 
+
+
+ByteRange
 ByteRange.shallowCopy()
 Create a new ByteRange that points at this 
range's byte[].
 
 
 
 ByteRange
-SimpleByteRange.shallowCopy() 
+SimpleByteRange.shallowCopySubRange(int innerOffset,
+   int copyLength) 
 
 
 ByteRange
-SimpleMutableByteRange.shallowCopy() 
+SimpleMutableByteRange.shallowCopySubRange(int innerOffset,
+   int copyLength) 
 
 
 ByteRange
@@ -457,13 +467,11 @@
 
 
 ByteRange
-SimpleByteRange.shallowCopySubRange(int innerOffset,
-   int copyLength) 
+SimpleByteRange.unset() 
 
 
 ByteRange
-SimpleMutableByteRange.shallowCopySubRange(int innerOffset,
-   int copyLength) 
+SimpleMutableByteRange.unset() 
 
 
 ByteRange
@@ -471,14 +479,6 @@
 Nullifies this ByteRange.
 
 
-
-ByteRange
-SimpleByteRange.unset() 
-
-
-ByteRange
-SimpleMutableByteRange.unset() 
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
index 5257ca6..1da835b 100644
--- a/apidocs/org/apa
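
The ByteRange rows reshuffled above describe a small byte-window API: put
stores bytes at an index, set re-points the range at a new byte[], deepCopy
copies the window into a fresh backing array, shallowCopy only re-points at the
same one, and unset nullifies the range. A short sketch against
SimpleMutableByteRange, the mutable implementation listed alongside; the sample
bytes are arbitrary and the deepCopyToNewArray helper used for printing is
assumed from the same interface:

import org.apache.hadoop.hbase.util.ByteRange;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.SimpleMutableByteRange;

public class ByteRangeSketch {
  public static void main(String[] args) {
    // A mutable range over its own backing array.
    ByteRange range = new SimpleMutableByteRange(Bytes.toBytes("hello"));
    range.put(0, (byte) 'H');                 // store a value at index 0

    ByteRange deep = range.deepCopy();        // new backing byte[] with copied content
    ByteRange shallow = range.shallowCopy();  // points at the same backing byte[]

    range.put(1, (byte) 'E');                 // visible through shallow, not through deep
    System.out.println(Bytes.toString(shallow.deepCopyToNewArray()));  // "HEllo"
    System.out.println(Bytes.toString(deep.deepCopyToNewArray()));     // "Hello"

    range.unset();                            // nullify the range when done with it
  }
}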

[50/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/acid-semantics.html
--
diff --git a/acid-semantics.html b/acid-semantics.html
index fa4300b..d0b08af 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase –  
   Apache HBase (TM) ACID Properties
@@ -618,7 +618,7 @@ under the License. -->
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-02-05
+  Last Published: 
2017-02-17
 
 
 



[28/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 06707d0..a2eb3fa 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -298,10 +298,10 @@
  Warnings
  Errors
 
-2109
+2120
 0
 0
-14066
+14111
 
 Files
 
@@ -316,30 +316,30 @@
 0
 1
 
-maven-archiver/pom.properties
-0
-0
-1
-
 org/apache/hadoop/hbase/AsyncMetaTableAccessor.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/AuthUtil.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/BaseConfigurable.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ByteBufferKeyOnlyKeyValue.java
 0
 0
 3
+
+org/apache/hadoop/hbase/ByteBufferKeyValue.java
+0
+0
+1
 
 org/apache/hadoop/hbase/Cell.java
 0
@@ -534,7 +534,7 @@
 org/apache/hadoop/hbase/ServerName.java
 0
 0
-28
+27
 
 org/apache/hadoop/hbase/SettableSequenceId.java
 0
@@ -654,7 +654,7 @@
 org/apache/hadoop/hbase/client/Admin.java
 0
 0
-73
+77
 
 org/apache/hadoop/hbase/client/Append.java
 0
@@ -664,24 +664,24 @@
 org/apache/hadoop/hbase/client/AsyncAdmin.java
 0
 0
-4
+12
 
-org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.java
+org/apache/hadoop/hbase/client/AsyncAdminRequestRetryingCaller.java
 0
 0
-3
+1
 
-org/apache/hadoop/hbase/client/AsyncClientScanner.java
+org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.java
 0
 0
-5
+3
 
-org/apache/hadoop/hbase/client/AsyncConnection.java
+org/apache/hadoop/hbase/client/AsyncClientScanner.java
 0
 0
-1
+5
 
-org/apache/hadoop/hbase/client/AsyncConnectionImpl.java
+org/apache/hadoop/hbase/client/AsyncConnection.java
 0
 0
 1
@@ -689,7 +689,7 @@
 org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
 0
 0
-15
+19
 
 org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java
 0
@@ -699,7 +699,7 @@
 org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
 0
 0
-5
+4
 
 org/apache/hadoop/hbase/client/AsyncProcess.java
 0
@@ -729,7 +729,7 @@
 org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.java
 0
 0
-2
+4
 
 org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.java
 0
@@ -786,445 +786,445 @@
 0
 2
 
-org/apache/hadoop/hbase/client/ClientSimpleScanner.java
-0
-0
-1
-
 org/apache/hadoop/hbase/client/ClusterStatusListener.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/CompactType.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ConnectionConfiguration.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ConnectionFactory.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ConnectionImplementation.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/CoprocessorHConnection.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/DelayingRunner.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/Delete.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/Get.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/HBaseAdmin.java
 0
 0
-80
-
+83
+
 org/apache/hadoop/hbase/client/HRegionLocator.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/HTable.java
 0
 0
 24
-
+
 org/apache/hadoop/hbase/client/HTableInterface.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/HTableMultiplexer.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/HTableWrapper.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/client/Increment.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/MasterCallable.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/MetaCache.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/MetricsConnection.java
 0
 0
 39
-
+
 org/apache/hadoop/hbase/client/MultiAction.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/MultiResponse.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/MultiServerCallable.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/client/Mutation.java
 0
 0
 21
-
+
 org/apache/hadoop/hbase/client/Operation.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/PackagePrivateFieldAccessor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/client/Put.java
 0
 0
 19
-
+
 org/apache/hadoop/hbase/client/Query.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/client/RawAsyncTable.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/RawScanResultConsumer.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/RegionAdminServiceCallable.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/RegionReplicaUtil.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/RegionServerCallable.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/Registry.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/RegistryFactory.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/RequestController.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/RequestControllerFactory.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/clie

[40/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 8969141..2b8632c 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -107,115 +107,115 @@
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterColumn(Cell cell) 
+ColumnPrefixFilter.filterColumn(Cell cell) 
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterColumn(Cell cell) 
+MultipleColumnPrefixFilter.filterColumn(Cell cell) 
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterKeyValue(Cell v) 
+PrefixFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cell kv) 
+PageFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-RandomRowFilter.filterKeyValue(Cell v) 
+KeyOnlyFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cell v) 
+WhileMatchFilter.filterKeyValue(Cell v) 
 
 
-abstract Filter.ReturnCode
-Filter.filterKeyValue(Cell v)
-A way to filter based on the column family, column 
qualifier and/or the column value.
-
+Filter.ReturnCode
+QualifierFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterKeyValue(Cell ignored) 
+FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell v)
+Deprecated. 
+ 
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterKeyValue(Cell v) 
+SkipFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-PageFilter.filterKeyValue(Cell ignored) 
+ColumnPrefixFilter.filterKeyValue(Cell cell) 
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterKeyValue(Cell v) 
+ColumnCountGetFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterKeyValue(Cell kv) 
+DependentColumnFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cell v) 
+MultipleColumnPrefixFilter.filterKeyValue(Cell kv) 
 
 
-Filter.ReturnCode
-FirstKeyOnlyFilter.filterKeyValue(Cell v) 
+abstract Filter.ReturnCode
+Filter.filterKeyValue(Cell v)
+A way to filter based on the column family, column 
qualifier and/or the column value.
+
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterKeyValue(Cell ignored) 
+FamilyFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-RowFilter.filterKeyValue(Cell v) 
+FilterList.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-FuzzyRowFilter.filterKeyValue(Cell c) 
+FirstKeyOnlyFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-QualifierFilter.filterKeyValue(Cell v) 
+RowFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell v)
-Deprecated. 
- 
+ValueFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterKeyValue(Cell c) 
+MultiRowRangeFilter.filterKeyValue(Cell ignored) 
 
 
 Filter.ReturnCode
-TimestampsFilter.filterKeyValue(Cell v) 
+InclusiveStopFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cell v) 
+FuzzyRowFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-PrefixFilter.filterKeyValue(Cell v) 
+SingleColumnValueFilter.filterKeyValue(Cell c) 
 
 
 Filter.ReturnCode
-FilterList.filterKeyValue(Cell c) 
+RandomRowFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cell v) 
+ColumnRangeFilter.filterKeyValue(Cell kv) 
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cell cell) 
+ColumnPaginationFilter.filterKeyValue(Cell v) 
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterKeyValue(Cell c) 
+TimestampsFilter.filterKeyValue(Cell v) 
 
 
 static Filter.ReturnCode

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index 2968f40..4222c2b 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -160,15 +160,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Scan.setFilter(Filter filter) 
 
 
+Get
+Get.setFilter(Filter filter) 
+
+
 Query
 Query.setFilter(Filter filter)
 Apply the specified server-side filter when performing the 
Query.
 
 
-
-Get
-Get.setFilter(Filter filter) 
-
 
 
 
@@ -394,11 +394,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Filter
-InclusiveStopFilter.createFilterFromArguments(http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterA

[11/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
index a78123f..83705e7 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
@@ -219,15 +219,15 @@ the order they are declared.
 
 
 
-private KeepDeletedCells
-LegacyScanQueryMatcher.keepDeletedCells
-Deprecated. 
+protected KeepDeletedCells
+CompactionScanQueryMatcher.keepDeletedCells
 whether to return deleted rows
 
 
 
-protected KeepDeletedCells
-CompactionScanQueryMatcher.keepDeletedCells
+private KeepDeletedCells
+LegacyScanQueryMatcher.keepDeletedCells
+Deprecated. 
 whether to return deleted rows
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
index 766db09..5e9771c 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
@@ -201,22 +201,22 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static KeyValue
-KeyValueUtil.create(http://docs.oracle.com/javase/8/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in) 
+KeyValue.create(http://docs.oracle.com/javase/8/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in) 
 
 
 static KeyValue
-KeyValue.create(http://docs.oracle.com/javase/8/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in) 
+KeyValueUtil.create(http://docs.oracle.com/javase/8/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in) 
 
 
 static KeyValue
-KeyValueUtil.create(int length,
+KeyValue.create(int length,
   http://docs.oracle.com/javase/8/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in)
 Create a KeyValue reading length from 
in
 
 
 
 static KeyValue
-KeyValue.create(int length,
+KeyValueUtil.create(int length,
   http://docs.oracle.com/javase/8/docs/api/java/io/DataInput.html?is-external=true";
 title="class or interface in java.io">DataInput in)
 Create a KeyValue reading length from 
in
 
@@ -332,31 +332,31 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static KeyValue
-KeyValueUtil.createKeyValueFromKey(byte[] b) 
+KeyValue.createKeyValueFromKey(byte[] b) 
 
 
 static KeyValue
-KeyValue.createKeyValueFromKey(byte[] b) 
+KeyValueUtil.createKeyValueFromKey(byte[] b) 
 
 
 static KeyValue
-KeyValueUtil.createKeyValueFromKey(byte[] b,
+KeyValue.createKeyValueFromKey(byte[] b,
  int o,
  int l) 
 
 
 static KeyValue
-KeyValue.createKeyValueFromKey(byte[] b,
+KeyValueUtil.createKeyValueFromKey(byte[] b,
  int o,
  int l) 
 
 
 static KeyValue
-KeyValueUtil.createKeyValueFromKey(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer bb) 
+KeyValue.createKeyValueFromKey(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer bb) 
 
 
 static KeyValue
-KeyValue.createKeyValueFromKey(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer bb) 
+KeyValueUtil.createKeyValueFromKey(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer bb) 
 
 
 static KeyValue
@@ -526,17 +526,17 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static long
-KeyValueUtil.write(KeyValue kv,
+KeyValue.write(KeyValue kv,
  http://docs.oracle.com/javase/8/docs/api/java/io/DataOutput.html?is-external=true";
 title="class or interface in java.io">DataOutput out)
-Write out a KeyValue in the manner in which we used to when 
KeyValue was a
- Writable.
+Write out a KeyValue in the manner in which we used to when 
KeyValue was a Writable.
 
 
 
 static long
-KeyValue.write(KeyValue kv,
+KeyValueUtil.write(KeyValue kv,
  http://docs.oracle.com/javase/8/docs/api/java/io/DataOutput.html?is-external=true";
 title="class or interface in java.io">DataOutput out)
-Write out a KeyValue in the manner in which we used to when 
KeyValue was a Writable.
+Write
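
The KeyValue rows above pair KeyValueUtil.write, which writes a KeyValue out
the way the old Writable serialization did, with KeyValueUtil.create(DataInput),
which reads one back. A round-trip sketch over an in-memory stream, assuming
write frames the cell with the length prefix that create expects; the sample
cell is arbitrary:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class KeyValueRoundTripSketch {
  public static void main(String[] args) throws Exception {
    KeyValue kv = new KeyValue(Bytes.toBytes("row-1"), Bytes.toBytes("cf"),
        Bytes.toBytes("q"), Bytes.toBytes("v1"));

    // Write the KeyValue out in the old Writable-style form.
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    KeyValueUtil.write(kv, new DataOutputStream(buffer));

    // Read it back; create(DataInput) consumes the length prefix written above.
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
    KeyValue copy = KeyValueUtil.create(in);
    System.out.println("round-tripped: " + copy);
  }
}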

[52/52] hbase-site git commit: Empty commit

2017-02-17 Thread stack
Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/c6ddb98f
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/c6ddb98f
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/c6ddb98f

Branch: refs/heads/asf-site
Commit: c6ddb98fc6fc73fd75f2cfe0789263db4e9373a5
Parents: ce958bc
Author: Michael Stack 
Authored: Fri Feb 17 15:42:22 2017 -0800
Committer: Michael Stack 
Committed: Fri Feb 17 15:42:22 2017 -0800

--

--




[27/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/coc.html
--
diff --git a/coc.html b/coc.html
index 9fd05d4..6d64b27 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – 
   Code of Conduct Policy
@@ -349,7 +349,7 @@ For flagrant violations requiring a firm response the PMC 
may opt to skip early
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-02-05
+  Last Published: 
2017-02-17
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/cygwin.html
--
diff --git a/cygwin.html b/cygwin.html
index bf3db10..280efe5 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Installing Apache HBase (TM) on Windows using 
Cygwin
 
@@ -691,7 +691,7 @@ Now your HBase server is running, start 
coding and build that next
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-02-05
+  Last Published: 
2017-02-17
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index d0156bf..6002943 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Dependencies
 
@@ -536,7 +536,7 @@
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-02-05
+  Last Published: 
2017-02-17
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index f25f0a6..3838d7c 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Reactor Dependency Convergence
 
@@ -1048,7 +1048,6 @@
 http://hbase.apache.org/hbase-procedure";>org.apache.hbase:hbase-procedure
 http://hbase.apache.org/hbase-rest";>org.apache.hbase:hbase-rest
 http://hbase.apache.org/hbase-rsgroup";>org.apache.hbase:hbase-rsgroup
-http://hbase.apache.org/hbase-rsgroup";>org.apache.hbase:hbase-rsgroup
 http://hbase.apache.org/hbase-server";>org.apache.hbase:hbase-server
 http://hbase.apache.org/hbase-server";>org.apache.hbase:hbase-server
 http://hbase.apache.org/hbase-shell";>org.apache.hbase:hbase-shell
@@ -1872,7 +1871,7 @@
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-02-05
+  Last Published: 
2017-02-17
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/dependency-info.html
--
diff --git a/dependency-info.html b/dependency-info.html
index 482f752..1558395 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Dependency Information
 
@@ -330,7 +330,7 @@
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-02-05
+  Last Published: 
2017-02-17
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/dependency-management.html
--
diff --git a/dependency-management.html b/dependency-management.html
index 32e6f0c..6381b50 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Dependency Management
 
@@ -918,7 +918,7 @@
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-02-05
+  Last Published: 
2017-02-17
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/allclasses-frame.html
--
diff --git a/devapidocs/all

[30/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.html
index d649da0..3150448 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.html
@@ -169,158 +169,160 @@
 161   *
 162   */
 163  public void close() {
-164this.scanner.close();
-165try {
-166  this.htable.close();
-167} catch (IOException ioe) {
-168  LOG.warn("Error closing table", 
ioe);
-169}
-170  }
-171
-172  /**
-173   * Returns the current key.
-174   *
-175   * @return The current key.
-176   * @throws IOException
-177   * @throws InterruptedException When 
the job is aborted.
-178   */
-179  public ImmutableBytesWritable 
getCurrentKey() throws IOException,
-180  InterruptedException {
-181return key;
-182  }
-183
-184  /**
-185   * Returns the current value.
-186   *
-187   * @return The current value.
-188   * @throws IOException When the value 
is faulty.
-189   * @throws InterruptedException When 
the job is aborted.
-190   */
-191  public Result getCurrentValue() throws 
IOException, InterruptedException {
-192return value;
-193  }
-194
-195
-196  /**
-197   * Positions the record reader to the 
next record.
-198   *
-199   * @return 
true if there was another record.
-200   * @throws IOException When reading the 
record failed.
-201   * @throws InterruptedException When 
the job was aborted.
-202   */
-203  public boolean nextKeyValue() throws 
IOException, InterruptedException {
-204if (key == null) key = new 
ImmutableBytesWritable();
-205if (value == null) value = new 
Result();
-206try {
-207  try {
-208value = this.scanner.next();
-209if (value != null && 
value.isStale()) numStale++;
-210if (logScannerActivity) {
-211  rowcount ++;
-212  if (rowcount >= 
logPerRowCount) {
-213long now = 
System.currentTimeMillis();
-214LOG.info("Mapper took " + 
(now-timestamp)
-215  + "ms to process " + 
rowcount + " rows");
-216timestamp = now;
-217rowcount = 0;
-218  }
-219}
-220  } catch (IOException e) {
-221// do not retry if the exception 
tells us not to do so
-222if (e instanceof 
DoNotRetryIOException) {
-223  throw e;
-224}
-225// try to handle all other 
IOExceptions by restarting
-226// the scanner, if the second 
call fails, it will be rethrown
-227LOG.info("recovered from " + 
StringUtils.stringifyException(e));
-228if (lastSuccessfulRow == null) 
{
-229  LOG.warn("We are restarting the 
first next() invocation," +
-230  " if your mapper has 
restarted a few other times like this" +
-231  " then you should consider 
killing this job and investigate" +
-232  " why it's taking so 
long.");
-233}
-234if (lastSuccessfulRow == null) 
{
-235  restart(scan.getStartRow());
-236} else {
-237  restart(lastSuccessfulRow);
-238  scanner.next();// skip 
presumed already mapped row
-239}
-240value = scanner.next();
-241if (value != null && 
value.isStale()) numStale++;
-242numRestarts++;
-243  }
-244  if (value != null && 
value.size() > 0) {
-245key.set(value.getRow());
-246lastSuccessfulRow = key.get();
-247return true;
-248  }
-249
-250  updateCounters();
-251  return false;
-252} catch (IOException ioe) {
-253  if (logScannerActivity) {
-254long now = 
System.currentTimeMillis();
-255LOG.info("Mapper took " + 
(now-timestamp)
-256  + "ms to process " + rowcount + 
" rows");
-257LOG.info(ioe);
-258String lastRow = 
lastSuccessfulRow == null ?
-259  "null" : 
Bytes.toStringBinary(lastSuccessfulRow);
-260LOG.info("lastSuccessfulRow=" + 
lastRow);
-261  }
-262  throw ioe;
-263}
-264  }
-265
-266  /**
-267   * If hbase runs on new version of 
mapreduce, RecordReader has access to
-268   * counters thus can update counters 
based on scanMetrics.
-269   * If hbase runs on old version of 
mapreduce, it won't be able to get
-270   * access to counters and 
TableRecorderReader can't update counter values.
-271   * @throws IOException
-272   */
-273  private void updateCounters() throws 
IOException {
-274ScanMetrics scanMetrics = 
currentScan.getScanMetrics();
-275if (scanMetrics == null) {
-276  return;
-277}
-278
-279updateCounters(scanMetrics, 
numRestarts, getCounter, context, numStale);
-280  }
-281
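
The TableRecordReaderImpl source above is where a map task actually pulls rows:
nextKeyValue() fetches the next Result, restarts the scanner after a
recoverable IOException (skipping the row presumed already mapped), and close()
shuts down the scanner and table. Job code normally never touches it; it sits
behind TableInputFormat when a job is wired up with TableMapReduceUtil, roughly
as in this sketch, where the table name, scan tuning and mapper are
placeholders:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;

public class RowCountingJobSketch {

  // A do-nothing mapper: the record reader above feeds it one (row key, Result) pair per row.
  static class CountingMapper extends TableMapper<NullWritable, NullWritable> {
    @Override
    protected void map(ImmutableBytesWritable rowKey, Result result, Context context) {
      context.getCounter("sketch", "rows").increment(1);
    }
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    Job job = Job.getInstance(conf, "row-counting-sketch");
    job.setJarByClass(RowCountingJobSketch.class);

    Scan scan = new Scan();
    scan.setCaching(500);          // larger batches per scanner.next() call
    scan.setCacheBlocks(false);    // recommended for MapReduce scans over large tables

    // Wires TableInputFormat (and thus TableRecordReaderImpl) into the job.
    TableMapReduceUtil.initTableMapperJob("t1", scan, CountingMapper.class,
        NullWritable.class, NullWritable.class, job);
    job.setOutputFormatClass(NullOutputFormat.class);
    job.setNumReduceTasks(0);

    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}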

[45/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html 
b/apidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
index e373ad7..db3ef41 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
@@ -477,34 +477,34 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
-AsyncAdmin.createTable(HTableDescriptor desc)
+void
+Admin.createTable(HTableDescriptor desc)
 Creates a new table.
 
 
 
-void
-Admin.createTable(HTableDescriptor desc)
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+AsyncAdmin.createTable(HTableDescriptor desc)
 Creates a new table.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
-AsyncAdmin.createTable(HTableDescriptor desc,
+void
+Admin.createTable(HTableDescriptor desc,
byte[][] splitKeys)
 Creates a new table with an initial set of empty regions 
defined by the specified split keys.
 
 
 
-void
-Admin.createTable(HTableDescriptor desc,
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+AsyncAdmin.createTable(HTableDescriptor desc,
byte[][] splitKeys)
 Creates a new table with an initial set of empty regions 
defined by the specified split keys.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
-AsyncAdmin.createTable(HTableDescriptor desc,
+void
+Admin.createTable(HTableDescriptor desc,
byte[] startKey,
byte[] endKey,
int numRegions)
@@ -512,8 +512,8 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-void
-Admin.createTable(HTableDescriptor desc,
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+AsyncAdmin.createTable(HTableDescriptor desc,
byte[] startKey,
byte[] endKey,
int numRegions)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index f90d305..fc6a047 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -320,59 +320,66 @@
 
 
 
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+AsyncAdmin.closeRegion(ServerName sn,
+   HRegionInfo hri)
+Close a region.
+
+
+
 void
 Admin.compactRegionServer(ServerName sn,
boolean major)
 Compact all regions on the region server
 
 
-
+
 CoprocessorRpcChannel
 Admin.coprocessorService(ServerName sn)
 Creates and returns a RpcChannel instance
  connected to the passed region server.
 
 
-
+
 void
 RequestController.decTaskCounters(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in 
java.util">Collection regions,
S
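
The createTable overloads reshuffled above all start from an HTableDescriptor:
a plain createTable(desc), a variant pre-split on explicit split keys, and one
that generates numRegions regions between a start and end key. A brief sketch
of the split-key form; the table name, column family, replication scope and
split points are placeholder choices:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.util.Bytes;

public class CreateTableSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      // One column family, replicated globally as an example of a per-family option.
      HColumnDescriptor family = new HColumnDescriptor("cf")
          .setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
      HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("t1"));
      desc.addFamily(family);

      // Pre-split into three regions: (-inf,"b"), ["b","m"), ["m",+inf).
      byte[][] splitKeys = { Bytes.toBytes("b"), Bytes.toBytes("m") };
      admin.createTable(desc, splitKeys);
    }
  }
}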

[17/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
index 7ffb6db..d095eed 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
@@ -253,39 +253,39 @@
 
 
 int
-BufferedDataBlockEncoder.BufferedEncodedSeeker.compareKey(CellComparator comparator,
+RowIndexSeekerV1.compareKey(CellComparator comparator,
   Cell key) 
 
 
 int
-RowIndexSeekerV1.compareKey(CellComparator comparator,
+BufferedDataBlockEncoder.BufferedEncodedSeeker.compareKey(CellComparator comparator,
   Cell key) 
 
 
 DataBlockEncoder.EncodedSeeker
+PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
+HFileBlockDecodingContext decodingCtx) 
+
+
+DataBlockEncoder.EncodedSeeker
 DataBlockEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx)
 Create a HFileBlock seeker which find KeyValues within a 
block.
 
 
-
-DataBlockEncoder.EncodedSeeker
-DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
-HFileBlockDecodingContext decodingCtx) 
-
 
 DataBlockEncoder.EncodedSeeker
-CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
+RowIndexCodecV1.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-RowIndexCodecV1.createSeeker(CellComparator comparator,
+DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
+CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
@@ -329,20 +329,18 @@
 
 
 protected CellComparator
-CompoundBloomFilterBase.comparator
-Comparator used to compare Bloom filter keys
-
+HFile.WriterFactory.comparator 
 
 
 private CellComparator
-HFileReaderImpl.comparator
-Key comparator
+HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
+Needed doing lookup on blocks.
 
 
 
-private CellComparator
-HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
-Needed doing lookup on blocks.
+protected CellComparator
+CompoundBloomFilterBase.comparator
+Comparator used to compare Bloom filter keys
 
 
 
@@ -352,8 +350,10 @@
 
 
 
-protected CellComparator
-HFile.WriterFactory.comparator 
+private CellComparator
+HFileReaderImpl.comparator
+Key comparator
+
 
 
 
@@ -374,11 +374,11 @@
 
 
 CellComparator
-HFileReaderImpl.getComparator() 
+HFile.Reader.getComparator() 
 
 
 CellComparator
-HFile.Reader.getComparator() 
+HFileReaderImpl.getComparator() 
 
 
 
@@ -531,43 +531,43 @@
 
 
 private CellComparator
-ScanInfo.comparator 
+HStore.comparator 
 
 
 private CellComparator
-Segment.comparator 
+StoreFileWriter.Builder.comparator 
 
 
-protected CellComparator
-HRegion.RegionScannerImpl.comparator 
+private CellComparator
+Segment.comparator 
 
 
 private CellComparator
-CompositeImmutableSegment.comparator 
+AbstractMemStore.comparator 
 
 
+private CellComparator
+ScanInfo.comparator 
+
+
 protected CellComparator
 StripeStoreFlusher.StripeFlushRequest.comparator 
 
-
+
 private CellComparator
-MemStoreScanner.comparator 
+CompositeImmutableSegment.comparator 
 
-
+
 protected CellComparator
 StripeMultiFileWriter.comparator 
 
-
-private CellComparator
-AbstractMemStore.comparator 
-
 
-private CellComparator
-HStore.comparator 
+protected CellComparator
+HRegion.RegionScannerImpl.comparator 
 
 
 private CellComparator
-StoreFileWriter.Builder.comparator 
+MemStoreScanner.comparator 
 
 
 private CellComparator
@@ -588,54 +588,54 @@
 
 
 CellComparator
-HRegion.getCellComparator() 
-
-
-CellComparator
 Region.getCellComparator()
 The comparator to be used with the region
 
 
-
-CellComparator
-ScanInfo.getComparator() 
-
 
 CellComparator
-StoreFileReader.getComparator() 
+HRegion.getCellComparator() 
 
 
+CellComparator
+HStore.getComparator() 
+
+
 protected CellComparator
 Segment.getComparator()
 Returns the Cell comparator used by this segment
 
 
+
+CellComparator
+StoreFileReader.getComparator() 
+
 
 protected CellComparator
-CompositeImmutableSegment.getComparator()
-Returns the Cell comparator used by this segment
-
+AbstractMemStore.getComparator() 
 
 
 CellComparator
-KeyValueHeap.KVScannerComparator.getComparator() 
+ScanInfo.getComparator() 
 
 
 CellComparator
-Store.getComparator() 
+KeyValueHeap.KVScannerComparator.getComparator() 
 
 
 protected CellComparator
-AbstractMemStore.getComparator() 
+CompositeImmutableSegment.getComparator()
+Returns the Cell comparator used by this segment
+
 
 
-CellComparator
-HStore.getComparator(

[31/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/src-html/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.html
index 288dfb3..6df12f7 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.html
@@ -162,109 +162,111 @@
 154  }
 155
 156  public void close() {
-157this.scanner.close();
-158try {
-159  this.htable.close();
-160} catch (IOException ioe) {
-161  LOG.warn("Error closing table", 
ioe);
-162}
-163  }
-164
-165  /**
-166   * @return ImmutableBytesWritable
-167   *
-168   * @see 
org.apache.hadoop.mapred.RecordReader#createKey()
-169   */
-170  public ImmutableBytesWritable 
createKey() {
-171return new 
ImmutableBytesWritable();
-172  }
-173
-174  /**
-175   * @return RowResult
-176   *
-177   * @see 
org.apache.hadoop.mapred.RecordReader#createValue()
-178   */
-179  public Result createValue() {
-180return new Result();
-181  }
-182
-183  public long getPos() {
-184// This should be the ordinal tuple 
in the range;
-185// not clear how to calculate...
-186return 0;
-187  }
-188
-189  public float getProgress() {
-190// Depends on the total number of 
tuples and getPos
-191return 0;
-192  }
-193
-194  /**
-195   * @param key HStoreKey as input key.
-196   * @param value MapWritable as input 
value
-197   * @return true if there was more 
data
-198   * @throws IOException
-199   */
-200  public boolean 
next(ImmutableBytesWritable key, Result value)
-201  throws IOException {
-202Result result;
-203try {
-204  try {
-205result = this.scanner.next();
-206if (logScannerActivity) {
-207  rowcount ++;
-208  if (rowcount >= 
logPerRowCount) {
-209long now = 
System.currentTimeMillis();
-210LOG.info("Mapper took " + 
(now-timestamp)
-211  + "ms to process " + 
rowcount + " rows");
-212timestamp = now;
-213rowcount = 0;
-214  }
-215}
-216  } catch (IOException e) {
-217// do not retry if the exception 
tells us not to do so
-218if (e instanceof 
DoNotRetryIOException) {
-219  throw e;
-220}
-221// try to handle all other 
IOExceptions by restarting
-222// the scanner, if the second 
call fails, it will be rethrown
-223LOG.debug("recovered from " + 
StringUtils.stringifyException(e));
-224if (lastSuccessfulRow == null) 
{
-225  LOG.warn("We are restarting the 
first next() invocation," +
-226  " if your mapper has 
restarted a few other times like this" +
-227  " then you should consider 
killing this job and investigate" +
-228  " why it's taking so 
long.");
-229}
-230if (lastSuccessfulRow == null) 
{
-231  restart(startRow);
-232} else {
-233  restart(lastSuccessfulRow);
-234  this.scanner.next();// skip 
presumed already mapped row
-235}
-236result = this.scanner.next();
-237  }
-238
-239  if (result != null && 
result.size() > 0) {
-240key.set(result.getRow());
-241lastSuccessfulRow = key.get();
-242value.copyFrom(result);
-243return true;
-244  }
-245  return false;
-246} catch (IOException ioe) {
-247  if (logScannerActivity) {
-248long now = 
System.currentTimeMillis();
-249LOG.info("Mapper took " + 
(now-timestamp)
-250  + "ms to process " + rowcount + 
" rows");
-251LOG.info(ioe);
-252String lastRow = 
lastSuccessfulRow == null ?
-253  "null" : 
Bytes.toStringBinary(lastSuccessfulRow);
-254LOG.info("lastSuccessfulRow=" + 
lastRow);
-255  }
-256  throw ioe;
-257}
-258  }
-259}
+157if (this.scanner != null) {
+158  this.scanner.close();
+159}
+160try {
+161  this.htable.close();
+162} catch (IOException ioe) {
+163  LOG.warn("Error closing table", 
ioe);
+164}
+165  }
+166
+167  /**
+168   * @return ImmutableBytesWritable
+169   *
+170   * @see 
org.apache.hadoop.mapred.RecordReader#createKey()
+171   */
+172  public ImmutableBytesWritable 
createKey() {
+173return new 
ImmutableBytesWritable();
+174  }
+175
+176  /**
+177   * @return RowResult
+178   *
+179   * @see 
org.apache.hadoop.mapred.RecordReader#createValue()
+180   */
+181  public Result createValue() {
+182return new Result();
+183  }
+184
+185  public long getPos() {
+186// This should be the ordinal tuple 
in the range;
+187// not clear how to calculate...
+188return 0;
+189  }
+190
+191  public float getProgress() {
+192// Depends on the total 
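[Editor's note] The substance of the TableRecordReaderImpl change above is a null guard in close(): the scanner may never have been opened, so it is only closed when present. Restated as plain Java, with the field names mirroring the source shown in the hunk (LOG and htable belong to that class, not to this sketch):

public void close() {
  if (this.scanner != null) {   // scanner can be null if the reader was never (re)started
    this.scanner.close();
  }
  try {
    this.htable.close();
  } catch (IOException ioe) {
    LOG.warn("Error closing table", ioe);
  }
}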

[42/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/client/class-use/Get.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Get.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Get.html
index 9fb7f70..5e80df4 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Get.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Get.html
@@ -246,26 +246,26 @@
 
 
 
-boolean
-Table.exists(Get get)
+default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureBoolean>
+AsyncTableBase.exists(Get get)
 Test for the existence of columns in the table, as 
specified by the Get.
 
 
 
-default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureBoolean>
-AsyncTableBase.exists(Get get)
+boolean
+Table.exists(Get get)
 Test for the existence of columns in the table, as 
specified by the Get.
 
 
 
-Result
-Table.get(Get get)
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
+AsyncTableBase.get(Get get)
 Extracts certain cells from a given row.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
-AsyncTableBase.get(Get get)
+Result
+Table.get(Get get)
 Extracts certain cells from a given row.
 
 
@@ -285,26 +285,26 @@
 
 
 
-boolean[]
-Table.existsAll(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List gets)
-Test for the existence of columns in the table, as 
specified by the Gets.
-
-
-
 default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureListBoolean>>
 AsyncTableBase.existsAll(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List gets)
 A simple version for batch exists.
 
 
+
+boolean[]
+Table.existsAll(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List gets)
+Test for the existence of columns in the table, as 
specified by the Gets.
+
+
 
-Result[]
-Table.get(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List gets)
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListCompletableFuture>
+AsyncTableBase.get(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List gets)
 Extracts certain cells from the given rows, in batch.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListCompletableFuture>
-AsyncTableBase.get(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List gets)
+Result[]
+Table.get(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List gets)
 Extracts certain cells from the given rows, in batch.
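[Editor's sketch] The reordered rows above document both flavours of the client API: the blocking Table.get(Get)/exists(Get) calls and the CompletableFuture-returning AsyncTableBase equivalents. A small hedged sketch contrasting the two, assuming table handles of each kind have already been obtained (obtaining them is outside this hunk):

import java.io.IOException;
import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.hbase.client.AsyncTableBase;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;

public class GetSketch {
  // Blocking flavour: returns the Result directly (or throws IOException).
  static Result blockingGet(Table table, byte[] row) throws IOException {
    return table.get(new Get(row));
  }

  // Async flavour: returns immediately; the Result arrives through the future.
  static CompletableFuture<Result> asyncGet(AsyncTableBase asyncTable, byte[] row) {
    return asyncTable.get(new Get(row));
  }
}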
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/org/apache/hadoop/hbase/client/class-use/Increment.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Increment.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Increment.html
index db23811..52079a5 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Increment.html
+++ b/ap

[33/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html
index c1efaa7..1a6b0c2 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html
@@ -48,913 +48,927 @@
 040import 
org.apache.hadoop.hbase.KeyValueUtil;
 041import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 042import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-043import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-044import 
org.apache.hadoop.hbase.util.Bytes;
-045
-046/**
-047 * Single row result of a {@link Get} or 
{@link Scan} query.

-048 *
-049 * This class is NOT THREAD SAFE.
-050 *
-051 * Convenience methods are available that return various {@link Map}
-052 * structures and values directly.
-053 *
-054 * To get a complete mapping of all cells in the Result, which can include
-055 * multiple families and multiple versions, use {@link #getMap()}.
-056 *
-057 * To get a mapping of each family to its columns (qualifiers and values),
-058 * including only the latest version of each, use {@link #getNoVersionMap()}.
-059 *
-060 * To get a mapping of qualifiers to latest values for an individual family use
-061 * {@link #getFamilyMap(byte[])}.
-062 *
-063 * To get the latest value for a specific family and qualifier use
-064 * {@link #getValue(byte[], byte[])}.
-065 *
-066 * A Result is backed by an array of {@link Cell} objects, each representing
-067 * an HBase cell defined by the row, family, qualifier, timestamp, and value.
-068 *
-069 * The underlying {@link Cell} objects can be accessed through the method {@link #listCells()}.
-070 * This will create a List from the internal Cell []. Better is to exploit the fact that
-071 * a new Result instance is a primed {@link CellScanner}; just call {@link #advance()} and
-072 * {@link #current()} to iterate over Cells as you would any {@link CellScanner}.
-073 * Call {@link #cellScanner()} to reset should you need to iterate the same Result over again
-074 * ({@link CellScanner}s are one-shot).
-075 *
-076 * If you need to overwrite a Result with another Result instance -- as in the old 'mapred'
-077 * RecordReader next invocations -- then create an empty Result with the null constructor and
-078 * in then use {@link #copyFrom(Result)}
-079 */
-080@InterfaceAudience.Public
-081@InterfaceStability.Stable
-082public class Result implements CellScannable, CellScanner {
-083 private Cell[] cells;
-084 private Boolean exists; // if the query was just to check existence.
-085 private boolean stale = false;
-086
-087 /**
-088 * Partial results do not contain the full row's worth of cells. The result had to be returned in
-089 * parts because the size of the cells in the row exceeded the RPC result size on the server.
-090 * Partial results must be combined client side with results representing the remainder of the
-091 * row's cells to form the complete result. Partial results and RPC result size allow us to avoid
-092 * OOME on the server when servicing requests for large rows. The Scan configuration used to
-093 * control the result size on the server is {@link Scan#setMaxResultSize(long)} and the default
-094 * value can be seen here: {@link HConstants#DEFAULT_HBASE_CLIENT_SCANNER_MAX_RESULT_SIZE}
-095 */
-096 private boolean partial = false;
-097 // We're not using java serialization. Transient here is just a marker to say
-098 // that this is where we cache row if we're ever asked for it.
-099 private transient byte [] row = null;
-100 // Ditto for familyMap. It can be composed on fly from passed in kvs.
-101 private transient NavigableMap>>
-102 familyMap = null;
+043import org.apache.hadoop.hbase.util.Bytes;
+044
+045/**
+046 * Single row result of a {@link Get} or {@link Scan} query.
+047 *
+048 * This class is NOT THREAD SAFE.
+049 *
+050 * Convenience methods are available that return various {@link Map}
+051 * structures and values directly.
+052 *
+053 * To get a complete mapping of all cells in the Result, which can include
+054 * multiple families and multiple versions, use {@link #getMap()}.
+055 *
+056 * To get a mapping of each family to its columns (qualifiers and values),
+057 * including only the latest version of each, use {@link #getNoVersionMap()}.
+058 *
+059 * To get a mapping of qualifiers to latest values for an individual family use
+060 * {@link #getFamilyMap(byte[])}.
+061 *
+062 * To get the latest value for a specific family and qualifier use
+063 * {@link #getValue(byte[], byte[])}.
+064 *
+065 * A
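[Editor's sketch] The Result javadoc quoted in this hunk recommends iterating a fresh Result as a primed CellScanner via advance()/current() rather than materialising a List with listCells(). A short hedged sketch of that pattern, assuming a Result obtained from some Get or Scan:

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class ResultIterationSketch {
  // Walk the cells of a Result with its one-shot CellScanner; call cellScanner()
  // on the Result first if it has already been iterated and needs resetting.
  static void dumpQualifiers(Result result) throws IOException {
    while (result.advance()) {
      Cell cell = result.current();
      System.out.println(Bytes.toString(CellUtil.cloneQualifier(cell)));
    }
  }
}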


[10/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/RegionLocations.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/RegionLocations.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/RegionLocations.html
index 7f911f0..ca4677c 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/RegionLocations.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/RegionLocations.html
@@ -217,11 +217,11 @@
 
 
 RegionLocations
-ZooKeeperRegistry.getMetaRegionLocation() 
+Registry.getMetaRegionLocation() 
 
 
 RegionLocations
-Registry.getMetaRegionLocation() 
+ZooKeeperRegistry.getMetaRegionLocation() 
 
 
 (package private) static RegionLocations
@@ -247,21 +247,21 @@
 
 
 RegionLocations
-ConnectionImplementation.locateRegion(TableName tableName,
+ClusterConnection.locateRegion(TableName tableName,
 byte[] row,
 boolean useCache,
 boolean retry) 
 
 
 RegionLocations
-ClusterConnection.locateRegion(TableName tableName,
+ConnectionImplementation.locateRegion(TableName tableName,
 byte[] row,
 boolean useCache,
 boolean retry) 
 
 
 RegionLocations
-ConnectionImplementation.locateRegion(TableName tableName,
+ClusterConnection.locateRegion(TableName tableName,
 byte[] row,
 boolean useCache,
 boolean retry,
@@ -269,7 +269,7 @@
 
 
 RegionLocations
-ClusterConnection.locateRegion(TableName tableName,
+ConnectionImplementation.locateRegion(TableName tableName,
 byte[] row,
 boolean useCache,
 boolean retry,
@@ -285,12 +285,6 @@
 
 
 RegionLocations
-ConnectionImplementation.relocateRegion(TableName tableName,
-  byte[] row,
-  int replicaId) 
-
-
-RegionLocations
 ClusterConnection.relocateRegion(TableName tableName,
   byte[] row,
   int replicaId)
@@ -298,6 +292,12 @@
  lives in, ignoring any value that might be in the cache.
 
 
+
+RegionLocations
+ConnectionImplementation.relocateRegion(TableName tableName,
+  byte[] row,
+  int replicaId) 
+
 
 
 
@@ -368,16 +368,16 @@
 
 
 void
+ClusterConnection.cacheLocation(TableName tableName,
+ RegionLocations location) 
+
+
+void
 ConnectionImplementation.cacheLocation(TableName tableName,
  RegionLocations location)
 Put a newly discovered HRegionLocation into the cache.
 
 
-
-void
-ClusterConnection.cacheLocation(TableName tableName,
- RegionLocations location) 
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/ScheduledChore.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ScheduledChore.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ScheduledChore.html
index 813c81d..3117db7 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ScheduledChore.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ScheduledChore.html
@@ -180,43 +180,43 @@
 
 
 void
-ScheduledChore.ChoreServicer.cancelChore(ScheduledChore chore)
-Cancel any ongoing schedules that this chore has with the 
implementer of this interface.
-
+ChoreService.cancelChore(ScheduledChore chore) 
 
 
 void
-ChoreService.cancelChore(ScheduledChore chore) 
+ScheduledChore.ChoreServicer.cancelChore(ScheduledChore chore)
+Cancel any ongoing schedules that this chore has with the 
implementer of this interface.
+
 
 
 void
-ScheduledChore.ChoreServicer.cancelChore(ScheduledChore chore,
+ChoreService.cancelChore(ScheduledChore chore,
boolean mayInterruptIfRunning) 
 
 
 void
-ChoreService.cancelChore(ScheduledChore chore,
+ScheduledChore.ChoreServicer.cancelChore(ScheduledChore chore,
boolean mayInterruptIfRunning) 
 
 
 boolean
-ScheduledChore.ChoreServicer.isChoreScheduled(ScheduledChore chore) 
+ChoreService.isChoreScheduled(ScheduledChore chore) 
 
 
 boolean
-ChoreService.isChoreScheduled(ScheduledChore chore) 
+ScheduledChore.ChoreServicer.isChoreScheduled(ScheduledChore chore) 
 
 
 void
+ChoreService.onChoreMissedStartTime(ScheduledChore chore) 
+
+
+void
 ScheduledChore.ChoreServicer.onChoreMissedStartTime(ScheduledChore chore)
 A callback that tells the implementer of this interface 
that one of the scheduled chores is
  missing its start time.
 
 
-
-void
-ChoreService.onChoreMissedStartTime(ScheduledChore chore) 
-
 
 private void
 ChoreService.printChoreDetails(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String header,
@@ -234,13 +234,13 @@
 
 
 boolean
-ScheduledChore.ChoreServicer.triggerNow(ScheduledChore chore)
-This method tries to execute the chore immediately.
-
+ChoreService.triggerNow(ScheduledChore chore) 
 
 
 boolean
-ChoreService.triggerNow(Sche

[05/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
index 1a124cf..6cf6d7c 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
@@ -594,6 +594,20 @@ service.
 
 
 class 
+ByteBufferKeyValue
+This Cell is an implementation of ByteBufferCell where 
the data resides in
+ off heap/ on heap ByteBuffer
+
+
+
+class 
+ByteBufferTag
+This is a Tag implementation in which value is 
backed by
+ http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
+
+
+
+class 
 CellComparator
 Compare two HBase cells.
 
@@ -784,110 +798,102 @@ service.
 
 
 class 
-NoTagsKeyValue
-An extension of the KeyValue where the tags length is 
always 0
+NoTagsByteBufferKeyValue
+An extension of the ByteBufferKeyValue where the tags 
length is always 0
 
 
 
 class 
-OffheapKeyValue
-This Cell is an implementation of ByteBufferCell where 
the data resides in off heap
- memory.
+NoTagsKeyValue
+An extension of the KeyValue where the tags length is 
always 0
 
 
 
 class 
-OffheapTag
-This is a Tag implementation in which value is 
backed by an off heap
- http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-
-
-
-class 
 RegionLocations
 Container for holding a list of HRegionLocation's 
that correspond to the
  same range.
 
 
-
+
 interface 
 RegionStateListener
 The listener interface for receiving region state 
events.
 
 
-
+
 interface 
 Service
 Simple Service.
 
 
-
+
 class 
 ServiceNotRunningException 
 
-
+
 interface 
 ShareableMemory
 A cell implementing this interface would mean that the 
memory area backing this cell will refer
  to a memory area that could be part of a larger common memory area used by 
the RegionServer.
 
 
-
+
 class 
 SizeCachedKeyValue
 This class is an extension to KeyValue where rowLen and 
keyLen are cached.
 
 
-
+
 class 
 SizeCachedNoTagsKeyValue
 This class is an extension to ContentSizeCachedKeyValue 
where there are no tags in Cell.
 
 
-
+
 class 
 SplitLogCounters
 Counters kept by the distributed WAL split log 
process.
 
 
-
+
 class 
 SplitLogTask
 State of a WAL log split during distributed splitting.
 
 
-
+
 interface 
 TableDescriptors
 Get, remove and modify table descriptors.
 
 
-
+
 interface 
 Tag
 Tags are part of cells and helps to add metadata about 
them.
 
 
-
+
 class 
 TagType 
 
-
+
 class 
 TagUtil 
 
-
+
 class 
 Version 
 
-
+
 class 
 YouAreDeadException
 This exception is thrown by the master when a region server 
reports and is
  already being processed as dead.
 
 
-
+
 class 
 ZKNamespaceManager
 Class servers two purposes:
@@ -1265,143 +1271,147 @@ service.
 
 
 
+class 
+AsyncAdminRequestRetryingCaller 
+
+
 (package private) class 
 AsyncBatchRpcRetryingCaller
 Retry caller for batch.
 
 
-
+
 (package private) class 
 AsyncClientScanner
 The asynchronous client scanner implementation.
 
 
-
+
 (package private) class 
 AsyncConnectionConfiguration
 Timeout configs.
 
 
-
+
 (package private) class 
 AsyncConnectionImpl
 The implementation of AsyncConnection.
 
 
-
+
 class 
 AsyncHBaseAdmin
 The implementation of AsyncAdmin.
 
 
-
+
 class 
 AsyncMasterRequestRpcRetryingCaller
 Retry caller for a request call to master.
 
 
-
+
 (package private) class 
 AsyncMetaRegionLocator
 The asynchronous locator for meta region.
 
 
-
+
 (package private) class 
 AsyncNonMetaRegionLocator
 The asynchronous locator for regions other than meta.
 
 
-
+
 (package private) class 
 AsyncProcess
 This class  allows a continuous flow of requests.
 
 
-
+
 class 
 AsyncProcessTask
 Contains the attributes of a task which will be executed
  by AsyncProcess.
 
 
-
+
 (package private) class 
 AsyncRegionLocator
 The asynchronous region locator.
 
 
-
+
 (package private) interface 
 AsyncRegistry
 Implementations hold cluster information such as this 
cluster's id, location of hbase:meta, etc..
 
 
-
+
 (package private) class 
 AsyncRegistryFactory
 Get instance of configured Registry.
 
 
-
+
 interface 
 AsyncRequestFuture
 The context used to wait for results from one submit 
call.
 
 
-
+
 (package private) class 
 AsyncRequestFutureImpl
 The context, and return value, for a single 
submit/submitAll call.
 
 
-
+
 class 
 AsyncRpcRetryingCaller 
 
-
+
 (package private) class 
 AsyncRpcRetryingCallerFactory
 Factory to create an AsyncRpcRetryCaller.
 
 
-
+
 (package private) cla

[07/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 50cb53e..fdc525a 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -333,11 +333,11 @@ service.
 
 
 private TableName
-HRegionInfo.tableName 
+MetaTableAccessor.TableVisitorBase.tableName 
 
 
 private TableName
-MetaTableAccessor.TableVisitorBase.tableName 
+HRegionInfo.tableName 
 
 
 
@@ -792,24 +792,24 @@ service.
 RegionCoprocessorRpcChannel.table 
 
 
-protected TableName
-AsyncHBaseAdmin.TableProcedureBiConsumer.tableName 
+private TableName
+RegionCoprocessorRpcChannelImpl.tableName 
 
 
 private TableName
-AsyncProcessTask.tableName 
+HRegionLocator.tableName 
 
 
 private TableName
-AsyncProcessTask.Builder.tableName 
+HBaseAdmin.TableFuture.tableName 
 
 
-protected TableName
-RpcRetryingCallerWithReadReplicas.tableName 
+private TableName
+AsyncProcessTask.tableName 
 
 
 private TableName
-ClientScanner.tableName 
+AsyncProcessTask.Builder.tableName 
 
 
 private TableName
@@ -817,80 +817,80 @@ service.
 
 
 private TableName
-BufferedMutatorImpl.tableName 
+ClientScanner.tableName 
 
 
 private TableName
 AsyncSingleRequestRpcRetryingCaller.tableName 
 
 
-protected TableName
-AsyncTableBuilderBase.tableName 
+private TableName
+HTable.tableName 
 
 
 private TableName
-RegionServerCallable.tableName 
+AsyncBatchRpcRetryingCaller.tableName 
 
 
 private TableName
-BufferedMutatorParams.tableName 
+TableState.tableName 
 
 
 private TableName
-HTable.tableName 
+AsyncTableRegionLocatorImpl.tableName 
 
 
 private TableName
-HRegionLocator.tableName 
+AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName 
 
 
 private TableName
-HBaseAdmin.TableFuture.tableName 
+AsyncRpcRetryingCallerFactory.BatchCallerBuilder.tableName 
 
 
 private TableName
-ScannerCallableWithReplicas.tableName 
+RawAsyncTableImpl.tableName 
 
 
-private TableName
-AsyncClientScanner.tableName 
+protected TableName
+RegionAdminServiceCallable.tableName 
 
 
 private TableName
-RawAsyncTableImpl.tableName 
+BufferedMutatorImpl.tableName 
 
 
 private TableName
-AsyncBatchRpcRetryingCaller.tableName 
+ScannerCallableWithReplicas.tableName 
+
+
+protected TableName
+AsyncHBaseAdmin.TableProcedureBiConsumer.tableName 
+
+
+protected TableName
+RpcRetryingCallerWithReadReplicas.tableName 
 
 
 private TableName
-RegionCoprocessorRpcChannelImpl.tableName 
+RegionServerCallable.tableName 
 
 
 private TableName
-AsyncTableRegionLocatorImpl.tableName 
+AsyncClientScanner.tableName 
 
 
 protected TableName
-RegionAdminServiceCallable.tableName 
+AsyncTableBuilderBase.tableName 
 
 
 private TableName
-AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName 
+BufferedMutatorParams.tableName 
 
 
-private TableName
-AsyncRpcRetryingCallerFactory.BatchCallerBuilder.tableName 
-
-
 protected TableName
 TableBuilderBase.tableName 
 
-
-private TableName
-TableState.tableName 
-
 
 
 
@@ -927,57 +927,57 @@ service.
 
 
 TableName
-BufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
+AsyncTableBase.getName()
+Gets the fully qualified table name instance of this 
table.
 
 
 
 TableName
-RegionLocator.getName()
-Gets the fully qualified table name instance of this 
table.
-
+HRegionLocator.getName() 
 
 
 TableName
-AsyncTableBase.getName()
+Table.getName()
 Gets the fully qualified table name instance of this 
table.
 
 
 
 TableName
-BufferedMutatorImpl.getName() 
+RegionLocator.getName()
+Gets the fully qualified table name instance of this 
table.
+
 
 
 TableName
-HTable.getName() 
-
-
-TableName
 AsyncTableRegionLocator.getName()
 Gets the fully qualified table name instance of the table 
whose region we want to locate.
 
 
+
+TableName
+HTable.getName() 
+
 
 TableName
-HRegionLocator.getName() 
+AsyncTableRegionLocatorImpl.getName() 
 
 
 TableName
-RawAsyncTableImpl.getName() 
+AsyncTableImpl.getName() 
 
 
 TableName
-Table.getName()
-Gets the fully qualified table name instance of this 
table.
+BufferedMutator.getName()
+Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
 
 
 
 TableName
-AsyncTableRegionLocatorImpl.getName() 
+RawAsyncTableImpl.getName() 
 
 
 TableName
-AsyncTableImpl.getName() 
+BufferedMutatorImpl.getName() 
 
 
 TableName
@@ -989,29 +989,29 @@ service.
 
 
 TableName
-AsyncProcessTask.getTableName() 
+SnapshotDescription.getTableName() 
 
 
-TableName
-SnapshotDescription.getTableName() 
+protected TableName
+HBaseAdmin.TableFuture.getTableName() 
 
 
 TableName
-RegionServerCallable.getTableName() 
+AsyncProcessTask.getTableName() 
 
 
 TableName

[12/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
index 3ce8423..a20725d 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
@@ -575,25 +575,25 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 HBaseAdmin.enableTables(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String regex) 
 
 
-HTableDescriptor
-HTable.getTableDescriptor()
-Gets the table descriptor for 
this table.
-
-
-
 protected HTableDescriptor
 HBaseAdmin.CreateTableFuture.getTableDescriptor() 
 
-
+
 protected HTableDescriptor
 HBaseAdmin.TableFuture.getTableDescriptor() 
 
-
+
 HTableDescriptor
 Table.getTableDescriptor()
 Gets the table descriptor for 
this table.
 
 
+
+HTableDescriptor
+HTable.getTableDescriptor()
+Gets the table descriptor for 
this table.
+
+
 
 HTableDescriptor
 HTableWrapper.getTableDescriptor() 
@@ -718,14 +718,14 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
-AsyncHBaseAdmin.getTableDescriptor(TableName tableName) 
-
-
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
 AsyncAdmin.getTableDescriptor(TableName tableName)
 Method for getting the tableDescriptor
 
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFuture
+AsyncHBaseAdmin.getTableDescriptor(TableName tableName) 
+
 
 
 
@@ -736,57 +736,50 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
-AsyncHBaseAdmin.createTable(HTableDescriptor desc) 
-
-
 void
 Admin.createTable(HTableDescriptor desc)
 Creates a new table.
 
 
-
+
 void
 HBaseAdmin.createTable(HTableDescriptor desc) 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 AsyncAdmin.createTable(HTableDescriptor desc)
 Creates a new table.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
-AsyncHBaseAdmin.createTable(HTableDescriptor desc,
-   byte[][] splitKeys) 
+AsyncHBaseAdmin.createTable(HTableDescriptor desc) 
 
-
+
 void
 Admin.createTable(HTableDescriptor desc,
byte[][] splitKeys)
 Creates a new table with an initial set of empty regions 
defined by the specified split keys.
 
 
-
+
 void
 HBaseAdmin.createTable(HTableDescriptor desc,
byte[][] splitKeys) 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 AsyncAdmin.createTable(HTableDescriptor desc,
byte[][] splitKeys)
 Creates a new table with an initial set of empty regions 
defined by the specified split keys.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
-AsyncHBaseAdmin.createTable(HTableDescriptor desc,
-   byte[] startKey,
-   byte[] endKey,
-   int numRegions) 
+AsyncHBaseAdmin.createTable(HTableDescriptor desc,
+   byte[][] splitKeys) 
 
-
+
 void
 Admin.createTable(HTableDescriptor desc,
byte[] startK
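[Editor's sketch] The rows above enumerate the createTable overloads on Admin and AsyncAdmin, including the pre-split variant taking a start key, end key and region count. A hedged sketch of that blocking overload; the table name, column family and key range below are placeholders, not values from this commit:

import java.io.IOException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.util.Bytes;

public class CreateTableSketch {
  // Create a table pre-split into 16 regions across [row-000, row-999),
  // mirroring Admin.createTable(desc, startKey, endKey, numRegions) above.
  static void createPresplit(Admin admin) throws IOException {
    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("demo_table"));
    desc.addFamily(new HColumnDescriptor("cf"));
    admin.createTable(desc, Bytes.toBytes("row-000"), Bytes.toBytes("row-999"), 16);
  }
}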

[22/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/OffheapKeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/OffheapKeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/OffheapKeyValue.html
deleted file mode 100644
index 363e4ac..000
--- a/devapidocs/org/apache/hadoop/hbase/OffheapKeyValue.html
+++ /dev/null
@@ -1,1360 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd";>
-
-
-
-
-
-OffheapKeyValue (Apache HBase 2.0.0-SNAPSHOT API)
-
-
-
-
-
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-
-
-JavaScript is disabled on your browser.
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-Prev Class
-Next Class
-
-
-Frames
-No Frames
-
-
-All Classes
-
-
-
-
-
-
-
-Summary: 
-Nested | 
-Field | 
-Constr | 
-Method
-
-
-Detail: 
-Field | 
-Constr | 
-Method
-
-
-
-
-
-
-
-
-org.apache.hadoop.hbase
-Class OffheapKeyValue
-
-
-
-http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">java.lang.Object
-
-
-org.apache.hadoop.hbase.ByteBufferCell
-
-
-org.apache.hadoop.hbase.OffheapKeyValue
-
-
-
-
-
-
-
-
-
-All Implemented Interfaces:
-http://docs.oracle.com/javase/8/docs/api/java/lang/Cloneable.html?is-external=true";
 title="class or interface in java.lang">Cloneable, Cell, ExtendedCell, HeapSize, SettableSequenceId, SettableTimestamp
-
-
-Direct Known Subclasses:
-RedundantKVGenerator.ExtendedOffheapKeyValue
-
-
-
-@InterfaceAudience.Private
-public class OffheapKeyValue
-extends ByteBufferCell
-implements ExtendedCell
-This Cell is an implementation of ByteBufferCell where the data resides 
in off heap
- memory.
-
-
-
-
-
-
-
-
-
-
-
-Field Summary
-
-Fields 
-
-Modifier and Type
-Field and Description
-
-
-protected http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-buf 
-
-
-private static int
-FIXED_OVERHEAD 
-
-
-protected boolean
-hasTags 
-
-
-private int
-keyLen 
-
-
-protected int
-length 
-
-
-protected int
-offset 
-
-
-private short
-rowLen 
-
-
-private long
-seqId 
-
-
-
-
-
-
-
-
-
-Constructor Summary
-
-Constructors 
-
-Constructor and Description
-
-
-OffheapKeyValue(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer buf,
-   int offset,
-   int length) 
-
-
-OffheapKeyValue(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer buf,
-   int offset,
-   int length,
-   boolean hasTags,
-   long seqId) 
-
-
-
-
-
-
-
-
-
-Method Summary
-
-All Methods Instance Methods Concrete Methods 
-
-Modifier and Type
-Method and Description
-
-
-private int
-calculateHashForKey(ByteBufferCell cell) 
-
-
-Cell
-deepClone()
-Does a deep copy of the contents to a new memory area and 
returns it as a new cell.
-
-
-
-boolean
-equals(http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object other)
-Needed doing 'contains' on List.
-
-
-
-byte[]
-getFamilyArray()
-Contiguous bytes composed of legal HDFS filename characters 
which may start at any index in the
- containing array.
-
-
-
-http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-getFamilyByteBuffer() 
-
-
-byte
-getFamilyLength() 
-
-
-private byte
-getFamilyLength(int famLenPos) 
-
-
-private int
-getFamilyLengthPosition() 
-
-
-int
-getFamilyOffset() 
-
-
-int
-getFamilyPosition() 
-
-
-byte[]
-getQualifierArray()
-Contiguous raw bytes that may start at any index in the 
containing array.
-
-
-
-http://docs.oracle.com/jav

[25/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/deprecated-list.html
--
diff --git a/devapidocs/deprecated-list.html b/devapidocs/deprecated-list.html
index 208c737..e8212bd 100644
--- a/devapidocs/deprecated-list.html
+++ b/devapidocs/deprecated-list.html
@@ -373,16 +373,26 @@
 
 
 
+org.apache.hadoop.hbase.client.replication.ReplicationAdmin.addPeer(String,
 ReplicationPeerConfig)
+use
+ Admin.addReplicationPeer(String,
 ReplicationPeerConfig)
+ instead
+
+
+
 org.apache.hadoop.hbase.client.replication.ReplicationAdmin.addPeer(String,
 ReplicationPeerConfig, Map>)
 as release of 2.0.0, and 
it will be removed in 3.0.0,
  use ReplicationAdmin.addPeer(String,
 ReplicationPeerConfig) instead.
 
 
-
+
 org.apache.hadoop.hbase.MetaTableAccessor.allTableRegions(Connection,
 TableName)
 use MetaTableAccessor.getTableRegionsAndLocations(org.apache.hadoop.hbase.client.Connection,
 org.apache.hadoop.hbase.TableName), region can have multiple 
locations
 
 
+
+org.apache.hadoop.hbase.client.replication.ReplicationAdmin.appendPeerTableCFs(String,
 Map>)
+
 
 org.apache.hadoop.hbase.client.replication.ReplicationAdmin.appendPeerTableCFs(String,
 String)
 as release of 2.0.0, and 
it will be removed in 3.0.0,
@@ -434,24 +444,18 @@
 org.apache.hadoop.hbase.mapreduce.CellCreator.create(byte[],
 int, int, byte[], int, int, byte[], int, int, long, byte[], int, int, 
String)
 
 
-org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory.create(Configuration,
 PriorityFunction)
+org.apache.hadoop.hbase.regionserver.SimpleRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
 org.apache.hadoop.hbase.regionserver.FifoRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
-org.apache.hadoop.hbase.regionserver.SimpleRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
+org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
 org.apache.hadoop.hbase.coprocessor.ObserverContext.createAndPrepare(T,
 ObserverContext)
 
 
-org.apache.hadoop.hbase.client.ConnectionUtils.createClosestRowBefore(byte[])
-in fact, we do not know 
the closest row before the given row, the result is only a
- row very close to the current row. Avoid using this method in the 
future.
-
-
-
 org.apache.hadoop.hbase.client.Admin.deleteColumn(TableName,
 byte[])
 As of release 2.0.0.
  (https://issues.apache.org/jira/browse/HBASE-1989";>HBASE-1989).
@@ -459,12 +463,18 @@
  Use Admin.deleteColumnFamily(TableName,
 byte[])}.
 
 
-
+
 org.apache.hadoop.hbase.client.HBaseAdmin.deleteColumn(TableName,
 byte[])
 Since 2.0. Will be removed 
in 3.0. Use
  HBaseAdmin.deleteColumnFamily(TableName,
 byte[]) instead.
 
 
+
+org.apache.hadoop.hbase.client.replication.ReplicationAdmin.disablePeer(String)
+use Admin.disableReplicationPeer(String)
+ instead
+
+
 
 org.apache.hadoop.hbase.client.replication.ReplicationAdmin.disableTableRep(TableName)
 use Admin.disableTableReplication(TableName)
@@ -472,155 +482,183 @@
 
 
 
+org.apache.hadoop.hbase.client.replication.ReplicationAdmin.enablePeer(String)
+use Admin.enableReplicationPeer(String)
+ instead
+
+
+
 org.apache.hadoop.hbase.client.replication.ReplicationAdmin.enableTableRep(TableName)
 use Admin.enableTableReplication(TableName)
  instead
 
 
-
+
 org.apache.hadoop.hbase.KeyValueUtil.ensureKeyValue(Cell)
 without any 
replacement.
 
 
-
+
 org.apache.hadoop.hbase.KeyValueUtil.ensureKeyValues(List)
 
-
+
 org.apache.hadoop.hbase.regionserver.RegionMergeTransaction.execute(Server,
 RegionServerServices)
 use #execute(Server, 
RegionServerServices, User)
 
 
-
+
 org.apache.hadoop.hbase.client.HTableWrapper.exists(List)
 Use HTableWrapper.existsAll(java.util.List)
  instead. since 2.0.  remove in 3.0
 
 
-
+
 org.apache.hadoop.hbase.rest.client.RemoteHTable.exists(List)
 
-
+
 org.apache.hadoop.hbase.filter.FilterBase.filterRowKey(byte[],
 int, int)
 As of release 2.0.0, this 
will be removed in HBase 3.0.0.
  Instead use FilterBase.filterRowKey(Cell)
 
 
-
+
 org.apache.hadoop.hbase.filter.Filter.filterRowKey(byte[],
 int, int)
 As of release 2.0.0, this 
will be removed in HBase 3.0.0.
  Instead use Filter.filterRowKey(Cell)
 
 
-
+
 org.apache.hadoop.hbase.client.HTableInterface.flushCommits()
 as of 1.0.0. Replaced by 
BufferedMutator.flush()
 
 
-
+
 org.apache.hadoop.hbase.client.Admin.getAlterStatus(byte[])
 Since 2.0.0. Will be 
removed in 3.0.0. Use Admin.getAlterStatus(TableName)
  instead.
 
 
-
+
 org.apache.hadoop.hbase.security.visibility.VisibilityClient.getAuths(Configuration,
 String)
 Use VisibilityClient.getAuths(Connection,String)
 instead.
 
 
-
+
 org.apache.hadoop.hbase.KeyValue.getBuffer()
 Since 0.98.0.  Us
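[Editor's sketch] Several of the ReplicationAdmin entry points newly listed as deprecated above now route through Admin (addReplicationPeer, enableReplicationPeer, disableReplicationPeer, enable/disableTableReplication). A hedged sketch of the replacement calls; the peer id, cluster key and connection setup are illustrative placeholders:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

public class ReplicationAdminSketch {
  // Peer management via Admin, replacing the deprecated ReplicationAdmin methods
  // from the deprecation table above.
  static void setUpPeer(Admin admin, String peerId, String clusterKey, TableName table) throws Exception {
    ReplicationPeerConfig peerConfig = new ReplicationPeerConfig();
    peerConfig.setClusterKey(clusterKey);          // e.g. "zk1,zk2,zk3:2181:/hbase" (illustrative)
    admin.addReplicationPeer(peerId, peerConfig);  // was ReplicationAdmin.addPeer(...)
    admin.enableReplicationPeer(peerId);           // was ReplicationAdmin.enablePeer(...)
    admin.enableTableReplication(table);           // was ReplicationAdmin.enableTableRep(...)
  }
}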

[09/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index 00cacac..a12048d 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -595,11 +595,19 @@
 
 
 private ServerName
+FastFailInterceptorContext.server 
+
+
+private ServerName
 AsyncRequestFutureImpl.SingleServerRequestRunnable.server 
 
+
+private ServerName
+AsyncAdminRequestRetryingCaller.serverName 
+
 
 private ServerName
-FastFailInterceptorContext.server 
+AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.serverName 
 
 
 private ServerName
@@ -805,14 +813,14 @@
 
 
 void
-ConnectionImplementation.clearCaches(ServerName serverName) 
-
-
-void
 ClusterConnection.clearCaches(ServerName sn)
 Clear any caches that pertain to server name 
sn.
 
 
+
+void
+ConnectionImplementation.clearCaches(ServerName serverName) 
+
 
 void
 Admin.closeRegion(ServerName sn,
@@ -826,6 +834,18 @@
HRegionInfo hri) 
 
 
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+AsyncAdmin.closeRegion(ServerName sn,
+   HRegionInfo hri)
+Close a region.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+AsyncHBaseAdmin.closeRegion(ServerName sn,
+   HRegionInfo hri) 
+
+
 private void
 HBaseAdmin.compact(ServerName sn,
HRegionInfo hri,
@@ -858,6 +878,10 @@
 HBaseAdmin.coprocessorService(ServerName serverName) 
 
 
+private 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.Interface
+AsyncConnectionImpl.createAdminServerStub(ServerName serverName) 
+
+
 private MultiServerCallable
 AsyncRequestFutureImpl.createCallable(ServerName server,
   TableName tableName,
@@ -865,7 +889,7 @@
 Create a callable.
 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 AsyncRequestFutureImpl.createLog(int numAttempt,
  int failureCount,
@@ -878,15 +902,15 @@
  int failed,
  int stopped) 
 
-
+
 private 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.Interface
 AsyncConnectionImpl.createMasterStub(ServerName serverName) 
 
-
+
 private 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.Interface
 AsyncConnectionImpl.createRegionServerStub(ServerName serverName) 
 
-
+
 static ClusterConnection
 ConnectionUtils.createShortCircuitConnection(org.apache.hadoop.conf.Configuration conf,
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool,
@@ -898,11 +922,6 @@
  deserialization, networking, etc..) when talking to a local server.
 
 
-
-void
-SimpleRequestController.decTaskCounters(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in 
java.util">Collection regions,
-   ServerName sn) 
-
 
 void
 RequestController.decTaskCounters(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in 
java.util">Collection regions,
@@ -911,21 +930,22 @@
 
 
 
+void
+SimpleRequestController.decTaskCounters(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in 
java.util">Collection regions,
+   ServerName sn) 
+
+
 (package private) void
 AsyncProcess.decTaskCounters(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in 
java.util">Collection regions,
ServerName sn) 
 
-
+
 private void
 AsyncBatchRpcRetryingCaller.failAll(http://docs.oracle.com/javase/8/docs/api/java/util/stream/Stream.html?is-external=true";
 title="class or interface in java.util.stream">Stream actions,
int tries,
http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable error,
ServerName serverName) 
 
-
-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProto

[04/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Evolving.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Evolving.html
 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Evolving.html
index 4e3ca9d..3cc7f24 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Evolving.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Evolving.html
@@ -244,47 +244,55 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  
 
 
-org.apache.hadoop.hbase.procedure
+org.apache.hadoop.hbase.net
  
 
 
-org.apache.hadoop.hbase.procedure2
+org.apache.hadoop.hbase.procedure
  
 
 
-org.apache.hadoop.hbase.procedure2.store
+org.apache.hadoop.hbase.procedure2
  
 
 
-org.apache.hadoop.hbase.procedure2.store.wal
+org.apache.hadoop.hbase.procedure2.store
  
 
 
-org.apache.hadoop.hbase.procedure2.util
+org.apache.hadoop.hbase.procedure2.store.wal
  
 
 
-org.apache.hadoop.hbase.quotas
+org.apache.hadoop.hbase.procedure2.util
  
 
 
-org.apache.hadoop.hbase.regionserver
+org.apache.hadoop.hbase.quotas
  
 
 
-org.apache.hadoop.hbase.regionserver.compactions
+org.apache.hadoop.hbase.regionserver
  
 
 
-org.apache.hadoop.hbase.regionserver.wal
+org.apache.hadoop.hbase.regionserver.compactions
  
 
 
+org.apache.hadoop.hbase.regionserver.wal
+ 
+
+
 org.apache.hadoop.hbase.replication
 
 Multi Cluster Replication
 
 
+
+org.apache.hadoop.hbase.replication.regionserver
+ 
+
 
 org.apache.hadoop.hbase.rsgroup
  
@@ -363,78 +371,85 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 class 
+ByteBufferTag
+This is a Tag implementation in which value is 
backed by
+ http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
+
+
+
+class 
 CallDroppedException
 Returned to the clients when their request was discarded 
due to server being overloaded.
 
 
-
+
 class 
 CallQueueTooBigException 
 
-
+
 interface 
 Cell
 The unit of storage in HBase consisting of the following 
fields:
  
 
 
-
+
 class 
 CellComparator
 Compare two HBase cells.
 
 
-
+
 class 
 CellUtil
 Utility methods helpful slinging Cell instances.
 
 
-
+
 class 
 ClusterStatus
 Status information on the HBase cluster.
 
 
-
+
 interface 
 Coprocessor
 Coprocessor interface.
 
 
-
+
 class 
 HBaseInterfaceAudience
 This class defines constants for different classes of hbase 
limited private apis
 
 
-
+
 class 
 HBaseIOException
 All hbase specific IOExceptions should be subclasses of 
HBaseIOException
 
 
-
+
 class 
 HColumnDescriptor
 An HColumnDescriptor contains information about a column 
family such as the
  number of versions, compression settings, etc.
 
 
-
+
 class 
 HRegionInfo
 Information about a region.
 
 
-
+
 class 
 HRegionLocation
 Data structure to hold HRegionInfo and the address for the 
hosting
  HRegionServer.
 
 
-
+
 class 
 HTableDescriptor
 HTableDescriptor contains the details about an HBase table  
such as the descriptors of
@@ -443,54 +458,47 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  when the region split should occur, coprocessors associated with it 
etc...
 
 
-
+
 class 
 InvalidFamilyOperationException
 Thrown if a request is table schema modification is 
requested but
  made for an invalid family name.
 
 
-
+
 class 
 KeepDeletedCells
 Ways to keep cells marked for delete around.
 
 
-
+
 class 
 LocalHBaseCluster
 This class creates a single process HBase cluster.
 
 
-
+
 class 
 MemoryCompactionPolicy
 Enum describing all possible memory compaction 
policies
 
 
-
+
 class 
 MultiActionResultTooLarge
 Exception thrown when the result needs to be chunked on the 
server side.
 
 
-
+
 class 
 NamespaceDescriptor
 Namespace POJO class.
 
 
-
+
 static class 
 NamespaceDescriptor.Builder 
 
-
-class 
-OffheapTag
-This is a Tag implementation in which value is 
backed by an off heap
- http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-
-
 
 class 
 ProcedureInfo
@@ -535,7 +543,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 class 
 ServerName
-Instance of an HBase ServerName.
+Name of a particular incarnation of an HBase Server.
 
 
 
@@ -1190,16 +1198,23 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 interface 
+RegionServerObserver
+Defines coprocessor hooks for interacting with operations 
on the
+ HRegionServer 
process.
+
+
+
+interface 
 SingletonCoprocessorService
 Coprocessor endpoints registered once per server and 
providing protobuf services should implement
  this interface and return t

[02/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html
index 154a327..2057519 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionImpl.html
[Generated javadoc diff (field and method summary tables); the HTML markup did not survive
extraction. Recoverable changes: AsyncConnectionImpl's method table grows from 14 to 16 entries;
a new private ConcurrentMap<String, AdminProtos.AdminService.Interface> field "adminSubs" is
added; new methods createAdminServerStub(ServerName) and getAdminStub(ServerName) appear
alongside the existing createMasterStub/createRegionServerStub, getAdmin, getConfiguration,
getLocator, getMasterStub and getNonceGenerator entries; the remaining field and method rows
are unchanged apart from renumbering.]

[06/52] [partial] hbase-site git commit: Published site at 7763dd6688254d37ad611f5d290db47c83cf93d3.

2017-02-17 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ce958bce/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
index 12c6921..98baa6b 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
[Generated javadoc diff ("Uses of Class Tag"); the HTML markup did not survive extraction.
Recoverable changes: the OffheapTag row is renamed to ByteBufferTag ("a Tag implementation in
which value is backed by a ByteBuffer"); the CellUtil.EMPTY_TAGS_ITR and TagUtil.EMPTY_TAGS_ITR
iterator rows are reordered; the createVisibilityExpTags and encodeVisibilityForReplication rows
for DefaultVisibilityLabelServiceImpl and VisibilityLabelService swap positions, with the javadoc
text remaining on the VisibilityLabelService methods; the remaining rows, including
VisibilityUtils.extractAndPartitionTags, are unchanged.]

hbase git commit: HBASE-17653 HBASE-17624 rsgroup synchronizations will (distributed) deadlock

2017-02-17 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master d7325185a -> b392de3e3


HBASE-17653 HBASE-17624 rsgroup synchronizations will (distributed) deadlock

This patch restores the regime instituted in the original rsgroups patch
(HBASE-6721): reads of rsgroup state run unimpeded against copy-on-write
(COW) immutable Maps, while mutations of state take an exclusive lock
(swapping in updated Maps of state when done). HBASE-17624 was
over-enthusiastic in locking down access, making it likely we would
deadlock.

Adds documentation on concurrency expectations.
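
For reference, the following is a minimal Java sketch of the regime described
above (lock-free reads against an immutable snapshot; writers serialized and
publishing a fresh Map when done). Class and field names are hypothetical; the
real implementation lives in RSGroupInfoManagerImpl.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/** Hypothetical copy-on-write holder for rsgroup-style state. */
class CowGroupState {
  // Readers see a consistent snapshot through a volatile reference to an
  // immutable Map; no lock is taken on the read path.
  private volatile Map<String, String> serverToGroup = Collections.emptyMap();

  /** Lock-free read against the current immutable snapshot. */
  String getGroupOfServer(String server) {
    return serverToGroup.get(server);
  }

  /** Writers serialize here: copy, mutate the copy, then publish it. */
  synchronized void moveServer(String server, String targetGroup) {
    Map<String, String> copy = new HashMap<>(serverToGroup);
    copy.put(server, targetGroup);
    serverToGroup = Collections.unmodifiableMap(copy); // publish the new snapshot
  }
}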


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b392de3e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b392de3e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b392de3e

Branch: refs/heads/master
Commit: b392de3e315aa260e2825484e418701919eb7622
Parents: d732518
Author: Michael Stack 
Authored: Thu Feb 16 16:06:11 2017 -0800
Committer: Michael Stack 
Committed: Fri Feb 17 14:45:56 2017 -0800

--
 .../hbase/rsgroup/RSGroupAdminServer.java   |  43 ++---
 .../hbase/rsgroup/RSGroupBasedLoadBalancer.java |   9 +-
 .../hbase/rsgroup/RSGroupInfoManagerImpl.java   | 160 +++
 .../hadoop/hbase/rsgroup/TestRSGroups.java  |  13 +-
 .../hbase/rsgroup/TestRSGroupsOfflineMode.java  |  10 +-
 5 files changed, 126 insertions(+), 109 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b392de3e/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
index 0bd6d0a..5a00ddb 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
@@ -75,11 +75,15 @@ public class RSGroupAdminServer implements RSGroupAdmin {
 
   @Override
   public RSGroupInfo getRSGroupInfoOfTable(TableName tableName) throws 
IOException {
+// We are reading across two Maps below without synchronizing across
+// them; should be safe most of the time.
 String groupName = getRSGroupInfoManager().getRSGroupOfTable(tableName);
 return groupName == null? null: 
getRSGroupInfoManager().getRSGroup(groupName);
   }
 
   private void checkOnlineServersOnly(Set servers) throws 
ConstraintException {
+// This ugliness is because we only have Address, not ServerName.
+// Online servers are keyed by ServerName.
 Set onlineServers = new HashSet();
 for(ServerName server: 
master.getServerManager().getOnlineServers().keySet()) {
   onlineServers.add(server.getAddress());
@@ -152,29 +156,29 @@ public class RSGroupAdminServer implements RSGroupAdmin {
 }
 RSGroupInfo targetGrp = getAndCheckRSGroupInfo(targetGroupName);
 RSGroupInfoManager manager = getRSGroupInfoManager();
-// Lock the manager during the below manipulations.
+// Hold a lock on the manager instance while moving servers to prevent
+// another writer from changing our state while we are working.
 synchronized (manager) {
   if (master.getMasterCoprocessorHost() != null) {
 master.getMasterCoprocessorHost().preMoveServers(servers, 
targetGroupName);
   }
-  // Presume first server is the source group. Later we check all servers 
are from
-  // this same group.
+  // Take the first server's group as the source group. Later, ensure all servers are from this same group.
   Address firstServer = servers.iterator().next();
   RSGroupInfo srcGrp = manager.getRSGroupOfServer(firstServer);
   if (srcGrp == null) {
-// Be careful. This message is tested for in TestRSGroupsBase...
+// Be careful. This exception message is tested for in 
TestRSGroupsBase...
 throw new ConstraintException("Source RSGroup for server " + 
firstServer + " does not exist.");
   }
   if (srcGrp.getName().equals(targetGroupName)) {
 throw new ConstraintException( "Target RSGroup " + targetGroupName +
 " is same as source " + srcGrp + " RSGroup.");
   }
-  // Only move online servers (when from 'default') or servers from other 
groups.
-  // This prevents bogus servers from entering groups
+  // Only move online servers (when moving from 'default') or servers from other
+  // groups. This prevents bogus servers from entering groups.
   if (RSGroupInfo.DEFAULT_GROUP.equals(srcGrp.getName())) {
 checkOnlineServersOnly(servers);
   }
-  // Check all servers are of same rsgroup.
+  // Ensure all servers are of same rsgroup.
   for (Address server: servers) {

hbase git commit: HBASE-17172 Optimize major mob compaction with _del files

2017-02-17 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 938aef772 -> d7325185a


HBASE-17172 Optimize major mob compaction with _del files

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d7325185
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d7325185
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d7325185

Branch: refs/heads/master
Commit: d7325185ad2864ed8fc78bb362776648a75c951b
Parents: 938aef7
Author: Huaxiang Sun 
Authored: Fri Jan 6 09:25:49 2017 -0800
Committer: tedyu 
Committed: Fri Feb 17 14:22:31 2017 -0800

--
 .../PartitionedMobCompactionRequest.java| 158 +-
 .../compactions/PartitionedMobCompactor.java| 258 ---
 .../TestPartitionedMobCompactor.java| 315 +--
 3 files changed, 661 insertions(+), 70 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d7325185/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactionRequest.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactionRequest.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactionRequest.java
index 3335149..3292d99 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactionRequest.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactionRequest.java
@@ -24,8 +24,11 @@ import java.util.Collections;
 import java.util.List;
 
 import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.mob.MobConstants;
+import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 
 /**
@@ -37,14 +40,14 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 @InterfaceAudience.Private
 public class PartitionedMobCompactionRequest extends MobCompactionRequest {
 
-  protected Collection delFiles;
+  protected List delPartitions;
   protected Collection compactionPartitions;
 
   public PartitionedMobCompactionRequest(Collection 
compactionPartitions,
-Collection delFiles) {
+List delPartitions) {
 this.selectionTime = EnvironmentEdgeManager.currentTime();
 this.compactionPartitions = compactionPartitions;
-this.delFiles = delFiles;
+this.delPartitions = delPartitions;
   }
 
   /**
@@ -59,8 +62,8 @@ public class PartitionedMobCompactionRequest extends 
MobCompactionRequest {
* Gets the del files.
* @return The del files.
*/
-  public Collection getDelFiles() {
-return this.delFiles;
+  public List getDelPartitions() {
+return this.delPartitions;
   }
 
   /**
@@ -72,6 +75,10 @@ public class PartitionedMobCompactionRequest extends 
MobCompactionRequest {
 private List files = new ArrayList();
 private CompactionPartitionId partitionId;
 
+// The startKey and endKey of this partition, both are inclusive.
+private byte[] startKey;
+private byte[] endKey;
+
 public CompactionPartition(CompactionPartitionId partitionId) {
   this.partitionId = partitionId;
 }
@@ -91,6 +98,35 @@ public class PartitionedMobCompactionRequest extends 
MobCompactionRequest {
 public int getFileCount () {
   return files.size();
 }
+
+public byte[] getStartKey() {
+  return startKey;
+}
+
+/**
+ * Set start key of this partition, only if the input startKey is less than
+ * the current start key.
+ */
+public void setStartKey(final byte[] startKey)
+{
+  if ((this.startKey == null) || (Bytes.compareTo(startKey, this.startKey) 
< 0)) {
+this.startKey = startKey;
+  }
+}
+
+public byte[] getEndKey() {
+  return endKey;
+}
+
+/**
+ * Set end key of this partition, only if the input endKey is greater than
+ * the current end key.
+ */
+public void setEndKey(final byte[] endKey) {
+  if ((this.endKey == null) || (Bytes.compareTo(endKey, this.endKey) > 0)) 
{
+this.endKey = endKey;
+  }
+}
   }
 
   /**
@@ -183,4 +219,116 @@ public class PartitionedMobCompactionRequest extends 
MobCompactionRequest {
   return new StringBuilder(startKey).append(date).toString();
 }
   }
+
+  /**
+   * The delete file partition in the mob compaction.
+   * A delete partition is defined as a [startKey, endKey] pair.
+   * The mob delete files that have the same start key and end key belong to
+   * the same partition.
+   */
+  protected static class CompactionDelPartition {
+pri
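
As an illustration of the start/end key bookkeeping added above, here is a
small hypothetical Java sketch of a partition range that can only widen as
files are added, using the same Bytes.compareTo ordering as the diff. It is
not the actual PartitionedMobCompactionRequest code, and the overlap check is
only one plausible way del partitions could be matched against compaction
partitions.

import org.apache.hadoop.hbase.util.Bytes;

/** Hypothetical inclusive [startKey, endKey] range tracker. */
class KeyRange {
  private byte[] startKey; // smallest start key seen so far
  private byte[] endKey;   // largest end key seen so far

  void include(byte[] fileStartKey, byte[] fileEndKey) {
    // Lower the start key only if the new file starts earlier.
    if (startKey == null || Bytes.compareTo(fileStartKey, startKey) < 0) {
      startKey = fileStartKey;
    }
    // Raise the end key only if the new file ends later.
    if (endKey == null || Bytes.compareTo(fileEndKey, endKey) > 0) {
      endKey = fileEndKey;
    }
  }

  /** True if this inclusive range overlaps another inclusive range. */
  boolean overlaps(byte[] otherStart, byte[] otherEnd) {
    return Bytes.compareTo(startKey, otherEnd) <= 0
        && Bytes.compareTo(otherStart, endKey) <= 0;
  }
}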

hbase git commit: HBASE-17025 Add shell commands for space quotas

2017-02-17 Thread elserj
Repository: hbase
Updated Branches:
  refs/heads/HBASE-16961 1b7c13430 -> d87e66d7d


HBASE-17025 Add shell commands for space quotas


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d87e66d7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d87e66d7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d87e66d7

Branch: refs/heads/HBASE-16961
Commit: d87e66d7d62e6d76de813ec7ca5131b668e4103a
Parents: 1b7c134
Author: Josh Elser 
Authored: Wed Jan 11 11:55:29 2017 -0500
Committer: Josh Elser 
Committed: Fri Feb 17 11:33:16 2017 -0500

--
 hbase-shell/src/main/ruby/hbase/quotas.rb   |  62 -
 hbase-shell/src/main/ruby/hbase_constants.rb|   1 +
 .../src/main/ruby/shell/commands/set_quota.rb   |  45 +-
 .../hadoop/hbase/client/AbstractTestShell.java  |   1 +
 hbase-shell/src/test/ruby/hbase/quotas_test.rb  | 137 +++
 hbase-shell/src/test/ruby/tests_runner.rb   |   1 +
 6 files changed, 242 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d87e66d7/hbase-shell/src/main/ruby/hbase/quotas.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/quotas.rb 
b/hbase-shell/src/main/ruby/hbase/quotas.rb
index bf2dc63..d99fe72 100644
--- a/hbase-shell/src/main/ruby/hbase/quotas.rb
+++ b/hbase-shell/src/main/ruby/hbase/quotas.rb
@@ -24,14 +24,22 @@ java_import org.apache.hadoop.hbase.quotas.ThrottleType
 java_import org.apache.hadoop.hbase.quotas.QuotaFilter
 java_import org.apache.hadoop.hbase.quotas.QuotaRetriever
 java_import org.apache.hadoop.hbase.quotas.QuotaSettingsFactory
+java_import org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
 
 module HBaseQuotasConstants
+  # RPC Quota constants
   GLOBAL_BYPASS = 'GLOBAL_BYPASS'
   THROTTLE_TYPE = 'THROTTLE_TYPE'
   THROTTLE = 'THROTTLE'
   REQUEST = 'REQUEST'
   WRITE = 'WRITE'
   READ = 'READ'
+  # Space quota constants
+  SPACE = 'SPACE'
+  NO_INSERTS = 'NO_INSERTS'
+  NO_WRITES = 'NO_WRITES'
+  NO_WRITES_COMPACTIONS = 'NO_WRITES_COMPACTIONS'
+  DISABLE = 'DISABLE'
 end
 
 module Hbase
@@ -107,6 +115,54 @@ module Hbase
   @admin.setQuota(settings)
 end
 
+def limit_space(args)
+  raise(ArgumentError, 'Argument should be a Hash') unless (not args.nil? 
and args.kind_of?(Hash))
+  # Let the user provide a raw number
+  if args[LIMIT].is_a?(Numeric)
+limit = args[LIMIT]
+  else
+# Parse a string like 1K, 2G, etc.
+limit = _parse_size(args[LIMIT])
+  end
+  # Extract the policy, failing if something bogus was provided
+  policy = SpaceViolationPolicy.valueOf(args[POLICY])
+  # Create a table or namespace quota
+  if args.key?(TABLE)
+if args.key?(NAMESPACE)
+  raise(ArgumentError, "Only one of TABLE or NAMESPACE can be 
specified.")
+end
+settings = 
QuotaSettingsFactory.limitTableSpace(TableName.valueOf(args.delete(TABLE)), 
limit, policy)
+  elsif args.key?(NAMESPACE)
+if args.key?(TABLE)
+  raise(ArgumentError, "Only one of TABLE or NAMESPACE can be 
specified.")
+end
+settings = 
QuotaSettingsFactory.limitNamespaceSpace(args.delete(NAMESPACE), limit, policy)
+  else
+raise(ArgumentError, 'One of TABLE or NAMESPACE must be specified.')
+  end
+  # Apply the quota
+  @admin.setQuota(settings)
+end
+
+def remove_space_limit(args)
+  raise(ArgumentError, 'Argument should be a Hash') unless (not args.nil? 
and args.kind_of?(Hash))
+  if args.key?(TABLE)
+if args.key?(NAMESPACE)
+  raise(ArgumentError, "Only one of TABLE or NAMESPACE can be 
specified.")
+end
+table = TableName.valueOf(args.delete(TABLE))
+settings = QuotaSettingsFactory.removeTableSpaceLimit(table)
+  elsif args.key?(NAMESPACE)
+if args.key?(TABLE)
+  raise(ArgumentError, "Only one of TABLE or NAMESPACE can be 
specified.")
+end
+settings = 
QuotaSettingsFactory.removeNamespaceSpaceLimit(args.delete(NAMESPACE))
+  else
+raise(ArgumentError, 'One of TABLE or NAMESPACE must be specified.')
+  end
+  @admin.setQuota(settings)
+end
+
 def set_global_bypass(bypass, args)
   raise(ArgumentError, "Arguments should be a Hash") unless 
args.kind_of?(Hash)
 
@@ -171,7 +227,7 @@ module Hbase
   return _size_from_str(match[1].to_i, match[2])
 end
   else
-raise "Invalid size limit syntax"
+raise(ArgumentError, "Invalid size limit syntax")
   end
 end
 
@@ -188,7 +244,7 @@ module Hbase
 end
 
 if limit <= 0
-  raise "Invalid throttle limit, must be greater then 0"
+  raise(ArgumentError, "Invalid throttle limit, mus
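
The Ruby helpers above delegate to the Java quota API. A minimal Java sketch
of the same calls, assuming the QuotaSettingsFactory and SpaceViolationPolicy
signatures exercised by the Ruby code on this branch (the table name and size
are illustrative only):

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.quotas.QuotaSettings;
import org.apache.hadoop.hbase.quotas.QuotaSettingsFactory;
import org.apache.hadoop.hbase.quotas.SpaceViolationPolicy;

public class SpaceQuotaExample {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Admin admin = conn.getAdmin()) {
      // Cap the table at 1 GB of store file space; reject inserts once exceeded.
      QuotaSettings limit = QuotaSettingsFactory.limitTableSpace(
          TableName.valueOf("my_table"), 1024L * 1024 * 1024, SpaceViolationPolicy.NO_INSERTS);
      admin.setQuota(limit);

      // Later, drop the limit again.
      admin.setQuota(QuotaSettingsFactory.removeTableSpaceLimit(TableName.valueOf("my_table")));
    }
  }
}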

hbase git commit: HBASE-17658 Fix bookkeeping error with max regions for a table

2017-02-17 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 b141603e2 -> 6b96b0ce8


HBASE-17658 Fix bookkeeping error with max regions for a table

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6b96b0ce
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6b96b0ce
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6b96b0ce

Branch: refs/heads/branch-1
Commit: 6b96b0ce853b83cf445018292217f736fbff906d
Parents: b141603
Author: Tim Brown 
Authored: Thu Jan 19 14:21:24 2017 -0800
Committer: tedyu 
Committed: Fri Feb 17 07:42:21 2017 -0800

--
 .../org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java  | 2 +-
 .../apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java  | 2 ++
 2 files changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6b96b0ce/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
index 498d03d..27c4a89 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
@@ -661,7 +661,7 @@ public abstract class BaseLoadBalancer implements 
LoadBalancer {
 
   //check whether this caused maxRegionsPerTable in the new Server to be 
updated
   if (numRegionsPerServerPerTable[newServer][tableIndex] > 
numMaxRegionsPerTable[tableIndex]) {
-numRegionsPerServerPerTable[newServer][tableIndex] = 
numMaxRegionsPerTable[tableIndex];
+numMaxRegionsPerTable[tableIndex] = 
numRegionsPerServerPerTable[newServer][tableIndex];
   } else if (oldServer >= 0 && 
(numRegionsPerServerPerTable[oldServer][tableIndex] + 1)
   == numMaxRegionsPerTable[tableIndex]) {
 //recompute maxRegionsPerTable since the previous value was coming 
from the old server

http://git-wip-us.apache.org/repos/asf/hbase/blob/6b96b0ce/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
index e3523b1..2823d64 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
@@ -326,6 +326,8 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
 
 // now move region1 from servers[0] to servers[2]
 cluster.doAction(new MoveRegionAction(0, 0, 2));
+// check that the numMaxRegionsPerTable for "table" has increased to 2
+assertEquals(2, cluster.numMaxRegionsPerTable[0]);
 // now repeat check whether moving region1 from servers[1] to servers[2]
 // would lower availability
 assertTrue(cluster.wouldLowerAvailability(hri1, servers[2]));
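
The one-line fix above restores the invariant that numMaxRegionsPerTable[table]
equals the maximum of numRegionsPerServerPerTable[server][table] over all
servers. A simplified, hypothetical Java sketch of the corrected bookkeeping
when a region of a table moves between servers:

/** Hypothetical, simplified version of the balancer bookkeeping fixed above. */
class RegionCountBookkeeping {
  static void regionMoved(int[][] regionsPerServerPerTable, int[] maxRegionsPerTable,
      int oldServer, int newServer, int table) {
    if (oldServer >= 0) {
      regionsPerServerPerTable[oldServer][table]--;
    }
    regionsPerServerPerTable[newServer][table]++;

    if (regionsPerServerPerTable[newServer][table] > maxRegionsPerTable[table]) {
      // The destination server now holds the most regions of this table, so the
      // max must be raised (the bug had this assignment reversed).
      maxRegionsPerTable[table] = regionsPerServerPerTable[newServer][table];
    } else if (oldServer >= 0
        && regionsPerServerPerTable[oldServer][table] + 1 == maxRegionsPerTable[table]) {
      // The old server may have been the one defining the max; recompute it.
      int max = 0;
      for (int[] perTable : regionsPerServerPerTable) {
        max = Math.max(max, perTable[table]);
      }
      maxRegionsPerTable[table] = max;
    }
  }
}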



hbase git commit: HBASE-17658 Fix bookkeeping error with max regions for a table

2017-02-17 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master a2d2196e8 -> 938aef772


HBASE-17658 Fix bookkeeping error with max regions for a table

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/938aef77
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/938aef77
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/938aef77

Branch: refs/heads/master
Commit: 938aef772d82f7a52228436b141e62af66e11e76
Parents: a2d2196
Author: Tim Brown 
Authored: Thu Jan 19 14:21:24 2017 -0800
Committer: tedyu 
Committed: Fri Feb 17 07:40:16 2017 -0800

--
 .../org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java  | 2 +-
 .../apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java  | 2 ++
 2 files changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/938aef77/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
index 5893323..f27feb3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
@@ -667,7 +667,7 @@ public abstract class BaseLoadBalancer implements 
LoadBalancer {
 
   //check whether this caused maxRegionsPerTable in the new Server to be 
updated
   if (numRegionsPerServerPerTable[newServer][tableIndex] > 
numMaxRegionsPerTable[tableIndex]) {
-numRegionsPerServerPerTable[newServer][tableIndex] = 
numMaxRegionsPerTable[tableIndex];
+numMaxRegionsPerTable[tableIndex] = 
numRegionsPerServerPerTable[newServer][tableIndex];
   } else if (oldServer >= 0 && 
(numRegionsPerServerPerTable[oldServer][tableIndex] + 1)
   == numMaxRegionsPerTable[tableIndex]) {
 //recompute maxRegionsPerTable since the previous value was coming 
from the old server

http://git-wip-us.apache.org/repos/asf/hbase/blob/938aef77/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
index 554fec5..02032fd 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
@@ -366,6 +366,8 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
 
 // now move region1 from servers[0] to servers[2]
 cluster.doAction(new MoveRegionAction(0, 0, 2));
+// check that the numMaxRegionsPerTable for "table" has increased to 2
+assertEquals(2, cluster.numMaxRegionsPerTable[0]);
 // now repeat check whether moving region1 from servers[1] to servers[2]
 // would lower availability
 assertTrue(cluster.wouldLowerAvailability(hri1, servers[2]));



hbase git commit: HBASE-17648: HBase Table-level synchronization fails between two secured (kerberized) clusters

2017-02-17 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 403891723 -> b141603e2


HBASE-17648: HBase Table-level synchronization fails between two secured (kerberized) clusters

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b141603e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b141603e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b141603e

Branch: refs/heads/branch-1
Commit: b141603e2e70c80d28d91eeadb8b584993f953b4
Parents: 4038917
Author: Yi Liang 
Authored: Tue Feb 14 10:22:22 2017 -0800
Committer: tedyu 
Committed: Fri Feb 17 07:04:35 2017 -0800

--
 .../main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java | 6 ++
 1 file changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b141603e/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
index 27518ca..32e3b00 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
@@ -144,6 +144,12 @@ public class SyncTable extends Configured implements Tool {
   // would be nice to add an option for bulk load instead
 }
 
+// Obtain an authentication token, for the specified cluster, on behalf of 
the current user
+if (sourceZkCluster != null) {
+  Configuration peerConf =
+  HBaseConfiguration.createClusterConf(job.getConfiguration(), 
sourceZkCluster);
+  TableMapReduceUtil.initCredentialsForCluster(job, peerConf);
+}
 return job;
   }
 



hbase git commit: HBASE-17648: HBase Table-level synchronization fails between two secured (kerberized) clusters

2017-02-17 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 7763dd668 -> a2d2196e8


HBASE-17648: HBase Table-level synchronization fails between two secured (kerberized) clusters

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a2d2196e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a2d2196e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a2d2196e

Branch: refs/heads/master
Commit: a2d2196e89f4228c01724eef09c92f82b33c32ed
Parents: 7763dd6
Author: Yi Liang 
Authored: Tue Feb 14 10:22:22 2017 -0800
Committer: tedyu 
Committed: Fri Feb 17 07:03:53 2017 -0800

--
 .../main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java | 6 ++
 1 file changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a2d2196e/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
index 574ec50..954194e 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
@@ -144,6 +144,12 @@ public class SyncTable extends Configured implements Tool {
   // would be nice to add an option for bulk load instead
 }
 
+// Obtain an authentication token, for the specified cluster, on behalf of 
the current user
+if (sourceZkCluster != null) {
+  Configuration peerConf =
+  HBaseConfiguration.createClusterConf(job.getConfiguration(), 
sourceZkCluster);
+  TableMapReduceUtil.initCredentialsForCluster(job, peerConf);
+}
 return job;
   }
 



[2/2] hbase git commit: HBASE-17644 Always create ByteBufferCells after copying to MSLAB.

2017-02-17 Thread anoopsamjohn
HBASE-17644 Always create ByteBufferCells after copying to MSLAB.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7763dd66
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7763dd66
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7763dd66

Branch: refs/heads/master
Commit: 7763dd6688254d37ad611f5d290db47c83cf93d3
Parents: 3fc2889f
Author: anoopsamjohn 
Authored: Fri Feb 17 17:50:41 2017 +0530
Committer: anoopsamjohn 
Committed: Fri Feb 17 17:50:41 2017 +0530

--
 .../apache/hadoop/hbase/ByteBufferKeyValue.java | 348 +++
 .../org/apache/hadoop/hbase/ByteBufferTag.java  |  83 +
 .../java/org/apache/hadoop/hbase/CellUtil.java  |  26 +-
 .../hadoop/hbase/NoTagsByteBufferKeyValue.java  |  62 
 .../apache/hadoop/hbase/OffheapKeyValue.java| 340 --
 .../org/apache/hadoop/hbase/OffheapTag.java |  83 -
 .../java/org/apache/hadoop/hbase/TagUtil.java   |   2 +-
 .../hadoop/hbase/codec/KeyValueCodec.java   |   4 +-
 .../hbase/codec/KeyValueCodecWithTags.java  |   4 +-
 .../hbase/io/encoding/RowIndexSeekerV1.java |   4 +-
 .../hbase/util/test/RedundantKVGenerator.java   |   8 +-
 .../hadoop/hbase/TestByteBufferKeyValue.java| 198 +++
 .../hadoop/hbase/TestOffheapKeyValue.java   | 198 ---
 .../hbase/io/TestTagCompressionContext.java |   4 +-
 .../hadoop/hbase/io/hfile/HFileReaderImpl.java  |   4 +-
 .../hbase/coprocessor/SimpleRegionObserver.java |  36 +-
 .../hbase/filter/TestDependentColumnFilter.java |   2 +-
 .../io/hfile/TestScannerFromBucketCache.java|  18 +-
 .../hadoop/hbase/io/hfile/TestSeekTo.java   |   4 +-
 .../hadoop/hbase/protobuf/TestProtobufUtil.java |   4 +-
 .../regionserver/TestMemStoreChunkPool.java |   8 +-
 .../hbase/regionserver/TestMemStoreLAB.java |  20 +-
 .../hadoop/hbase/regionserver/TestTags.java |  14 +-
 .../wal/TestWALCellCodecWithCompression.java|   6 +-
 .../ExpAsStringVisibilityLabelServiceImpl.java  |   4 +-
 .../hbase/wal/TestWALReaderOnSecureWAL.java |   4 +-
 26 files changed, 777 insertions(+), 711 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7763dd66/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
new file mode 100644
index 000..43d9227
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
@@ -0,0 +1,348 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClassSize;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * This Cell is an implementation of {@link ByteBufferCell} where the data resides in
+ * an off-heap or on-heap ByteBuffer.
+ */
+@InterfaceAudience.Private
+public class ByteBufferKeyValue extends ByteBufferCell implements ExtendedCell 
{
+
+  protected final ByteBuffer buf;
+  protected final int offset;
+  protected final int length;
+  private long seqId = 0;
+
+  private static final int FIXED_OVERHEAD = ClassSize.OBJECT + 
ClassSize.REFERENCE
+  + (2 * Bytes.SIZEOF_INT) + Bytes.SIZEOF_LONG;
+
+  public ByteBufferKeyValue(ByteBuffer buf, int offset, int length, long 
seqId) {
+this.buf = buf;
+this.offset = offset;
+this.length = length;
+this.seqId = seqId;
+  }
+
+  public ByteBufferKeyValue(ByteBuffer buf, int offset, int length) {
+this.buf = buf;
+this.offset = offset;
+this.length = length;
+  }
+
+  @VisibleForTesting
+  public ByteBuffer getBuffer() {
+return this.bu
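
The new file excerpt above is cut off, but the constructors it introduces are
enough for a quick usage sketch. The following mirrors the
TestWALReaderOnSecureWAL change later in this commit: copy an on-heap KeyValue
into a direct ByteBuffer and wrap it as a ByteBufferKeyValue (the row, family,
qualifier and value are placeholders).

import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.ByteBufferKeyValue;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class ByteBufferKeyValueExample {
  public static void main(String[] args) {
    // Build an ordinary on-heap KeyValue with placeholder coordinates.
    KeyValue kv = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("value"));

    // Copy its backing bytes into a direct (off-heap) buffer.
    ByteBuffer bb = ByteBuffer.allocateDirect(kv.getBuffer().length);
    bb.put(kv.getBuffer());

    // Wrap the buffer as a ByteBufferKeyValue, the renamed OffheapKeyValue.
    ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(bb, 0, kv.getLength());
    System.out.println(offheapKV.getValueLength());
  }
}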

[1/2] hbase git commit: HBASE-17644 Always create ByteBufferCells after copying to MSLAB.

2017-02-17 Thread anoopsamjohn
Repository: hbase
Updated Branches:
  refs/heads/master 3fc2889f7 -> 7763dd668


http://git-wip-us.apache.org/repos/asf/hbase/blob/7763dd66/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
index e866d48..6f4a797 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.OffheapKeyValue;
+import org.apache.hadoop.hbase.ByteBufferKeyValue;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
@@ -125,7 +125,7 @@ public class TestWALReaderOnSecureWAL {
 if (offheap) {
   ByteBuffer bb = ByteBuffer.allocateDirect(kv.getBuffer().length);
   bb.put(kv.getBuffer());
-  OffheapKeyValue offheapKV = new OffheapKeyValue(bb, 0, 
kv.getLength());
+  ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(bb, 0, 
kv.getLength());
   kvs.add(offheapKV);
 } else {
   kvs.add(kv);