hbase git commit: HBASE-17596 Implement add/delete/modify column family methods

2017-02-05 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/master d22bfc036 -> 5093a49e0


HBASE-17596 Implement add/delete/modify column family methods


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5093a49e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5093a49e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5093a49e

Branch: refs/heads/master
Commit: 5093a49e0ba3e8f88e03ed8b81dd3abada402c3a
Parents: d22bfc0
Author: Guanghao Zhang 
Authored: Mon Feb 6 13:45:01 2017 +0800
Committer: Guanghao Zhang 
Committed: Mon Feb 6 13:45:01 2017 +0800

--
 .../apache/hadoop/hbase/client/AsyncAdmin.java  |  35 
 .../hadoop/hbase/client/AsyncHBaseAdmin.java|  81 
 .../hadoop/hbase/client/TestAsyncAdmin.java | 192 +++
 3 files changed, 308 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5093a49e/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
index 29b98de..5c6a390 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
@@ -20,10 +20,12 @@ package org.apache.hadoop.hbase.client;
 import java.util.concurrent.CompletableFuture;
 import java.util.regex.Pattern;
 
+import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.util.Pair;
 
 /**
  *  The asynchronous administrative API for HBase.
@@ -217,6 +219,39 @@ public interface AsyncAdmin {
  CompletableFuture<HTableDescriptor[]> disableTables(Pattern pattern);
 
   /**
+   * Get the status of alter command - indicates how many regions have 
received the updated schema
+   * Asynchronous operation.
+   * @param tableName TableName instance
+   * @return Pair indicating the number of regions updated Pair.getFirst() is 
the regions that are
+   * yet to be updated Pair.getSecond() is the total number of regions 
of the table. The
+   * return value will be wrapped by a {@link CompletableFuture}.
+   */
+  CompletableFuture<Pair<Integer, Integer>> getAlterStatus(final TableName 
tableName);
+
+  /**
+   * Add a column family to an existing table.
+   * @param tableName name of the table to add column family to
+   * @param columnFamily column family descriptor of column family to be added
+   */
+  CompletableFuture<Void> addColumnFamily(final TableName tableName,
+  final HColumnDescriptor columnFamily);
+
+  /**
+   * Delete a column family from a table.
+   * @param tableName name of table
+   * @param columnFamily name of column family to be deleted
+   */
+  CompletableFuture<Void> deleteColumnFamily(final TableName tableName, final 
byte[] columnFamily);
+
+  /**
+   * Modify an existing column family on a table.
+   * @param tableName name of table
+   * @param columnFamily new column family descriptor to use
+   */
+  CompletableFuture<Void> modifyColumnFamily(final TableName tableName,
+  final HColumnDescriptor columnFamily);
+
+  /**
* Turn the load balancer on or off.
* @param on
* @return Previous balancer value wrapped by a {@link CompletableFuture}.

http://git-wip-us.apache.org/repos/asf/hbase/blob/5093a49e/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
index fecdb4f..89cd276 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
@@ -30,6 +30,7 @@ import java.util.regex.Pattern;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
@@ -43,14 +44,20 @@ import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
+import 
org.apache.hadoop.hbase.shaded.protobuf.g

hbase git commit: HBASE-17349 Add doc for regionserver group-based assignment

2017-02-05 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 26a94844f -> d22bfc036


HBASE-17349 Add doc for regionserver group-based assignment


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d22bfc03
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d22bfc03
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d22bfc03

Branch: refs/heads/master
Commit: d22bfc0367cff9fbc397a889e89bb6363b07e807
Parents: 26a9484
Author: Michael Stack 
Authored: Thu Feb 2 15:55:12 2017 -0800
Committer: Michael Stack 
Committed: Sun Feb 5 08:36:33 2017 -0800

--
 src/main/asciidoc/_chapters/ops_mgt.adoc | 148 ++
 1 file changed, 148 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d22bfc03/src/main/asciidoc/_chapters/ops_mgt.adoc
--
diff --git a/src/main/asciidoc/_chapters/ops_mgt.adoc 
b/src/main/asciidoc/_chapters/ops_mgt.adoc
index b156ee5..e4c077f 100644
--- a/src/main/asciidoc/_chapters/ops_mgt.adoc
+++ b/src/main/asciidoc/_chapters/ops_mgt.adoc
@@ -2349,3 +2349,151 @@ void rename(Admin admin, String oldTableName, TableName 
newTableName) {
   admin.deleteTable(oldTableName);
 }
 
+
+[[rsgroup]]
+== RegionServer Grouping
+RegionServer Grouping (A.K.A `rsgroup`) is an advanced feature for
+partitioning regionservers into distinctive groups for strict isolation. It
+should only be used by users who are sophisticated enough to understand the
+full implications and have a sufficient background in managing HBase clusters. 
+It was developed by Yahoo! and they run it at scale on their large grid 
cluster.
+See 
link:http://www.slideshare.net/HBaseCon/keynote-apache-hbase-at-yahoo-scale[HBase
 at Yahoo! Scale].
+
+RSGroups can be defined and managed with shell commands or corresponding Java
+APIs. A server can be added to a group with hostname and port pair and tables
+can be moved to this group so that only regionservers in the same rsgroup can
+host the regions of the table. RegionServers and tables can only belong to one
+rsgroup at a time. By default, all tables and regionservers belong to the
+`default` rsgroup. System tables can also be put into a rsgroup using the 
regular
+APIs. A custom balancer implementation tracks assignments per rsgroup and makes
+sure to move regions to the relevant regionservers in that rsgroup. The rsgroup
+information is stored in a regular HBase table, and a zookeeper-based read-only
+cache is used at cluster bootstrap time. 
+
+To enable, add the following to your hbase-site.xml and restart your Master: 
+
+[source,xml]
+
+  <property>
+    <name>hbase.coprocessor.master.classes</name>
+    <value>org.apache.hadoop.hbase.rsgroup.RSGroupAdminEndpoint</value>
+  </property>
+  <property>
+    <name>hbase.master.loadbalancer.class</name>
+    <value>org.apache.hadoop.hbase.rsgroup.RSGroupBasedLoadBalancer</value>
+  </property>
+
+
+Then use the shell _rsgroup_ commands to create and manipulate RegionServer
+groups: e.g. to add a rsgroup and then add a server to it. To see the list of
+rsgroup commands available in the hbase shell type:
+
+[source, bash]
+
+ hbase(main):008:0> help 'rsgroup'
+ Took 0.5610 seconds 
+
+
+High level, you create a rsgroup that is other than the `default` group using
+_add_rsgroup_ command. You then add servers and tables to this group with the
+_move_servers_rsgroup_ and _move_tables_rsgroup_ commands. If necessary, run
+a balance for the group if tables are slow to migrate to the groups dedicated
+server with the _balance_rsgroup_ command (Usually this is not needed). To
+monitor effect of the commands, see the `Tables` tab toward the end of the
+Master UI home page. If you click on a table, you can see what servers it is
+deployed across. You should see here a reflection of the grouping done with
+your shell commands. View the master log if issues.
+
+Here is an example using a few of the rsgroup commands. To add a group, do as 
follows:
+
+[source, bash]
+
+ hbase(main):008:0> add_rsgroup 'my_group' 
+ Took 0.5610 seconds 
+
+
+
+.RegionServer Groups must be Enabled
+[NOTE]
+
+If you have not enabled the rsgroup Coprocessor Endpoint in the master and
+you run any of the rsgroup shell commands, you will see an error message
+like the below:
+
+[source,java]
+
+ERROR: org.apache.hadoop.hbase.exceptions.UnknownProtocolException: No 
registered master coprocessor service found for name RSGroupAdminService
+at 
org.apache.hadoop.hbase.master.MasterRpcServices.execMasterService(MasterRpcServices.java:604)
+at 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos$MasterService$2.callBlockingMethod(MasterProtos.java)
+at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:1140)
+at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:133)
+at 
org.apache.hadoop.hb

[50/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/acid-semantics.html
--
diff --git a/acid-semantics.html b/acid-semantics.html
index d26b4c7..fa4300b 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase –  
   Apache HBase (TM) ACID Properties
@@ -614,11 +614,11 @@ under the License. -->
 
 
   
-  Copyright ©
2007–2016
+  Copyright ©
2007–2017
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-12-07
+  Last Published: 
2017-02-05
 
 
 



[32/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/security/SecurityCapability.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/client/security/SecurityCapability.html 
b/apidocs/org/apache/hadoop/hbase/client/security/SecurityCapability.html
index 99cd7e1..240b0a9 100644
--- a/apidocs/org/apache/hadoop/hbase/client/security/SecurityCapability.html
+++ b/apidocs/org/apache/hadoop/hbase/client/security/SecurityCapability.html
@@ -415,6 +415,6 @@ not permitted.)
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/security/class-use/SecurityCapability.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/client/security/class-use/SecurityCapability.html
 
b/apidocs/org/apache/hadoop/hbase/client/security/class-use/SecurityCapability.html
index 842107a..71c800b 100644
--- 
a/apidocs/org/apache/hadoop/hbase/client/security/class-use/SecurityCapability.html
+++ 
b/apidocs/org/apache/hadoop/hbase/client/security/class-use/SecurityCapability.html
@@ -199,6 +199,6 @@ the order they are declared.
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/security/package-summary.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/client/security/package-summary.html 
b/apidocs/org/apache/hadoop/hbase/client/security/package-summary.html
index e336fed..7430f25 100644
--- a/apidocs/org/apache/hadoop/hbase/client/security/package-summary.html
+++ b/apidocs/org/apache/hadoop/hbase/client/security/package-summary.html
@@ -140,6 +140,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/security/package-tree.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/security/package-tree.html 
b/apidocs/org/apache/hadoop/hbase/client/security/package-tree.html
index a93a729..1bbf093 100644
--- a/apidocs/org/apache/hadoop/hbase/client/security/package-tree.html
+++ b/apidocs/org/apache/hadoop/hbase/client/security/package-tree.html
@@ -137,6 +137,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/security/package-use.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/security/package-use.html 
b/apidocs/org/apache/hadoop/hbase/client/security/package-use.html
index 85bd005..fdac94e 100644
--- a/apidocs/org/apache/hadoop/hbase/client/security/package-use.html
+++ b/apidocs/org/apache/hadoop/hbase/client/security/package-use.html
@@ -178,6 +178,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/conf/package-summary.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/conf/package-summary.html 
b/apidocs/org/apache/hadoop/hbase/conf/package-summary.html
index 56d6276..7c29a6c 100644
--- a/apidocs/org/apache/hadoop/hbase/conf/package-summary.html
+++ b/apidocs/org/apache/hadoop/hbase/conf/package-summary.html
@@ -119,6 +119,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/conf/package-tree.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/conf/package-tree.html 
b/apidocs/org/apache/hadoop/hbase/conf/package-tr

[38/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/Connection.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Connection.html 
b/apidocs/org/apache/hadoop/hbase/client/Connection.html
index 864fb7b..e044901 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Connection.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Connection.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"]};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":18,"i7":18,"i8":6,"i9":6};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],16:["t5","Default Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -144,7 +144,7 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 Method Summary
 
-All Methods Instance Methods Abstract Methods 
+All Methods Instance Methods Abstract Methods Default Methods 
 
 Modifier and Type
 Method and Description
@@ -183,19 +183,26 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 
-Table
+default Table
 getTable(TableName tableName)
 Retrieve a Table implementation for accessing a table.
 
 
 
-Table
+default Table
 getTable(TableName tableName,
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool)
 Retrieve a Table implementation for accessing a table.
 
 
 
+TableBuilder
+getTableBuilder(TableName tableName,
+   http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool)
+Returns an TableBuilder for creating Table.
+
+
+
 boolean
 isClosed()
 Returns whether the connection is closed or not.
@@ -242,8 +249,8 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 getTable
-Table getTable(TableName tableName)
-throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
+default Table getTable(TableName tableName)
+throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Retrieve a Table implementation for accessing a table.
  The returned Table is not thread safe, a new instance should be created for 
each using thread.
  This is a lightweight operation, pooling or caching of the returned Table
@@ -271,9 +278,9 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 getTable
-Table getTable(TableName tableName,
-   http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool)
-throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
+default Table getTable(TableName tableName,
+   http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool)
+throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Retrieve a Table implementation for accessing a table.
  The returned Table is not thread safe, a new instance should be created for 
each using thread.
  This is a lightweight operation, pooling or caching of the returned Table
@@ -302,7 +309,7 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 getBufferedMutator
-BufferedMutator getBufferedMutator(TableName tableName)
+BufferedMutator getBufferedMutator(TableName tableName)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 
  Retrieve a BufferedMutator for performing 
client-side buffering of writes. The
@@ -332,7 +339,7 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 getBufferedMutator
-BufferedMutator getBufferedMutator(BufferedMutatorParams params)
+BufferedMutator getBufferedMutator(BufferedMutatorParams params)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Retrieve a BufferedMutator for performing 
client-side buffering of writes. The
  Buff

[49/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 9461ee6..37446bb 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,24 +5,24 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
 /Producer (Apache HBase Team)
-/CreationDate (D:20161207204616+00'00')
-/ModDate (D:20161207204616+00'00')
+/CreationDate (D:20170205144805+00'00')
+/ModDate (D:20170205144805+00'00')
 >>
 endobj
 2 0 obj
 << /Type /Catalog
 /Pages 3 0 R
 /Names 25 0 R
-/Outlines 4052 0 R
-/PageLabels 4260 0 R
+/Outlines 4035 0 R
+/PageLabels 4242 0 R
 /PageMode /UseOutlines
 /ViewerPreferences [/FitWindow]
 >>
 endobj
 3 0 obj
 << /Type /Pages
-/Count 676
-/Kids [7 0 R 13 0 R 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 39 0 R 43 0 R 47 0 R 55 
0 R 58 0 R 60 0 R 62 0 R 66 0 R 71 0 R 74 0 R 79 0 R 81 0 R 84 0 R 86 0 R 92 0 
R 101 0 R 106 0 R 108 0 R 129 0 R 135 0 R 142 0 R 144 0 R 148 0 R 151 0 R 161 0 
R 169 0 R 180 0 R 190 0 R 194 0 R 196 0 R 200 0 R 206 0 R 208 0 R 210 0 R 212 0 
R 214 0 R 217 0 R 223 0 R 226 0 R 228 0 R 230 0 R 232 0 R 234 0 R 236 0 R 238 0 
R 242 0 R 245 0 R 248 0 R 250 0 R 252 0 R 254 0 R 256 0 R 258 0 R 260 0 R 266 0 
R 269 0 R 271 0 R 273 0 R 275 0 R 280 0 R 285 0 R 290 0 R 294 0 R 297 0 R 312 0 
R 322 0 R 328 0 R 339 0 R 349 0 R 354 0 R 356 0 R 358 0 R 369 0 R 374 0 R 378 0 
R 383 0 R 387 0 R 398 0 R 410 0 R 424 0 R 434 0 R 436 0 R 438 0 R 443 0 R 453 0 
R 466 0 R 476 0 R 480 0 R 483 0 R 487 0 R 491 0 R 494 0 R 497 0 R 499 0 R 502 0 
R 506 0 R 508 0 R 513 0 R 517 0 R 523 0 R 527 0 R 529 0 R 535 0 R 537 0 R 541 0 
R 549 0 R 551 0 R 554 0 R 558 0 R 561 0 R 564 0 R 579 0 R 586 0 R 593 0 R 604 0 
R 610 0 R 618 0 R 626 0 R 629 0 R 633 0
  R 636 0 R 647 0 R 655 0 R 661 0 R 667 0 R 671 0 R 673 0 R 687 0 R 699 0 R 705 
0 R 711 0 R 714 0 R 723 0 R 731 0 R 735 0 R 740 0 R 746 0 R 748 0 R 750 0 R 752 
0 R 760 0 R 769 0 R 773 0 R 781 0 R 789 0 R 795 0 R 799 0 R 805 0 R 809 0 R 815 
0 R 823 0 R 825 0 R 829 0 R 834 0 R 841 0 R 844 0 R 851 0 R 860 0 R 864 0 R 866 
0 R 869 0 R 873 0 R 878 0 R 881 0 R 893 0 R 897 0 R 902 0 R 910 0 R 915 0 R 919 
0 R 924 0 R 926 0 R 929 0 R 931 0 R 935 0 R 937 0 R 940 0 R 944 0 R 948 0 R 953 
0 R 958 0 R 961 0 R 963 0 R 970 0 R 974 0 R 979 0 R 992 0 R 996 0 R 1000 0 R 
1005 0 R 1007 0 R 1016 0 R 1019 0 R 1024 0 R 1027 0 R 1036 0 R 1039 0 R 1045 0 
R 1052 0 R 1055 0 R 1057 0 R 1066 0 R 1068 0 R 1070 0 R 1073 0 R 1075 0 R 1077 
0 R 1079 0 R 1081 0 R 1083 0 R 1086 0 R 1089 0 R 1094 0 R 1097 0 R 1099 0 R 
1101 0 R 1103 0 R 1108 0 R 1117 0 R 1120 0 R 1122 0 R 1124 0 R 1129 0 R 1131 0 
R 1134 0 R 1136 0 R 1138 0 R 1140 0 R 1143 0 R 1148 0 R 1154 0 R 1161 0 R 1166 
0 R 1180 0 R 1191 0 R 1195 0 R 1210 0 R 1219 0 R 
 1233 0 R 1237 0 R 1247 0 R 1260 0 R 1264 0 R 1276 0 R 1285 0 R 1292 0 R 1296 0 
R 1305 0 R 1310 0 R 1314 0 R 1320 0 R 1326 0 R 1333 0 R 1341 0 R 1343 0 R 1355 
0 R 1357 0 R 1362 0 R 1366 0 R 1371 0 R 1381 0 R 1387 0 R 1393 0 R 1395 0 R 
1397 0 R 1409 0 R 1416 0 R 1426 0 R 1431 0 R 1444 0 R 1451 0 R 1454 0 R 1463 0 
R 1472 0 R 1477 0 R 1483 0 R 1487 0 R 1490 0 R 1492 0 R 1499 0 R 1502 0 R 1509 
0 R 1513 0 R 1516 0 R 1525 0 R 1529 0 R 1532 0 R 1534 0 R 1543 0 R 1550 0 R 
1556 0 R 1561 0 R 1565 0 R 1568 0 R 1574 0 R 1579 0 R 1584 0 R 1586 0 R 1588 0 
R 1591 0 R 1593 0 R 1601 0 R 1604 0 R 1610 0 R 1617 0 R 1621 0 R 1626 0 R 1629 
0 R 1631 0 R 1636 0 R 1639 0 R 1641 0 R 1643 0 R 1645 0 R 1651 0 R 1661 0 R 
1663 0 R 1665 0 R 1667 0 R 1669 0 R 1672 0 R 1674 0 R 1676 0 R 1678 0 R 1681 0 
R 1683 0 R 1685 0 R 1687 0 R 1691 0 R 1695 0 R 1704 0 R 1706 0 R 1708 0 R 1710 
0 R 1712 0 R 1719 0 R 1721 0 R 1726 0 R 1728 0 R 1730 0 R 1737 0 R 1742 0 R 
1746 0 R 1750 0 R 1753 0 R 1756 0 R 1760 0 R 1762 0 R 1765 0 
 R 1767 0 R 1769 0 R 1771 0 R 1775 0 R 1777 0 R 1781 0 R 1783 0 R 1785 0 R 1787 
0 R 1789 0 R 1797 0 R 1800 0 R 1805 0 R 1807 0 R 1809 0 R 1811 0 R 1813 0 R 
1821 0 R 1831 0 R 1834 0 R 1850 0 R 1865 0 R 1869 0 R 1874 0 R 1879 0 R 1882 0 
R 1887 0 R 1889 0 R 1894 0 R 1896 0 R 1899 0 R 1901 0 R 1903 0 R 1905 0 R 1907 
0 R 1911 0 R 1913 0 R 1920 0 R 1924 0 R 1928 0 R 1936 0 R 1942 0 R 1953 0 R 
1967 0 R 1980 0 R 1998 0 R 2002 0 R 2004 0 R 2008 0 R 2025 0 R 2033 0 R 2040 0 
R 2049 0 R 2053 0 R 2063 0 R 2074 0 R 2080 0 R 2089 0 R 2102 0 R 2119 0 R 2131 
0 R 2134 0 R 2143 0 R 2158 0 R 2165 0 R 2168 0 R 2173 0 R 2178 0 R 2188 0 R 
2196 0 R 2199 0 R 2201 0 R 2205 0 R 2220 0 R 2229 0 R 2234 0 R 2238 0 R 2241 0 
R 2243 0 R 2245 0 R 2247 0 R 2249 0 R 2254 0 R 2256 0 R 2266 0 R 2276 0 R 2283 
0 R 2295 0 R 2300 0 R 2304 0 R 2317 0 R 2324 0 R 2330 0 R 2332 0 R 2342 0 R 
2349 0 R 2360 0 R 2364 0 R 2373 0 R 2379 0 R 2389 0 R 2398 0 R 2406 0 R 2412 0 
R 2417 0 R 2421 0 R 2424 0 R 2426 0 R 2432 0 R 2436 0 R 2440 
 0 R 2446 0 R 2453 0 R 2458 0 R 2462 0 R 2

[36/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/ScanResultConsumer.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/ScanResultConsumer.html 
b/apidocs/org/apache/hadoop/hbase/client/ScanResultConsumer.html
index 72fc397..a9fe429 100644
--- a/apidocs/org/apache/hadoop/hbase/client/ScanResultConsumer.html
+++ b/apidocs/org/apache/hadoop/hbase/client/ScanResultConsumer.html
@@ -260,6 +260,6 @@ public interface Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/ScannerTimeoutException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/client/ScannerTimeoutException.html 
b/apidocs/org/apache/hadoop/hbase/client/ScannerTimeoutException.html
index e98f47a..a18512a 100644
--- a/apidocs/org/apache/hadoop/hbase/client/ScannerTimeoutException.html
+++ b/apidocs/org/apache/hadoop/hbase/client/ScannerTimeoutException.html
@@ -43,7 +43,7 @@
 
 
 
-Prev Class
+Prev Class
 Next Class
 
 
@@ -195,7 +195,7 @@ extends 
 
-Prev Class
+Prev Class
 Next Class
 
 
@@ -236,6 +236,6 @@ extends Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/SnapshotDescription.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/SnapshotDescription.html 
b/apidocs/org/apache/hadoop/hbase/client/SnapshotDescription.html
index 030c14a..b175d8b 100644
--- a/apidocs/org/apache/hadoop/hbase/client/SnapshotDescription.html
+++ b/apidocs/org/apache/hadoop/hbase/client/SnapshotDescription.html
@@ -548,6 +548,6 @@ public http://docs.oracle.com/javase/8/docs/api/java/lang/String.h
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html 
b/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html
index f49bc22..f75cc8a 100644
--- a/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html
+++ b/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html
@@ -352,6 +352,6 @@ not permitted.)
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/Table.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Table.html 
b/apidocs/org/apache/hadoop/hbase/client/Table.html
index 5f98b3b..26978a2 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Table.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Table.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":38,"i25":6,"i26":6,"i27":6,"i28":6,"i29":38,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":38,"i40":38,"i41":6};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":38,"i25":6,"i26":6,"i27":6,"i28":6,"i29":38,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":38,"i38":38,"i39":38,"i40":38,"i41":38};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames
@@ -439,15 +439,17 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 void
 setOperationTimeout(int operationTimeout)
-Set timeout (millisecond) of each operation in this Table 
instance, will override the value
- of hbase.client.operation.timeout in configuration.
+Deprec

[19/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
index b673124..8be3e73 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
@@ -883,481 +883,488 @@
 875
 876  /**
 877   * timeout for each RPC
-878   * @deprecated Use {@link 
#HBASE_RPC_READ_TIMEOUT_KEY} or {@link #HBASE_RPC_WRITE_TIMEOUT_KEY}
-879   * instead.
-880   */
-881  @Deprecated
-882  public static final String 
HBASE_RPC_TIMEOUT_KEY = "hbase.rpc.timeout";
-883
-884  /**
-885   * timeout for each read RPC
-886   */
-887  public static final String 
HBASE_RPC_READ_TIMEOUT_KEY = "hbase.rpc.read.timeout";
-888
-889  /**
-890   * timeout for each write RPC
-891   */
-892  public static final String 
HBASE_RPC_WRITE_TIMEOUT_KEY = "hbase.rpc.write.timeout";
-893
-894  /**
-895   * Default value of {@link 
#HBASE_RPC_TIMEOUT_KEY}
-896   */
-897  public static final int 
DEFAULT_HBASE_RPC_TIMEOUT = 6;
-898
-899  /**
-900   * timeout for short operation RPC
-901   */
-902  public static final String 
HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY =
-903  
"hbase.rpc.shortoperation.timeout";
-904
-905  /**
-906   * Default value of {@link 
#HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY}
-907   */
-908  public static final int 
DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT = 1;
-909
-910  /**
-911   * Value indicating the server name was 
saved with no sequence number.
-912   */
-913  public static final long NO_SEQNUM = 
-1;
-914
-915
-916  /*
-917   * cluster replication constants.
-918   */
-919  public static final String
-920  
REPLICATION_SOURCE_SERVICE_CLASSNAME = "hbase.replication.source.service";
-921  public static final String
-922  REPLICATION_SINK_SERVICE_CLASSNAME 
= "hbase.replication.sink.service";
-923  public static final String 
REPLICATION_SERVICE_CLASSNAME_DEFAULT =
-924
"org.apache.hadoop.hbase.replication.regionserver.Replication";
-925  public static final String 
REPLICATION_BULKLOAD_ENABLE_KEY = "hbase.replication.bulkload.enabled";
-926  public static final boolean 
REPLICATION_BULKLOAD_ENABLE_DEFAULT = false;
-927  /** Replication cluster id of source 
cluster which uniquely identifies itself with peer cluster */
-928  public static final String 
REPLICATION_CLUSTER_ID = "hbase.replication.cluster.id";
-929
-930  public static final String
-931  REPLICATION_SERIALLY_WAITING_KEY = 
"hbase.serial.replication.waitingMs";
-932  public static final long
-933  
REPLICATION_SERIALLY_WAITING_DEFAULT = 1;
-934
-935  /**
-936   * Directory where the source cluster 
file system client configuration are placed which is used by
-937   * sink cluster to copy HFiles from 
source cluster file system
-938   */
-939  public static final String 
REPLICATION_CONF_DIR = "hbase.replication.conf.dir";
+878   */
+879  public static final String 
HBASE_RPC_TIMEOUT_KEY = "hbase.rpc.timeout";
+880
+881  /**
+882   * timeout for each read RPC
+883   */
+884  public static final String 
HBASE_RPC_READ_TIMEOUT_KEY = "hbase.rpc.read.timeout";
+885
+886  /**
+887   * timeout for each write RPC
+888   */
+889  public static final String 
HBASE_RPC_WRITE_TIMEOUT_KEY = "hbase.rpc.write.timeout";
+890
+891  /**
+892   * Default value of {@link 
#HBASE_RPC_TIMEOUT_KEY}
+893   */
+894  public static final int 
DEFAULT_HBASE_RPC_TIMEOUT = 6;
+895
+896  /**
+897   * timeout for short operation RPC
+898   */
+899  public static final String 
HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY =
+900  
"hbase.rpc.shortoperation.timeout";
+901
+902  /**
+903   * Default value of {@link 
#HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY}
+904   */
+905  public static final int 
DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT = 1;
+906
+907  /**
+908   * Value indicating the server name was 
saved with no sequence number.
+909   */
+910  public static final long NO_SEQNUM = 
-1;
+911
+912
+913  /*
+914   * cluster replication constants.
+915   */
+916  public static final String
+917  
REPLICATION_SOURCE_SERVICE_CLASSNAME = "hbase.replication.source.service";
+918  public static final String
+919  REPLICATION_SINK_SERVICE_CLASSNAME 
= "hbase.replication.sink.service";
+920  public static final String 
REPLICATION_SERVICE_CLASSNAME_DEFAULT =
+921
"org.apache.hadoop.hbase.replication.regionserver.Replication";
+922  public static final String 
REPLICATION_BULKLOAD_ENABLE_KEY = "hbase.replication.bulkload.enabled";
+923  public static final boolean 
REPLICATION_BULKLOAD_ENABLE_DEFAULT = false;
+924  /** Replication cluster id of source 
cluster which uniquely identifies itself with peer cluster */
+925  public static final String 
REPLICATION_CLUSTER_ID = "hbase.replication.cluster.id";
+926
+927  public static final String
+9

[43/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/HRegionInfo.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HRegionInfo.html 
b/apidocs/org/apache/hadoop/hbase/HRegionInfo.html
index 065ee61..6c5b214 100644
--- a/apidocs/org/apache/hadoop/hbase/HRegionInfo.html
+++ b/apidocs/org/apache/hadoop/hbase/HRegionInfo.html
@@ -1815,6 +1815,6 @@ 
public org.apache.hadoop.hbase.KeyValue.KVComparator Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/HRegionLocation.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HRegionLocation.html 
b/apidocs/org/apache/hadoop/hbase/HRegionLocation.html
index 34bc057..81d1307 100644
--- a/apidocs/org/apache/hadoop/hbase/HRegionLocation.html
+++ b/apidocs/org/apache/hadoop/hbase/HRegionLocation.html
@@ -451,6 +451,6 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html 
b/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html
index e374050..9fe40e4 100644
--- a/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html
+++ b/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html
@@ -2556,6 +2556,6 @@ public http://docs.oracle.com/javase/8/docs/api/java/lang/String.h
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/InvalidFamilyOperationException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/InvalidFamilyOperationException.html 
b/apidocs/org/apache/hadoop/hbase/InvalidFamilyOperationException.html
index 58d3c2f..e13abb0 100644
--- a/apidocs/org/apache/hadoop/hbase/InvalidFamilyOperationException.html
+++ b/apidocs/org/apache/hadoop/hbase/InvalidFamilyOperationException.html
@@ -318,6 +318,6 @@ extends 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html 
b/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
index 89f2b7a..490b465 100644
--- a/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
+++ b/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
@@ -368,6 +368,6 @@ not permitted.)
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/LocalHBaseCluster.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/LocalHBaseCluster.html 
b/apidocs/org/apache/hadoop/hbase/LocalHBaseCluster.html
index e9d803d..a949e6e 100644
--- a/apidocs/org/apache/hadoop/hbase/LocalHBaseCluster.html
+++ b/apidocs/org/apache/hadoop/hbase/LocalHBaseCluster.html
@@ -878,6 +878,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/MasterNotRunningException.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/MasterNotRunningException.html 
b/apidocs/org/apache/hadoop/hbase/MasterNotRunningException.html
index 801e072..8766396 100644
--- a/apidocs/org/apache/hadoop/hbase/MasterNotRunningException.html
+++ b/apidocs/org/apache

[14/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/client/Scan.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Scan.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Scan.html
index 899e364..c248583 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Scan.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Scan.html
@@ -54,965 +54,1135 @@
 046/**
 047 * Used to perform Scan operations.
 048 * 

-049 * All operations are identical to {@link Get} with the exception of -050 * instantiation. Rather than specifying a single row, an optional startRow -051 * and stopRow may be defined. If rows are not specified, the Scanner will -052 * iterate over all rows. -053 *

-054 * To get all columns from all rows of a Table, create an instance with no constraints; use the -055 * {@link #Scan()} constructor. To constrain the scan to specific column families, -056 * call {@link #addFamily(byte[]) addFamily} for each family to retrieve on your Scan instance. -057 *

-058 * To get specific columns, call {@link #addColumn(byte[], byte[]) addColumn} -059 * for each column to retrieve. -060 *

-061 * To only retrieve columns within a specific range of version timestamps, -062 * call {@link #setTimeRange(long, long) setTimeRange}. -063 *

-064 * To only retrieve columns with a specific timestamp, call -065 * {@link #setTimeStamp(long) setTimestamp}. -066 *

-067 * To limit the number of versions of each column to be returned, call -068 * {@link #setMaxVersions(int) setMaxVersions}. -069 *

-070 * To limit the maximum number of values returned for each call to next(), -071 * call {@link #setBatch(int) setBatch}. -072 *

-073 * To add a filter, call {@link #setFilter(org.apache.hadoop.hbase.filter.Filter) setFilter}. -074 *

-075 * Expert: To explicitly disable server-side block caching for this scan, -076 * execute {@link #setCacheBlocks(boolean)}. -077 *

Note: Usage alters Scan instances. Internally, attributes are updated as the Scan -078 * runs and if enabled, metrics accumulate in the Scan instance. Be aware this is the case when -079 * you go to clone a Scan instance or if you go to reuse a created Scan instance; safer is create -080 * a Scan instance per usage. -081 */ -082@InterfaceAudience.Public -083@InterfaceStability.Stable -084public class Scan extends Query { -085 private static final Log LOG = LogFactory.getLog(Scan.class); -086 -087 private static final String RAW_ATTR = "_raw_"; -088 -089 private byte [] startRow = HConstants.EMPTY_START_ROW; -090 private byte [] stopRow = HConstants.EMPTY_END_ROW; -091 private int maxVersions = 1; -092 private int batch = -1; +049 * All operations are identical to {@link Get} with the exception of instantiation. Rather than +050 * specifying a single row, an optional startRow and stopRow may be defined. If rows are not +051 * specified, the Scanner will iterate over all rows. +052 *

+053 * To get all columns from all rows of a Table, create an instance with no constraints; use the +054 * {@link #Scan()} constructor. To constrain the scan to specific column families, call +055 * {@link #addFamily(byte[]) addFamily} for each family to retrieve on your Scan instance. +056 *

+057 * To get specific columns, call {@link #addColumn(byte[], byte[]) addColumn} for each column to +058 * retrieve. +059 *

+060 * To only retrieve columns within a specific range of version timestamps, call +061 * {@link #setTimeRange(long, long) setTimeRange}. +062 *

+063 * To only retrieve columns with a specific timestamp, call {@link #setTimeStamp(long) setTimestamp} +064 * . +065 *

+066 * To limit the number of versions of each column to be returned, call {@link #setMaxVersions(int) +067 * setMaxVersions}. +068 *

+069 * To limit the maximum number of values returned for each call to next(), call +070 * {@link #setBatch(int) setBatch}. +071 *

+072 * To add a filter, call {@link #setFilter(org.apache.hadoop.hbase.filter.Filter) setFilter}. +073 *

+074 * For small scan, it is deprecated in 2.0.0. Now we have a {@link #setLimit(int)} method in Scan +075 * object which is used to tell RS how many rows we want. If the rows return reaches the limit, the +076 * RS will close the RegionScanner automatically. And we will also fetch data when openScanner in +077 * the new implementation, this means we can also finish a scan operation in one rpc call. And we +078 * have also introduced a {@link #setReadType(ReadType)} method. You can use this method to tell RS +079 * to use pread explicitly. +080 *

+081 * Expert: To explicitly disable server-side block caching for this scan, execute +082 * {@link #setCacheBlocks(boolean)}. +083 *

+084 * Note: Usage alters Scan instances. Internally, attributes are updated as the Scan


[41/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html 
b/apidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
index 5a086a5..e442be7 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
@@ -219,6 +219,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/class-use/UnknownRegionException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/class-use/UnknownRegionException.html 
b/apidocs/org/apache/hadoop/hbase/class-use/UnknownRegionException.html
index d940fe7..c6360d7 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/UnknownRegionException.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/UnknownRegionException.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/class-use/UnknownScannerException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/class-use/UnknownScannerException.html 
b/apidocs/org/apache/hadoop/hbase/class-use/UnknownScannerException.html
index f642861..92045d3 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/UnknownScannerException.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/UnknownScannerException.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html 
b/apidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
index b99a496..4e03368 100644
--- 
a/apidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
+++ 
b/apidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/classification/InterfaceAudience.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/classification/InterfaceAudience.html 
b/apidocs/org/apache/hadoop/hbase/classification/InterfaceAudience.html
index 3bafbcf..4b15a55 100644
--- a/apidocs/org/apache/hadoop/hbase/classification/InterfaceAudience.html
+++ b/apidocs/org/apache/hadoop/hbase/classification/InterfaceAudience.html
@@ -214,6 +214,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/classification/InterfaceStability.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/classification/InterfaceStability.html 
b/apidocs/org/apache/hadoop/hbase/classification/InterfaceStability.html
index e6438b9..9616ee7 100644
--- a/apidocs/org/apache/hadoop/hbase/classification/InterfaceStability.html
+++ b/apidocs/org/apache/hadoop/hbase/classification/InterfaceStability.html
@@ -249,6 +249,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/bl

[17/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
index f37f18d..c00f30a 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
@@ -28,1810 +28,1959 @@
 020
 021import java.io.Closeable;
 022import java.io.IOException;
-023import java.util.List;
-024import java.util.Map;
-025import java.util.concurrent.Future;
-026import java.util.regex.Pattern;
-027
-028import 
org.apache.hadoop.conf.Configuration;
-029import 
org.apache.hadoop.hbase.Abortable;
-030import 
org.apache.hadoop.hbase.ClusterStatus;
-031import 
org.apache.hadoop.hbase.HColumnDescriptor;
-032import 
org.apache.hadoop.hbase.HRegionInfo;
-033import 
org.apache.hadoop.hbase.HTableDescriptor;
-034import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-035import 
org.apache.hadoop.hbase.NamespaceNotFoundException;
-036import 
org.apache.hadoop.hbase.ProcedureInfo;
-037import 
org.apache.hadoop.hbase.RegionLoad;
-038import 
org.apache.hadoop.hbase.ServerName;
-039import 
org.apache.hadoop.hbase.TableExistsException;
-040import 
org.apache.hadoop.hbase.TableName;
-041import 
org.apache.hadoop.hbase.TableNotFoundException;
-042import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-043import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-044import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-045import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-046import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-047import 
org.apache.hadoop.hbase.quotas.QuotaRetriever;
-048import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-049import 
org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
-050import 
org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
-051import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-052import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-053import 
org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
-054import 
org.apache.hadoop.hbase.util.Pair;
-055
-056/**
-057 * The administrative API for HBase. 
Obtain an instance from an {@link Connection#getAdmin()} and
-058 * call {@link #close()} afterwards.
-059 * 

Admin can be used to create, drop, list, enable and disable tables, add and drop table -060 * column families and other administrative operations. -061 * -062 * @see ConnectionFactory -063 * @see Connection -064 * @see Table -065 * @since 0.99.0 -066 */ -067@InterfaceAudience.Public -068@InterfaceStability.Evolving -069public interface Admin extends Abortable, Closeable { -070 int getOperationTimeout(); -071 -072 @Override -073 void abort(String why, Throwable e); -074 -075 @Override -076 boolean isAborted(); -077 -078 /** -079 * @return Connection used by this object. -080 */ -081 Connection getConnection(); -082 -083 /** -084 * @param tableName Table to check. -085 * @return True if table exists already. -086 * @throws IOException -087 */ -088 boolean tableExists(final TableName tableName) throws IOException; -089 -090 /** -091 * List all the userspace tables. -092 * -093 * @return - returns an array of HTableDescriptors -094 * @throws IOException if a remote or network exception occurs -095 */ -096 HTableDescriptor[] listTables() throws IOException; -097 -098 /** -099 * List all the userspace tables matching the given pattern. -100 * -101 * @param pattern The compiled regular expression to match against -102 * @return - returns an array of HTableDescriptors -103 * @throws IOException if a remote or network exception occurs -104 * @see #listTables() -105 */ -106 HTableDescriptor[] listTables(Pattern pattern) throws IOException; -107 -108 /** -109 * List all the userspace tables matching the given regular expression. -110 * -111 * @param regex The regular expression to match against -112 * @return - returns an array of HTableDescriptors -113 * @throws IOException if a remote or network exception occurs -114 * @see #listTables(java.util.regex.Pattern) -115 */ -116 HTableDescriptor[] listTables(String regex) throws IOException; -117 -118 /** -119 * List all the tables matching the given pattern. 
-120 * -121 * @param pattern The compiled regular expression to match against -122 * @param includeSysTables False to match only against userspace tables -123 * @return - returns an array of HTableDescriptors -124 * @throws IOException if a remote or network exception occurs -125 * @see #listTables() -126 */ -127 HTableDescriptor[] listTables(Pattern pattern, boolean includeSysTables) -128 throws IOException; -129 -130 /** -131 * List all the tables matching the given patt


[18/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html 
b/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html
index bd505e0..45cd59a 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html
@@ -1395,7 +1395,7 @@
 1387   * @return The list of co-processors 
classNames
 1388   */
 1389  public List 
getCoprocessors() {
-1390List result = new 
ArrayList();
+1390List result = new 
ArrayList(this.values.entrySet().size());
 1391Matcher keyMatcher;
 1392for (Map.Entry e 
: this.values.entrySet()) {
 1393  keyMatcher = 
HConstants.CP_HTD_ATTR_KEY_PATTERN.matcher(Bytes.toString(e.getKey().get()));

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/ServerName.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/ServerName.html 
b/apidocs/src-html/org/apache/hadoop/hbase/ServerName.html
index 97ffe8c..fcef767 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/ServerName.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/ServerName.html
@@ -106,7 +106,7 @@
 098  private byte [] bytes;
 099  public static final 
List EMPTY_SERVER_LIST = new 
ArrayList(0);
 100
-101  private ServerName(final String 
hostname, final int port, final long startcode) {
+101  protected ServerName(final String 
hostname, final int port, final long startcode) {
 102// Drop the domain is there is one; 
no need of it in a local cluster.  With it, we get long
 103// unwieldy names.
 104this.hostnameOnly = hostname;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/classification/InterfaceAudience.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/classification/InterfaceAudience.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/classification/InterfaceAudience.html
index 1348259..1fe4b96 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/classification/InterfaceAudience.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/classification/InterfaceAudience.html
@@ -58,7 +58,7 @@
 050   */
 051  @Documented
 052  @Retention(RetentionPolicy.RUNTIME)
-053  public @interface Public {};
+053  public @interface Public {}
 054
 055  /**
 056   * Intended only for the project(s) 
specified in the annotation.
@@ -68,14 +68,14 @@
 060  @Retention(RetentionPolicy.RUNTIME)
 061  public @interface LimitedPrivate {
 062String[] value();
-063  };
+063  }
 064
 065  /**
 066   * Intended for use only within Hadoop 
itself.
 067   */
 068  @Documented
 069  @Retention(RetentionPolicy.RUNTIME)
-070  public @interface Private {};
+070  public @interface Private {}
 071
 072  private InterfaceAudience() {} // 
Audience can't exist on its own
 073}

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/classification/InterfaceStability.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/classification/InterfaceStability.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/classification/InterfaceStability.html
index dc4a0b5..c84148e 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/classification/InterfaceStability.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/classification/InterfaceStability.html
@@ -55,14 +55,14 @@
 047   */
 048  @Documented
 049  @Retention(RetentionPolicy.RUNTIME)
-050  public @interface Stable {};
+050  public @interface Stable {}
 051
 052  /**
 053   * Evolving, but can break 
compatibility at minor release (i.e. m.x)
 054   */
 055  @Documented
 056  @Retention(RetentionPolicy.RUNTIME)
-057  public @interface Evolving {};
+057  public @interface Evolving {}
 058
 059  /**
 060   * No guarantee is provided as to 
reliability or stability across any
@@ -70,7 +70,7 @@
 062   */
 063  @Documented
 064  @Retention(RetentionPolicy.RUNTIME)
-065  public @interface Unstable {};
+065  public @interface Unstable {}
 066}
 
 



[13/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/client/Table.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Table.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Table.html
index 91a81ab..cddfbbc 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Table.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Table.html
@@ -120,547 +120,555 @@
 112   * @param actions list of Get, Put, 
Delete, Increment, Append objects
 113   * @param results Empty Object[], same 
size as actions. Provides access to partial
 114   *results, in case an 
exception is thrown. A null in the result array means that
-115   *the call for that 
action failed, even after retries
-116   * @throws IOException
-117   * @since 0.90.0
-118   */
-119  void batch(final List actions, final Object[] results) throws IOException,
-120InterruptedException;
-121
-122  /**
-123   * Same as {@link #batch(List, 
Object[])}, but with a callback.
-124   * @since 0.96.0
-125   */
-126   void batchCallback(
-127final List 
actions, final Object[] results, final Batch.Callback callback
-128  )
-129throws IOException, 
InterruptedException;
-130
-131  /**
-132   * Extracts certain cells from a given 
row.
-133   * @param get The object that specifies 
what data to fetch and from which row.
-134   * @return The data coming from the 
specified row, if it exists.  If the row
-135   * specified doesn't exist, the {@link 
Result} instance returned won't
-136   * contain any {@link 
org.apache.hadoop.hbase.KeyValue}, as indicated by {@link Result#isEmpty()}.
-137   * @throws IOException if a remote or 
network exception occurs.
-138   * @since 0.20.0
-139   */
-140  Result get(Get get) throws 
IOException;
-141
-142  /**
-143   * Extracts certain cells from the 
given rows, in batch.
-144   *
-145   * @param gets The objects that specify 
what data to fetch and from which rows.
-146   * @return The data coming from the 
specified rows, if it exists.  If the row specified doesn't
-147   * exist, the {@link Result} instance 
returned won't contain any {@link
-148   * org.apache.hadoop.hbase.KeyValue}, 
as indicated by {@link Result#isEmpty()}. If there are any
-149   * failures even after retries, there 
will be a null in the results array for those Gets, AND an
-150   * exception will be thrown.
-151   * @throws IOException if a remote or 
network exception occurs.
-152   * @since 0.90.0
-153   */
-154  Result[] get(List gets) 
throws IOException;
-155
-156  /**
-157   * Returns a scanner on the current 
table as specified by the {@link Scan}
-158   * object.
-159   * Note that the passed {@link Scan}'s 
start row and caching properties
-160   * maybe changed.
-161   *
-162   * @param scan A configured {@link 
Scan} object.
-163   * @return A scanner.
-164   * @throws IOException if a remote or 
network exception occurs.
-165   * @since 0.20.0
-166   */
-167  ResultScanner getScanner(Scan scan) 
throws IOException;
-168
-169  /**
-170   * Gets a scanner on the current table 
for the given family.
-171   *
-172   * @param family The column family to 
scan.
-173   * @return A scanner.
-174   * @throws IOException if a remote or 
network exception occurs.
-175   * @since 0.20.0
-176   */
-177  ResultScanner getScanner(byte[] family) 
throws IOException;
-178
-179  /**
-180   * Gets a scanner on the current table 
for the given family and qualifier.
-181   *
-182   * @param family The column family to 
scan.
-183   * @param qualifier The column 
qualifier to scan.
-184   * @return A scanner.
-185   * @throws IOException if a remote or 
network exception occurs.
-186   * @since 0.20.0
-187   */
-188  ResultScanner getScanner(byte[] family, 
byte[] qualifier) throws IOException;
-189
-190
-191  /**
-192   * Puts some data in the table.
-193   *
-194   * @param put The data to put.
-195   * @throws IOException if a remote or 
network exception occurs.
-196   * @since 0.20.0
-197   */
-198  void put(Put put) throws IOException;
-199
-200  /**
-201   * Puts some data in the table, in 
batch.
-202   * 

-203 * This can be used for group commit, or for submitting user defined -204 * batches. The writeBuffer will be periodically inspected while the List -205 * is processed, so depending on the List size the writeBuffer may flush -206 * not at all, or more than once. -207 * @param puts The list of mutations to apply. The batch put is done by -208 * aggregating the iteration of the Puts over the write buffer -209 * at the client-side for a single RPC call. -210 * @throws IOException if a remote or network exception occurs. -211 * @since 0.20.0 -212 */ -213 void put(List puts) throws IOException; -214 -215 /** -216 * Atomically checks if a row/family/qualifier value matches the expected


[20/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/serialized-form.html
--
diff --git a/apidocs/serialized-form.html b/apidocs/serialized-form.html
index 0bebdc6..25fbf83 100644
--- a/apidocs/serialized-form.html
+++ b/apidocs/serialized-form.html
@@ -201,6 +201,15 @@
 1728345723728342L
 
 
+
+
+
+Class org.apache.hadoop.hbase.ReplicationPeerNotFoundException
 extends DoNotRetryIOException implements 
Serializable
+
+serialVersionUID:
+1L
+
+
 
 
 
@@ -1045,6 +1054,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/ChoreService.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/ChoreService.html 
b/apidocs/src-html/org/apache/hadoop/hbase/ChoreService.html
index 28f13df..568793e 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/ChoreService.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/ChoreService.html
@@ -356,7 +356,7 @@
 348  }
 349
 350  private void cancelAllChores(final 
boolean mayInterruptIfRunning) {
-351ArrayList 
choresToCancel = new ArrayList();
+351ArrayList 
choresToCancel = new 
ArrayList(scheduledChores.keySet().size());
 352// Build list of chores to cancel so 
we can iterate through a set that won't change
 353// as chores are cancelled. If we 
tried to cancel each chore while iterating through
 354// keySet the results would be 
undefined because the keySet would be changing

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html 
b/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html
index e02b252..791cb88 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html
@@ -26,29 +26,29 @@
 018 */
 019package org.apache.hadoop.hbase;
 020
-021import 
com.google.common.annotations.VisibleForTesting;
-022import 
com.google.common.base.Preconditions;
-023import java.io.IOException;
-024import java.util.Collections;
-025import java.util.HashMap;
-026import java.util.HashSet;
-027import java.util.Locale;
-028import java.util.Map;
-029import java.util.Set;
-030
-031import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-032import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-033import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-034import 
org.apache.hadoop.hbase.exceptions.HBaseException;
-035import 
org.apache.hadoop.hbase.io.compress.Compression;
-036import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+021import java.io.IOException;
+022import java.util.Collections;
+023import java.util.HashMap;
+024import java.util.HashSet;
+025import java.util.Locale;
+026import java.util.Map;
+027import java.util.Set;
+028
+029import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+030import 
org.apache.hadoop.hbase.classification.InterfaceStability;
+031import 
org.apache.hadoop.hbase.client.MobCompactPartitionPolicy;
+032import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
+033import 
org.apache.hadoop.hbase.exceptions.HBaseException;
+034import 
org.apache.hadoop.hbase.io.compress.Compression;
+035import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+036import 
org.apache.hadoop.hbase.regionserver.BloomType;
 037import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 038import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema;
-039import 
org.apache.hadoop.hbase.regionserver.BloomType;
-040import 
org.apache.hadoop.hbase.util.Bytes;
-041import 
org.apache.hadoop.hbase.util.PrettyPrinter;
-042import 
org.apache.hadoop.hbase.util.PrettyPrinter.Unit;
-043
+039import 
org.apache.hadoop.hbase.util.Bytes;
+040import 
org.apache.hadoop.hbase.util.PrettyPrinter;
+041import 
org.apache.hadoop.hbase.util.PrettyPrinter.Unit;
+042
+043import 
com.google.common.base.Preconditions;
 044
 045/**
 046 * An HColumnDescriptor contains 
information about a column family such as the
@@ -138,321 +138,321 @@
 130  public static final String 
MOB_THRESHOLD = "MOB_THRESHOLD";
 131  public static final byte[] 
MOB_THRESHOLD_BYTES = Bytes.toBytes(MOB_THRESHOLD);
 132  public static final long 
DEFAULT_MOB_THRESHOLD = 100 * 1024; // 100k
-133
-134  public static final String 
DFS_REPLICATION = "DFS_REPLICATION";
-135  public static final short 
DEFAULT_DFS_REPLIC

[45/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html 
b/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
index 0d52eb0..5af1bed 100644
--- a/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
+++ b/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":42,"i5":10,"i6":42,"i7":10,"i8":10,"i9":10,"i10":10,"i11":9,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":9,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":9,"i38":10,"i39":10,"i40":9,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":42,"i5":10,"i6":42,"i7":10,"i8":10,"i9":10,"i10":10,"i11":9,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":9,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":9,"i40":10,"i41":10,"i42":9,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -278,23 +278,21 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 
-static boolean
-DEFAULT_IN_MEMORY_COMPACTION
-Default setting for whether to set the memstore of this 
column family as compacting or not.
-
-
-
 static KeepDeletedCells
 DEFAULT_KEEP_DELETED
 Default setting for preventing deleted from being collected 
immediately.
 
 
-
+
 static int
 DEFAULT_MIN_VERSIONS
 Default is not to keep a minimum of versions.
 
 
+
+static MobCompactPartitionPolicy
+DEFAULT_MOB_COMPACT_PARTITION_POLICY 
+
 
 static long
 DEFAULT_MOB_THRESHOLD 
@@ -374,6 +372,14 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+MOB_COMPACT_PARTITION_POLICY 
+
+
+static byte[]
+MOB_COMPACT_PARTITION_POLICY_BYTES 
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 MOB_THRESHOLD 
 
 
@@ -396,6 +402,10 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+STORAGE_POLICY 
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 TTL 
 
 
@@ -525,278 +535,305 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 
+MemoryCompactionPolicy
+getInMemoryCompaction() 
+
+
 KeepDeletedCells
 getKeepDeletedCells() 
 
-
+
 int
 getMaxVersions() 
 
-
+
 int
 getMinVersions() 
 
-
+
+MobCompactPartitionPolicy
+getMobCompactPartitionPolicy()
+Get the mob compact partition policy for this family
+
+
+
 long
 getMobThreshold()
 Gets the mob threshold of the family.
 
 
-
+
 byte[]
 getName() 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 getNameAsString() 
 
-
+
 int
 getScope() 
 
-
+
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+getStoragePolicy()
+Return the storage policy in use by this family
+ 
+ Not using enum here because HDFS is not using enum 
for storage policy, see
+ org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite for 
more details
+
+
+
 int
 getTimeToLive() 
 
-
+
 static 
org.apache.hadoop.hbase.util.PrettyPrinter.Unit
 getUnit(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String key) 
 
-
+
 byte[]
 getValue(byte[] key) 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=

[15/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/client/Delete.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Delete.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Delete.html
index bcd58ec..0c7491c 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Delete.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Delete.html
@@ -180,7 +180,7 @@
 172byte [] family = 
CellUtil.cloneFamily(kv);
 173List list = 
familyMap.get(family);
 174if (list == null) {
-175  list = new 
ArrayList();
+175  list = new 
ArrayList(1);
 176}
 177list.add(kv);
 178familyMap.put(family, list);
@@ -217,7 +217,7 @@
 209}
 210List list = 
familyMap.get(family);
 211if(list == null) {
-212  list = new 
ArrayList();
+212  list = new 
ArrayList(1);
 213} else if(!list.isEmpty()) {
 214  list.clear();
 215}
@@ -237,7 +237,7 @@
 229  public Delete addFamilyVersion(final 
byte [] family, final long timestamp) {
 230List list = 
familyMap.get(family);
 231if(list == null) {
-232  list = new 
ArrayList();
+232  list = new 
ArrayList(1);
 233}
 234list.add(new KeyValue(row, family, 
null, timestamp,
 235  
KeyValue.Type.DeleteFamilyVersion));
@@ -270,7 +270,7 @@
 262}
 263List list = 
familyMap.get(family);
 264if (list == null) {
-265  list = new 
ArrayList();
+265  list = new 
ArrayList(1);
 266}
 267list.add(new KeyValue(this.row, 
family, qualifier, timestamp,
 268KeyValue.Type.DeleteColumn));
@@ -305,7 +305,7 @@
 297}
 298List list = 
familyMap.get(family);
 299if(list == null) {
-300  list = new 
ArrayList();
+300  list = new 
ArrayList(1);
 301}
 302KeyValue kv = new KeyValue(this.row, 
family, qualifier, timestamp, KeyValue.Type.Delete);
 303list.add(kv);

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/client/Get.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Get.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Get.html
index d917b20..2a44859 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Get.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Get.html
@@ -408,7 +408,7 @@
 400  @Override
 401  public Map 
getFingerprint() {
 402Map map = new 
HashMap();
-403List families = new 
ArrayList();
+403List families = new 
ArrayList(this.familyMap.entrySet().size());
 404map.put("families", families);
 405for (Map.Entry> entry :
 406  this.familyMap.entrySet()) {
@@ -436,7 +436,7 @@
 428map.put("row", 
Bytes.toStringBinary(this.row));
 429map.put("maxVersions", 
this.maxVersions);
 430map.put("cacheBlocks", 
this.cacheBlocks);
-431List timeRange = new 
ArrayList();
+431List timeRange = new 
ArrayList(2);
 432timeRange.add(this.tr.getMin());
 433timeRange.add(this.tr.getMax());
 434map.put("timeRange", timeRange);

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
index 0b439da..0859dfa 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
@@ -451,7 +451,7 @@
 443private final AtomicInteger 
retryInQueue = new AtomicInteger(0);
 444private final int writeRpcTimeout; // 
needed to pass in through AsyncProcess constructor
 445private final int operationTimeout;
-446
+446private final ExecutorService pool;
 447public FlushWorker(Configuration 
conf, ClusterConnection conn, HRegionLocation addr,
 448HTableMultiplexer 
htableMultiplexer, int perRegionServerBufferQueueSize,
 449ExecutorService pool, 
ScheduledExecutorService executor) {
@@ -465,10 +465,10 @@
 457  
HConstants.DEFAULT_HBASE_RPC_TIMEOUT));
 458  this.operationTimeout = 
conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,
 459  
HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);
-460  this.ap = new AsyncProcess(conn, 
conf, pool, rpcCallerFactory, false, rpcControllerFactory,
-461  writeRpcTimeout, 
op

[28/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/mapred/class-use/TableSnapshotInputFormat.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/mapred/class-use/TableSnapshotInputFormat.html
 
b/apidocs/org/apache/hadoop/hbase/mapred/class-use/TableSnapshotInputFormat.html
index 3fa1b88..83db7ea 100644
--- 
a/apidocs/org/apache/hadoop/hbase/mapred/class-use/TableSnapshotInputFormat.html
+++ 
b/apidocs/org/apache/hadoop/hbase/mapred/class-use/TableSnapshotInputFormat.html
@@ -167,6 +167,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/mapred/class-use/TableSplit.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/mapred/class-use/TableSplit.html 
b/apidocs/org/apache/hadoop/hbase/mapred/class-use/TableSplit.html
index 6742751..ba6dd98 100644
--- a/apidocs/org/apache/hadoop/hbase/mapred/class-use/TableSplit.html
+++ b/apidocs/org/apache/hadoop/hbase/mapred/class-use/TableSplit.html
@@ -163,6 +163,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/mapred/package-summary.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/mapred/package-summary.html 
b/apidocs/org/apache/hadoop/hbase/mapred/package-summary.html
index 52739b2..e9fc5fc 100644
--- a/apidocs/org/apache/hadoop/hbase/mapred/package-summary.html
+++ b/apidocs/org/apache/hadoop/hbase/mapred/package-summary.html
@@ -43,7 +43,7 @@
 
 
 
-Prev Package
+Prev Package
 Next Package
 
 
@@ -231,7 +231,7 @@ in the HBase Reference Guide for mapreduce over hbase 
documentation.
 
 
 
-Prev Package
+Prev Package
 Next Package
 
 
@@ -257,6 +257,6 @@ in the HBase Reference Guide for mapreduce over hbase 
documentation.
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/mapred/package-tree.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/mapred/package-tree.html 
b/apidocs/org/apache/hadoop/hbase/mapred/package-tree.html
index 797c935..9fa1eba 100644
--- a/apidocs/org/apache/hadoop/hbase/mapred/package-tree.html
+++ b/apidocs/org/apache/hadoop/hbase/mapred/package-tree.html
@@ -43,7 +43,7 @@
 
 
 
-Prev
+Prev
 Next
 
 
@@ -177,7 +177,7 @@
 
 
 
-Prev
+Prev
 Next
 
 
@@ -203,6 +203,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/mapred/package-use.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/mapred/package-use.html 
b/apidocs/org/apache/hadoop/hbase/mapred/package-use.html
index e580d39..970c839 100644
--- a/apidocs/org/apache/hadoop/hbase/mapred/package-use.html
+++ b/apidocs/org/apache/hadoop/hbase/mapred/package-use.html
@@ -183,6 +183,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/mapreduce/CellCounter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/mapreduce/CellCounter.html 
b/apidocs/org/apache/hadoop/hbase/mapreduce/CellCounter.html
index 9953561..9b8a68f 100644
--- a/apidocs/org/apache/hadoop/hbase/mapreduce/CellCounter.html
+++ b/apidocs/org/apache/hadoop/hbase/mapreduce/CellCounter.html
@@ -250,7 +250,7 @@ implements org.apache.hadoop.util.Tool
 
 
 createSubmittableJob
-public static org.apache.hadoop.mapreduce.Job createSubmittableJob(org.apache.hadoop.

[09/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/filter/TimestampsFilter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/TimestampsFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/TimestampsFilter.html
index d00a4b0..fa99228 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/TimestampsFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/TimestampsFilter.html
@@ -165,7 +165,7 @@
 157  }
 158
 159  public static Filter 
createFilterFromArguments(ArrayList filterArguments) {
-160ArrayList timestamps = 
new ArrayList();
+160ArrayList timestamps = 
new ArrayList(filterArguments.size());
 161for (int i = 0; 
i
-032 * Evaluated according to minStamp 
<= timestamp < maxStamp
-033 * or [minStamp,maxStamp) in interval 
notation.
-034 * 

-035 * Only used internally; should not be accessed directly by clients. -036 *

Immutable. Thread-safe. -037 */ -038@InterfaceAudience.Public -039@InterfaceStability.Stable -040public class TimeRange { -041 public static final long INITIAL_MIN_TIMESTAMP = 0L; -042 public static final long INITIAL_MAX_TIMESTAMP = Long.MAX_VALUE; -043 private final long minStamp; -044 private final long maxStamp; -045 private final boolean allTime; -046 -047 /** -048 * Default constructor. -049 * Represents interval [0, Long.MAX_VALUE) (allTime) -050 * @deprecated This is made @InterfaceAudience.Private in the 2.0 line and above -051 */ -052 @Deprecated -053 public TimeRange() { -054this(INITIAL_MIN_TIMESTAMP, INITIAL_MAX_TIMESTAMP); -055 } -056 -057 /** -058 * Represents interval [minStamp, Long.MAX_VALUE) -059 * @param minStamp the minimum timestamp value, inclusive -060 * @deprecated This is made @InterfaceAudience.Private in the 2.0 line and above -061 */ -062 @Deprecated -063 public TimeRange(long minStamp) { -064this(minStamp, INITIAL_MAX_TIMESTAMP); -065 } -066 -067 /** -068 * Represents interval [minStamp, Long.MAX_VALUE) -069 * @param minStamp the minimum timestamp value, inclusive -070 * @deprecated This is made @InterfaceAudience.Private in the 2.0 line and above -071 */ -072 @Deprecated -073 public TimeRange(byte [] minStamp) { -074this(Bytes.toLong(minStamp)); -075 } -076 -077 /** -078 * Represents interval [minStamp, maxStamp) -079 * @param minStamp the minimum timestamp, inclusive -080 * @param maxStamp the maximum timestamp, exclusive -081 * @deprecated This is made @InterfaceAudience.Private in the 2.0 line and above -082 */ -083 @Deprecated -084 public TimeRange(byte [] minStamp, byte [] maxStamp) { -085this(Bytes.toLong(minStamp), Bytes.toLong(maxStamp)); -086 } -087 -088 /** -089 * Represents interval [minStamp, maxStamp) -090 * @param minStamp the minimum timestamp, inclusive -091 * @param maxStamp the maximum timestamp, exclusive -092 * @throws IllegalArgumentException if either <0, -093 * @deprecated This is made @InterfaceAudience.Private 
in the 2.0 line and above -094 */ -095 @Deprecated -096 public TimeRange(long minStamp, long maxStamp) { -097check(minStamp, maxStamp); -098this.minStamp = minStamp; -099this.maxStamp = maxStamp; -100this.allTime = isAllTime(minStamp, maxStamp); -101 } -102 -103 private static boolean isAllTime(long minStamp, long maxStamp) { -104return minStamp == INITIAL_MIN_TIMESTAMP && maxStamp == INITIAL_MAX_TIMESTAMP; -105 } -106 -107 private static void check(long minStamp, long maxStamp) { -108if (minStamp < 0 || maxStamp < 0) { -109 throw new IllegalArgumentException("Timestam


[33/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html 
b/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
index 8b52361..65b5bc7 100644
--- a/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
+++ b/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":42,"i2":10,"i3":42,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":42,"i13":10,"i14":10,"i15":41,"i16":10,"i17":10,"i18":10,"i19":42,"i20":10,"i21":10};
+var methods = 
{"i0":42,"i1":42,"i2":42,"i3":42,"i4":42,"i5":42,"i6":42,"i7":42,"i8":42,"i9":42,"i10":42,"i11":42,"i12":42,"i13":42,"i14":42,"i15":41,"i16":42,"i17":42,"i18":42,"i19":42,"i20":42,"i21":42};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames
@@ -112,17 +112,19 @@ var activeTableTab = "activeTableTab";
 http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true";
 title="class or interface in java.io">Closeable, http://docs.oracle.com/javase/8/docs/api/java/lang/AutoCloseable.html?is-external=true";
 title="class or interface in java.lang">AutoCloseable
 
 
+Deprecated. 
+use Admin 
instead.
+
 
 @InterfaceAudience.Public
  @InterfaceStability.Evolving
-public class ReplicationAdmin
+ http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
+public class ReplicationAdmin
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true";
 title="class or interface in java.io">Closeable
 
  This class provides the administrative interface to HBase cluster
- replication. In order to use it, the cluster and the client using
- ReplicationAdmin must be configured with hbase.replication
- set to true.
+ replication.
  
  
  Adding a new peer results in creating new outbound connections from every
@@ -159,23 +161,33 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-CFNAME 
+CFNAME
+Deprecated. 
+ 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-REPLICATIONGLOBAL 
+REPLICATIONGLOBAL
+Deprecated. 
+ 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-REPLICATIONSERIAL 
+REPLICATIONSERIAL
+Deprecated. 
+ 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-REPLICATIONTYPE 
+REPLICATIONTYPE
+Deprecated. 
+ 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-TNAME 
+TNAME
+Deprecated. 
+ 
 
 
 
@@ -193,6 +205,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
 
 
 ReplicationAdmin(org.apache.hadoop.conf.Configuration conf)
+Deprecated. 
 Constructor that creates a connection to the local 
ZooKeeper ensemble.
 
 
@@ -215,6 +228,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
 void
 addPeer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String id,
ReplicationPeerConfig peerConfig)
+Deprecated. 
 Add a new remote slave cluster for replication.
 
 
@@ -233,6 +247,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
 void
 appendPeerTableCFs(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String id,
   http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapCollectionString>> tableCfs)
+Deprecated. 
 Append the replicable table-cf config of the specified 
peer
 
 
@@ 

[27/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.html 
b/apidocs/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.html
index 6ec3893..3623778 100644
--- a/apidocs/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.html
+++ b/apidocs/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.html
@@ -433,6 +433,6 @@ extends org.apache.hadoop.mapreduce.InputFormatThe Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.html 
b/apidocs/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.html
index 1e22e19..38daf0b 100644
--- a/apidocs/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.html
+++ b/apidocs/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.html
@@ -424,6 +424,6 @@ extends org.apache.hadoop.mapreduce.OutputFormatThe Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormat.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormat.html 
b/apidocs/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormat.html
index 971b0be..5a696a0 100644
--- 
a/apidocs/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormat.html
+++ 
b/apidocs/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormat.html
@@ -360,6 +360,6 @@ extends Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/mapreduce/MutationSerialization.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/mapreduce/MutationSerialization.html 
b/apidocs/org/apache/hadoop/hbase/mapreduce/MutationSerialization.html
index cc2c100..88d5232 100644
--- a/apidocs/org/apache/hadoop/hbase/mapreduce/MutationSerialization.html
+++ b/apidocs/org/apache/hadoop/hbase/mapreduce/MutationSerialization.html
@@ -312,6 +312,6 @@ implements 
org.apache.hadoop.io.serializer.SerializationThe Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/mapreduce/PutCombiner.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/mapreduce/PutCombiner.html 
b/apidocs/org/apache/hadoop/hbase/mapreduce/PutCombiner.html
index a00204a..9dbf641 100644
--- a/apidocs/org/apache/hadoop/hbase/mapreduce/PutCombiner.html
+++ b/apidocs/org/apache/hadoop/hbase/mapreduce/PutCombiner.html
@@ -302,6 +302,6 @@ extends org.apache.hadoop.mapreduce.ReducerThe Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/mapreduce/PutSortReducer.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/mapreduce/PutSortReducer.html 
b/apidocs/org/apache/hadoop/hbase/mapreduce/PutSortReducer.html
index bc727b6..e84018c 100644
--- a/apidocs/org/apache/hadoop/hbase/mapreduce/PutSortReducer.html
+++ b/apidocs/org/apache/hadoop/hbase/mapreduce/PutSortReducer.html
@@ -325,6 +325,6 @@ extends org.apache.hadoop.mapreduce.ReducerThe Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/mapreduce/ResultSerialization.html
-

[52/52] hbase-site git commit: Empty commit

2017-02-05 Thread tedyu
Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/2f960d3f
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/2f960d3f
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/2f960d3f

Branch: refs/heads/asf-site
Commit: 2f960d3f3793a238ae6ca880033aa9018e80aa0b
Parents: 62e361e
Author: tedyu 
Authored: Sun Feb 5 07:48:17 2017 -0800
Committer: tedyu 
Committed: Sun Feb 5 07:48:17 2017 -0800

--

--




[26/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/package-frame.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/package-frame.html 
b/apidocs/org/apache/hadoop/hbase/package-frame.html
index 7a245a3..3f6fe3f 100644
--- a/apidocs/org/apache/hadoop/hbase/package-frame.html
+++ b/apidocs/org/apache/hadoop/hbase/package-frame.html
@@ -42,6 +42,7 @@
 Enums
 
 KeepDeletedCells
+MemoryCompactionPolicy
 ProcedureState
 
 Exceptions
@@ -62,6 +63,7 @@
 PleaseHoldException
 RegionException
 RegionTooBusyException
+ReplicationPeerNotFoundException
 RetryImmediatelyException
 TableExistsException
 TableInfoMissingException

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/package-summary.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/package-summary.html 
b/apidocs/org/apache/hadoop/hbase/package-summary.html
index 150f193..84f1b3d 100644
--- a/apidocs/org/apache/hadoop/hbase/package-summary.html
+++ b/apidocs/org/apache/hadoop/hbase/package-summary.html
@@ -248,6 +248,12 @@
 
 
 
+MemoryCompactionPolicy
+
+Enum describing all possible memory compaction 
policies
+
+
+
 ProcedureState
 
 POJO representing Procedure State
@@ -366,52 +372,58 @@
 
 
 
+ReplicationPeerNotFoundException
+
+Thrown when a replication peer can not be found
+
+
+
 RetryImmediatelyException
  
 
-
+
 TableExistsException
 
 Thrown when a table exists but should not
 
 
-
+
 TableInfoMissingException
 
 Failed to find .tableinfo file under table dir
 
 
-
+
 TableNotDisabledException
 
 Thrown if a table should be offline but is not
 
 
-
+
 TableNotEnabledException
 
 Thrown if a table should be enabled but is not
 
 
-
+
 TableNotFoundException
 
 Thrown when a table can not be located
 
 
-
+
 UnknownRegionException
 
 Thrown when we are asked to operate on a region we know 
nothing about.
 
 
-
+
 UnknownScannerException
 
 Thrown if a region server is passed an unknown scanner 
id.
 
 
-
+
 ZooKeeperConnectionException
 
 Thrown if the client can't connect to zookeeper
@@ -469,6 +481,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/package-tree.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/package-tree.html 
b/apidocs/org/apache/hadoop/hbase/package-tree.html
index 77ca49c..3e0a4c4 100644
--- a/apidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/apidocs/org/apache/hadoop/hbase/package-tree.html
@@ -127,6 +127,7 @@
 org.apache.hadoop.hbase.NamespaceExistException
 org.apache.hadoop.hbase.NamespaceNotFoundException
 org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException
+org.apache.hadoop.hbase.ReplicationPeerNotFoundException
 org.apache.hadoop.hbase.TableExistsException
 org.apache.hadoop.hbase.TableNotDisabledException
 org.apache.hadoop.hbase.TableNotEnabledException
@@ -174,6 +175,7 @@
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
 org.apache.hadoop.hbase.KeepDeletedCells
+org.apache.hadoop.hbase.MemoryCompactionPolicy
 org.apache.hadoop.hbase.ProcedureState
 
 
@@ -228,6 +230,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/package-use.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/package-use.html 
b/apidocs/org/apache/hadoop/hbase/package-use.html
index 56ad6e3..cbf32f0 100644
--- a/apidocs/org/apache/hadoop/hbase/package-use.html
+++ b/apidocs/org/apache/hadoop/hbase/package-use.html
@@ -223,52 +223,57 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
+MemoryCompactionPolicy
+Enum describing all possible memory compaction 
policies
+
+
+
 NamespaceDescriptor
 Namespace POJO class.
 
 
-
+
 NamespaceDescriptor.Builder 
 
-
+
 ProcedureInfo
 Procedure information
 
 
-
+
 ProcedureState
 POJO representing Procedure State
 
 
-
+
 RegionLoad
 Encapsulates per-region load metrics.
 
 
-
+
 R

[23/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/package-summary.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/package-summary.html
 
b/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/package-summary.html
index c47e79d..eeaa8f8 100644
--- 
a/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/package-summary.html
+++ 
b/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/package-summary.html
@@ -119,6 +119,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/package-tree.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/package-tree.html
 
b/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/package-tree.html
index 8dd25ba..5502b2b 100644
--- 
a/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/package-tree.html
+++ 
b/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/package-tree.html
@@ -123,6 +123,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/package-use.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/package-use.html
 
b/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/package-use.html
index 2abdcac..a91aba2 100644
--- 
a/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/package-use.html
+++ 
b/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/package-use.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/package-summary.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/package-summary.html
 
b/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/package-summary.html
index 5f8680a..03dfe83 100644
--- 
a/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/package-summary.html
+++ 
b/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/package-summary.html
@@ -119,6 +119,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/package-tree.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/package-tree.html 
b/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/package-tree.html
index 7ee5423..b9bf41d 100644
--- 
a/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/package-tree.html
+++ 
b/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/package-tree.html
@@ -123,6 +123,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/package-use.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/package-use.html 
b/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/package-use.html
index b517410..6f42c39 100644
--- 
a/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/package-use.html
+++ 
b/apidocs/org/apache/hadoop/hbase/shaded/com/google/protobuf/package-use.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apach

[39/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/Append.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Append.html 
b/apidocs/org/apache/hadoop/hbase/client/Append.html
index d5953c8..3129b69 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Append.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Append.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames
@@ -639,7 +639,7 @@ extends 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames
@@ -679,6 +679,6 @@ extends 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/AsyncConnection.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/AsyncConnection.html 
b/apidocs/org/apache/hadoop/hbase/client/AsyncConnection.html
index 6138f22..dd2883d 100644
--- a/apidocs/org/apache/hadoop/hbase/client/AsyncConnection.html
+++ b/apidocs/org/apache/hadoop/hbase/client/AsyncConnection.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":6,"i1":6,"i2":6,"i3":6};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"]};
+var methods = {"i0":6,"i1":6,"i2":18,"i3":6,"i4":6,"i5":18,"i6":6};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],16:["t5","Default Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -49,7 +49,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-Prev Class
+Prev Class
 Next Class
 
 
@@ -123,36 +123,55 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 Method Summary
 
-All Methods Instance Methods Abstract Methods 
+All Methods Instance Methods Abstract Methods Default Methods 
 
 Modifier and Type
 Method and Description
 
 
+AsyncAdmin
+getAdmin()
+Retrieve an AsyncAdmin implementation to administer an 
HBase cluster.
+
+
+
 org.apache.hadoop.conf.Configuration
 getConfiguration()
 Returns the Configuration object used by this 
instance.
 
 
-
-RawAsyncTable
+
+default RawAsyncTable
 getRawTable(TableName tableName)
-Retrieve an RawAsyncTable implementation for accessing a 
table.
+Retrieve an RawAsyncTable implementation 
for accessing a table.
 
 
-
+
+AsyncTableBuilder
+getRawTableBuilder(TableName tableName)
+Returns an AsyncTableBuilder for creating 
RawAsyncTable.
+
+
+
 AsyncTableRegionLocator
 getRegionLocator(TableName tableName)
 Retrieve a AsyncRegionLocator implementation to inspect 
region information on a table.
 
 
-
-AsyncTable
+
+default AsyncTable
 getTable(TableName tableName,
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool)
 Retrieve an AsyncTable implementation for accessing a 
table.
 
 
+
+AsyncTableBuilder
+getTableBuilder(TableName tableName,
+   http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool)
+Returns an AsyncTableBuilder for creating 
AsyncTable.
+
+
 
 
 
@@ -212,10 +231,11 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 getRawTable
-RawAsyncTable getRawTable(TableName tableName)
-Retrieve an RawAsyncTable implementation for accessing a 
table. The returned Table is not
- thread safe, a new instance should be created for each using thread. This is 
a lightweight
- operation, pooling or caching of the returned AsyncTable is neither required 
nor desired.
+default RawAsyncTable getRawTable(TableName tableName)
+Retrieve an RawAsyncTable implementation 
for accessing a table.
+ 
+ The returned instance will use default configs. Use getRawTableBuilder(TableName)
 if you
+ want to customize some configs.
  
  This method no longer checks table existence. An exception will be thrown if 
the table does not
  exist only when the first operation is attempted.
@@ -224,20 +244,37 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 tableName - the name of the table
 Returns:
 an RawAsyncTable to use for interactions with this table
+See Also:
+getRawTableBuilder(TableName)
+
+
+
+
+
+
+
+
+getRawTableBuilder
+AsyncTableBuilder getRawTableBuilder(TableName tableName)
+Returns an AsyncTableBuilder for creating 
RawAsyncTable.
+ 
+ This method no longer checks table existence. An exception will be thrown if 
the table does not
+ exist only when the first operati

[05/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
index 1f612d6..05af383 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
@@ -169,8 +169,8 @@
 161  addColumns(scan, 
conf.get(SCAN_COLUMNS));
 162}
 163
-164if (conf.get(SCAN_COLUMN_FAMILY) != 
null) {
-165  
scan.addFamily(Bytes.toBytes(conf.get(SCAN_COLUMN_FAMILY)));
+164for (String columnFamily : 
conf.getTrimmedStrings(SCAN_COLUMN_FAMILY)) {
+165  
scan.addFamily(Bytes.toBytes(columnFamily));
 166}
 167
 168if (conf.get(SCAN_TIMESTAMP) != null) 
{

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html
index 5feedb3..a10017d 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html
@@ -815,7 +815,7 @@
 807  throw new 
IllegalArgumentException("Must provide a configuration object.");
 808}
 809Set paths = new 
HashSet(conf.getStringCollection("tmpjars"));
-810if (paths.size() == 0) {
+810if (paths.isEmpty()) {
 811  throw new 
IllegalArgumentException("Configuration contains no tmpjars.");
 812}
 813StringBuilder sb = new 
StringBuilder();

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
index add6a1e..3203468 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
@@ -51,78 +51,89 @@
 043  private final Map 
configuration;
 044  private Map> tableCFsMap = null;
 045  private Set namespaces = 
null;
-046
-047  public ReplicationPeerConfig() {
-048this.peerData = new 
TreeMap(Bytes.BYTES_COMPARATOR);
-049this.configuration = new 
HashMap(0);
-050  }
-051
-052  /**
-053   * Set the clusterKey which is the 
concatenation of the slave cluster's:
-054   *  
hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
-055   */
-056  public ReplicationPeerConfig 
setClusterKey(String clusterKey) {
-057this.clusterKey = clusterKey;
-058return this;
-059  }
-060
-061  /**
-062   * Sets the ReplicationEndpoint plugin 
class for this peer.
-063   * @param replicationEndpointImpl a 
class implementing ReplicationEndpoint
-064   */
-065  public ReplicationPeerConfig 
setReplicationEndpointImpl(String replicationEndpointImpl) {
-066this.replicationEndpointImpl = 
replicationEndpointImpl;
-067return this;
-068  }
-069
-070  public String getClusterKey() {
-071return clusterKey;
-072  }
-073
-074  public String 
getReplicationEndpointImpl() {
-075return replicationEndpointImpl;
-076  }
-077
-078  public Map 
getPeerData() {
-079return peerData;
-080  }
-081
-082  public Map 
getConfiguration() {
-083return configuration;
-084  }
-085
-086  public Map> getTableCFsMap() {
-087return (Map>) tableCFsMap;
-088  }
-089
-090  public ReplicationPeerConfig 
setTableCFsMap(Map> tableCFsMap) {
-092this.tableCFsMap = tableCFsMap;
-093return this;
-094  }
-095
-096  public Set 
getNamespaces() {
-097return this.namespaces;
-098  }
-099
-100  public ReplicationPeerConfig 
setNamespaces(Set namespaces) {
-101this.namespaces = namespaces;
-102return this;
-103  }
-104
-105  @Override
-106  public String toString() {
-107StringBuilder builder = new 
StringBuilder("clusterKey=").append(clusterKey).append(",");
-108
builder.append("replicationEndpointImpl=").append(replicationEndpointImpl).append(",");
-109if (namespaces != null) {
-110  
builder.append("namespaces=").append(namespaces.toString()).append(",")

[42/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/class-use/MultiActionResultTooLarge.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/class-use/MultiActionResultTooLarge.html 
b/apidocs/org/apache/hadoop/hbase/class-use/MultiActionResultTooLarge.html
index 2d1b271..5735cf7 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/MultiActionResultTooLarge.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/MultiActionResultTooLarge.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.Builder.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.Builder.html 
b/apidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.Builder.html
index e8a6dc5..00544b3 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.Builder.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.Builder.html
@@ -177,6 +177,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html 
b/apidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
index 69ebed4..5663df6 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
@@ -268,6 +268,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/class-use/NamespaceExistException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/class-use/NamespaceExistException.html 
b/apidocs/org/apache/hadoop/hbase/class-use/NamespaceExistException.html
index 2907b69..0184be6 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/NamespaceExistException.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/NamespaceExistException.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/class-use/NamespaceNotFoundException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/class-use/NamespaceNotFoundException.html 
b/apidocs/org/apache/hadoop/hbase/class-use/NamespaceNotFoundException.html
index c4cd3de..115c9be 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/NamespaceNotFoundException.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/NamespaceNotFoundException.html
@@ -164,6 +164,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/class-use/NotAllMetaRegionsOnlineException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/class-use/NotAllMetaRegionsOnlineException.html
 
b/apidocs/org/apache/hadoop/hbase/class-use/NotAllMetaRegionsOnlineException.html
index feae5c8..7243b11 100644
--- 
a/apidocs/org/apache/hadoop/hbase/class-use/NotAllMetaRegionsOnlineException.html
+++ 
b/apidocs/org/apache/hadoop/hbase/class-use/NotAllMetaRegionsOnlineException.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/class-use/NotServingRegionException.html
--
dif

[08/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/CellCounter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/CellCounter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/CellCounter.html
index 8ca5756..aae49fe 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/CellCounter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/CellCounter.html
@@ -139,206 +139,208 @@
 131 */
 132
 133@Override
-134public void 
map(ImmutableBytesWritable row, Result values,
-135Context context)
-136throws IOException {
-137  Preconditions.checkState(values != 
null,
-138  "values passed to the map is 
null");
-139
-140  try {
-141byte[] currentRow = 
values.getRow();
-142if (lastRow == null || 
!Bytes.equals(lastRow, currentRow)) {
-143  lastRow = currentRow;
-144  currentRowKey = 
Bytes.toStringBinary(currentRow);
-145  currentFamily = null;
-146  currentQualifier = null;
-147  
context.getCounter(Counters.ROWS).increment(1);
-148  context.write(new Text("Total 
ROWS"), new IntWritable(1));
-149}
-150if (!values.isEmpty()) {
-151  int cellCount = 0;
-152  for (Cell value : 
values.listCells()) {
-153cellCount++;
-154if (currentFamily == null || 
!CellUtil.matchingFamily(value, currentFamily)) {
-155  currentFamily = 
CellUtil.cloneFamily(value);
-156  currentFamilyName = 
Bytes.toStringBinary(currentFamily);
-157  currentQualifier = null;
-158  context.getCounter("CF", 
currentFamilyName).increment(1);
-159  if (1 == 
context.getCounter("CF", currentFamilyName).getValue()) {
-160context.write(new 
Text("Total Families Across all Rows"), new IntWritable(1));
-161context.write(new 
Text(currentFamily), new IntWritable(1));
-162  }
-163}
-164if (currentQualifier == null 
|| !CellUtil.matchingQualifier(value, currentQualifier)) {
-165  currentQualifier = 
CellUtil.cloneQualifier(value);
-166  currentQualifierName = 
currentFamilyName + separator +
-167  
Bytes.toStringBinary(currentQualifier);
-168  currentRowQualifierName = 
currentRowKey + separator + currentQualifierName;
-169
-170  context.write(new 
Text("Total Qualifiers across all Rows"),
-171  new IntWritable(1));
-172  context.write(new 
Text(currentQualifierName), new IntWritable(1));
-173}
-174// Increment versions
-175context.write(new 
Text(currentRowQualifierName + "_Versions"), new IntWritable(1));
-176  }
-177  
context.getCounter(Counters.CELLS).increment(cellCount);
-178}
-179  } catch (InterruptedException e) 
{
-180e.printStackTrace();
-181  }
-182}
-183  }
-184
-185  static class IntSumReducer 
extends Reducer {
-187
-188private IntWritable result = new 
IntWritable();
-189public void reduce(Key key, 
Iterable values,
-190  Context context)
-191throws IOException, 
InterruptedException {
-192  int sum = 0;
-193  for (IntWritable val : values) {
-194sum += val.get();
-195  }
-196  result.set(sum);
-197  context.write(key, result);
-198}
-199  }
-200
-201  /**
-202   * Sets up the actual job.
-203   *
-204   * @param conf The current 
configuration.
-205   * @param args The command line 
parameters.
-206   * @return The newly created job.
-207   * @throws IOException When setting up 
the job fails.
-208   */
-209  public static Job 
createSubmittableJob(Configuration conf, String[] args)
-210  throws IOException {
-211String tableName = args[0];
-212Path outputDir = new Path(args[1]);
-213String reportSeparatorString = 
(args.length > 2) ? args[2]: ":";
-214conf.set("ReportSeparator", 
reportSeparatorString);
-215Job job = Job.getInstance(conf, 
conf.get(JOB_NAME_CONF_KEY, NAME + "_" + tableName));
-216
job.setJarByClass(CellCounter.class);
-217Scan scan = 
getConfiguredScanForJob(conf, args);
-218
TableMapReduceUtil.initTableMapperJob(tableName, scan,
-219CellCounterMapper.class, 
ImmutableBytesWritable.class, Result.class, job);
-220job.setNumReduceTasks(1);
-221
job.setMapOutputKeyClass(Text.class);
-222
job.setMapOutputValueClass(IntWritable.class);
-223
job.setOutputFormatClass(TextOutputFormat.class);
-224job.setOutputKeyClass(Text.class);
-225
job.setOutputValueClass(IntWritable.class);
-226FileOutputFormat.setOutputPath(job, 
outputDir);
-227
job.setReducerClass(IntSumReducer.class);
-228return job;
-229  }
-23

[37/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/Scan.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Scan.html 
b/apidocs/org/apache/hadoop/hbase/client/Scan.html
index 323ac7e..767c23c 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Scan.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Scan.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":42,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":42,"i61":42,"i62":42,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames
@@ -74,7 +74,7 @@ var activeTableTab = "activeTableTab";
 
 
 Summary: 
-Nested | 
+Nested | 
 Field | 
 Constr | 
 Method
@@ -130,48 +130,73 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
  @InterfaceStability.Stable
-public class Scan
+public class Scan
 extends Query
 Used to perform Scan operations.
  
- All operations are identical to Get with the exception of
- instantiation.  Rather than specifying a single row, an optional startRow
- and stopRow may be defined.  If rows are not specified, the Scanner will
- iterate over all rows.
+ All operations are identical to Get with the exception of 
instantiation. Rather than
+ specifying a single row, an optional startRow and stopRow may be defined. If 
rows are not
+ specified, the Scanner will iterate over all rows.
  
  To get all columns from all rows of a Table, create an instance with no 
constraints; use the
- Scan()
 constructor. To constrain the scan to specific column families,
- call addFamily
 for each family to retrieve on your Scan instance.
+ Scan()
 constructor. To constrain the scan to specific column families, call
+ addFamily
 for each family to retrieve on your Scan instance.
  
- To get specific columns, call addColumn
- for each column to retrieve.
+ To get specific columns, call addColumn
 for each column to
+ retrieve.
  
- To only retrieve columns within a specific range of version timestamps,
- call setTimeRange.
+ To only retrieve columns within a specific range of version timestamps, call
+ setTimeRange.
  
- To only retrieve columns with a specific timestamp, call
- setTimestamp.
+ To only retrieve columns with a specific timestamp, call setTimestamp
+ .
  
- To limit the number of versions of each column to be returned, call
- setMaxVersions.
+ To limit the number of versions of each column to be returned, call setMaxVersions.
  
- To limit the maximum number of values returned for each call to next(),
- call setBatch.
+ To limit the maximum number of values returned for each call to next(), call
+ setBatch.
  
  To add a filter, call setFilter.
  
- Expert: To explicitly disable server-side block caching for this scan,
- execute setCacheBlocks(boolean).
- Note: Usage alters Scan instances. Internally, attributes are 
updated as the Scan
- runs and if enabled, metrics accumulate in the Scan instance. Be aware this 
is the case when
- you go to clone a Scan instance or if you go to reuse a created Scan 
instance; safer is create
- a Scan instance per usage.
+ For small scan, it is deprecated in 2.0.0. Now we have a setLimit(int)
 method in Scan
+ object which is used to tell RS how many rows we want. If the rows return 
reaches the limit, the
+ RS will close the RegionScanner automatically. And we will also fetch data 
when openScanner in
+ the new implementation, this means we can also finish a scan operation in one 
rpc call. And we
+ ha

[21/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/util/class-use/FastLongHistogram.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/util/class-use/FastLongHistogram.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/FastLongHistogram.html
deleted file mode 100644
index c7f1133..000
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/FastLongHistogram.html
+++ /dev/null
@@ -1,167 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd";>
-
-
-
-
-
-Uses of Class org.apache.hadoop.hbase.util.FastLongHistogram (Apache 
HBase 2.0.0-SNAPSHOT API)
-
-
-
-
-
-
-
-JavaScript is disabled on your browser.
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-Prev
-Next
-
-
-Frames
-No Frames
-
-
-All Classes
-
-
-
-
-
-
-
-
-
-
-Uses of 
Classorg.apache.hadoop.hbase.util.FastLongHistogram
-
-
-
-
-
-Packages that use FastLongHistogram 
-
-Package
-Description
-
-
-
-org.apache.hadoop.hbase.util
- 
-
-
-
-
-
-
-
-
-
-Uses of FastLongHistogram in org.apache.hadoop.hbase.util
-
-Methods in org.apache.hadoop.hbase.util
 that return FastLongHistogram 
-
-Modifier and Type
-Method and Description
-
-
-
-FastLongHistogram
-FastLongHistogram.reset()
-Resets the histogram for new counting.
-
-
-
-
-
-
-
-
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-Prev
-Next
-
-
-Frames
-No Frames
-
-
-All Classes
-
-
-
-
-
-
-
-
-
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
-
-

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/util/class-use/FileSystemVersionException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/util/class-use/FileSystemVersionException.html
 
b/apidocs/org/apache/hadoop/hbase/util/class-use/FileSystemVersionException.html
index 210883d..e8af69a 100644
--- 
a/apidocs/org/apache/hadoop/hbase/util/class-use/FileSystemVersionException.html
+++ 
b/apidocs/org/apache/hadoop/hbase/util/class-use/FileSystemVersionException.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/util/class-use/JsonMapper.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/JsonMapper.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/JsonMapper.html
index 9fb1539..3ccad0d 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/JsonMapper.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/JsonMapper.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/util/class-use/LeaseNotRecoveredException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/util/class-use/LeaseNotRecoveredException.html
 
b/apidocs/org/apache/hadoop/hbase/util/class-use/LeaseNotRecoveredException.html
index 08c478c..8e7e2b3 100644
--- 
a/apidocs/org/apache/hadoop/hbase/util/class-use/LeaseNotRecoveredException.html
+++ 
b/apidocs/org/apache/hadoop/hbase/util/class-use/LeaseNotRecoveredException.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/util/class-use/MD5Hash.html
--
diff --git a/apidocs/org/apache/hadoo

[29/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/io/util/package-use.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/io/util/package-use.html 
b/apidocs/org/apache/hadoop/hbase/io/util/package-use.html
index 06bcdbf..150a95e 100644
--- a/apidocs/org/apache/hadoop/hbase/io/util/package-use.html
+++ b/apidocs/org/apache/hadoop/hbase/io/util/package-use.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/ipc/BadAuthException.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/ipc/BadAuthException.html 
b/apidocs/org/apache/hadoop/hbase/ipc/BadAuthException.html
index 8a98cb8..178d9ab 100644
--- a/apidocs/org/apache/hadoop/hbase/ipc/BadAuthException.html
+++ b/apidocs/org/apache/hadoop/hbase/ipc/BadAuthException.html
@@ -306,6 +306,6 @@ extends Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/ipc/CallCancelledException.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/ipc/CallCancelledException.html 
b/apidocs/org/apache/hadoop/hbase/ipc/CallCancelledException.html
index 2e55d57..9b2f324 100644
--- a/apidocs/org/apache/hadoop/hbase/ipc/CallCancelledException.html
+++ b/apidocs/org/apache/hadoop/hbase/ipc/CallCancelledException.html
@@ -271,6 +271,6 @@ extends 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/ipc/CallTimeoutException.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/ipc/CallTimeoutException.html 
b/apidocs/org/apache/hadoop/hbase/ipc/CallTimeoutException.html
index e184154..9d65468 100644
--- a/apidocs/org/apache/hadoop/hbase/ipc/CallTimeoutException.html
+++ b/apidocs/org/apache/hadoop/hbase/ipc/CallTimeoutException.html
@@ -271,6 +271,6 @@ extends 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.html 
b/apidocs/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.html
index 8bf31cf..3c7176b 100644
--- a/apidocs/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.html
+++ b/apidocs/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.html
@@ -268,6 +268,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/IOException.ht
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.html 
b/apidocs/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.html
index 22304cb..08a987a 100644
--- a/apidocs/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.html
+++ b/apidocs/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.html
@@ -271,6 +271,6 @@ extends 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.html 
b/apidocs/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.html
index fc9cb48..1aac639 100644
--- a/apidocs/org/apache/hadoop/hbase/ipc/C

[16/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/client/Append.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Append.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Append.html
index 28a04b5..6de6c3e 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Append.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Append.html
@@ -131,7 +131,7 @@
 123byte [] family = 
CellUtil.cloneFamily(cell);
 124List list = 
this.familyMap.get(family);
 125if (list == null) {
-126  list  = new 
ArrayList();
+126  list  = new 
ArrayList(1);
 127}
 128// find where the new entry should be 
placed in the List
 129list.add(cell);

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncConnection.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncConnection.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncConnection.html
index f040f1e..177410b 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncConnection.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncConnection.html
@@ -58,30 +58,62 @@
 050  AsyncTableRegionLocator 
getRegionLocator(TableName tableName);
 051
 052  /**
-053   * Retrieve an RawAsyncTable 
implementation for accessing a table. The returned Table is not
-054   * thread safe, a new instance should 
be created for each using thread. This is a lightweight
-055   * operation, pooling or caching of the 
returned AsyncTable is neither required nor desired.
-056   * 

-057 * This method no longer checks table existence. An exception will be thrown if the table does not -058 * exist only when the first operation is attempted. -059 * @param tableName the name of the table -060 * @return an RawAsyncTable to use for interactions with this table -061 */ -062 RawAsyncTable getRawTable(TableName tableName); -063 -064 /** -065 * Retrieve an AsyncTable implementation for accessing a table. The returned Table is not thread -066 * safe, a new instance should be created for each using thread. This is a lightweight operation, -067 * pooling or caching of the returned AsyncTable is neither required nor desired. -068 *

-069 * This method no longer checks table existence. An exception will be thrown if the table does not -070 * exist only when the first operation is attempted. -071 * @param tableName the name of the table -072 * @param pool the thread pool to use for executing callback -073 * @return an AsyncTable to use for interactions with this table +053 * Retrieve an {@link RawAsyncTable} implementation for accessing a table. +054 *

+055 * The returned instance will use default configs. Use {@link #getRawTableBuilder(TableName)} if you +056 * want to customize some configs. +057 *

+058 * This method no longer checks table existence. An exception will be thrown if the table does not +059 * exist only when the first operation is attempted. +060 * @param tableName the name of the table +061 * @return an RawAsyncTable to use for interactions with this table +062 * @see #getRawTableBuilder(TableName) +063 */ +064 default RawAsyncTable getRawTable(TableName tableName) { +065return getRawTableBuilder(tableName).build(); +066 } +067 +068 /** +069 * Returns an {@link AsyncTableBuilder} for creating {@link RawAsyncTable}. +070 *

+071 * This method no longer checks table existence. An exception will be thrown if the table does not +072 * exist only when the first operation is attempted. +073 * @param tableName the name of the table 074 */ -075 AsyncTable getTable(TableName tableName, ExecutorService pool); -076} +075 AsyncTableBuilder getRawTableBuilder(TableName tableName); +076 +077 /** +078 * Retrieve an AsyncTable implementation for accessing a table. +079 *

+080 * This method no longer checks table existence. An exception will be thrown if the table does not +081 * exist only when the first operation is attempted. +082 * @param tableName the name of the table +083 * @param pool the thread pool to use for executing callback +084 * @return an AsyncTable to use for interactions with this table +085 */ +086 default AsyncTable getTable(TableName tableName, ExecutorService pool) { +087return getTableBuilder(tableName, pool).build(); +088 } +089 +090 /** +091 * Returns an {@link AsyncTableBuilder} for creating {@link AsyncTable}. +092 *

+093 * This method no longer checks table existence. An exception will be thrown if the table does not +094 * exist only when the first operation is attempted. +095 * @param tableName the name of the table +096 * @pa


[07/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
index 37af0c3..9c09190 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
@@ -40,800 +40,821 @@
 032
 033import org.apache.commons.logging.Log;
 034import 
org.apache.commons.logging.LogFactory;
-035import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-036import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-037import 
org.apache.hadoop.conf.Configuration;
-038import org.apache.hadoop.fs.FileSystem;
-039import org.apache.hadoop.fs.Path;
-040import org.apache.hadoop.hbase.Cell;
-041import 
org.apache.hadoop.hbase.CellComparator;
-042import 
org.apache.hadoop.hbase.CellUtil;
-043import 
org.apache.hadoop.hbase.HColumnDescriptor;
-044import 
org.apache.hadoop.hbase.HConstants;
-045import 
org.apache.hadoop.hbase.HRegionLocation;
-046import 
org.apache.hadoop.hbase.HTableDescriptor;
-047import 
org.apache.hadoop.hbase.KeyValue;
-048import 
org.apache.hadoop.hbase.KeyValueUtil;
-049import 
org.apache.hadoop.hbase.TableName;
+035import 
org.apache.hadoop.conf.Configuration;
+036import org.apache.hadoop.fs.FileSystem;
+037import org.apache.hadoop.fs.Path;
+038import org.apache.hadoop.hbase.Cell;
+039import 
org.apache.hadoop.hbase.CellComparator;
+040import 
org.apache.hadoop.hbase.CellUtil;
+041import 
org.apache.hadoop.hbase.HColumnDescriptor;
+042import 
org.apache.hadoop.hbase.HConstants;
+043import 
org.apache.hadoop.hbase.HRegionLocation;
+044import 
org.apache.hadoop.hbase.HTableDescriptor;
+045import 
org.apache.hadoop.hbase.KeyValue;
+046import 
org.apache.hadoop.hbase.KeyValueUtil;
+047import 
org.apache.hadoop.hbase.TableName;
+048import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+049import 
org.apache.hadoop.hbase.classification.InterfaceStability;
 050import 
org.apache.hadoop.hbase.client.Connection;
 051import 
org.apache.hadoop.hbase.client.ConnectionFactory;
 052import 
org.apache.hadoop.hbase.client.Put;
-053import 
org.apache.hadoop.hbase.fs.HFileSystem;
-054import 
org.apache.hadoop.hbase.client.RegionLocator;
-055import 
org.apache.hadoop.hbase.client.Table;
+053import 
org.apache.hadoop.hbase.client.RegionLocator;
+054import 
org.apache.hadoop.hbase.client.Table;
+055import 
org.apache.hadoop.hbase.fs.HFileSystem;
 056import 
org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 057import 
org.apache.hadoop.hbase.io.compress.Compression;
 058import 
org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 059import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-060import 
org.apache.hadoop.hbase.io.hfile.HFileWriterImpl;
-061import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-062import 
org.apache.hadoop.hbase.io.hfile.HFile;
-063import 
org.apache.hadoop.hbase.io.hfile.HFileContext;
-064import 
org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
+060import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
+061import 
org.apache.hadoop.hbase.io.hfile.HFile;
+062import 
org.apache.hadoop.hbase.io.hfile.HFileContext;
+063import 
org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
+064import 
org.apache.hadoop.hbase.io.hfile.HFileWriterImpl;
 065import 
org.apache.hadoop.hbase.regionserver.BloomType;
 066import 
org.apache.hadoop.hbase.regionserver.HStore;
 067import 
org.apache.hadoop.hbase.regionserver.StoreFile;
 068import 
org.apache.hadoop.hbase.regionserver.StoreFileWriter;
 069import 
org.apache.hadoop.hbase.util.Bytes;
 070import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-071import 
org.apache.hadoop.io.NullWritable;
-072import 
org.apache.hadoop.io.SequenceFile;
-073import org.apache.hadoop.io.Text;
-074import org.apache.hadoop.mapreduce.Job;
-075import 
org.apache.hadoop.mapreduce.OutputFormat;
-076import 
org.apache.hadoop.mapreduce.RecordWriter;
-077import 
org.apache.hadoop.mapreduce.TaskAttemptContext;
-078import 
org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
-079import 
org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-080import 
org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
-081
-082import 
com.google.common.annotations.VisibleForTesting;
-083
-084/**
-085 * Writes HFiles. Passed Cells must 
arrive in order.
-086 * Writes current time as the sequence id 
for the file. Sets the major compacted
-087 * attribute on created @{link {@link 
HFile}s. Calling write(null,null) will forcibly roll
-088 * all HFiles being written.
-089 * 

-090 * Using this class as part of a MapReduce job is best done -091 * using {@link #configureIncrementalLoad(Job, HTableDescriptor, RegionLocator, Class)}


[24/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/rest/Constants.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/rest/Constants.html 
b/apidocs/org/apache/hadoop/hbase/rest/Constants.html
index 210aaa8..4e17ad8 100644
--- a/apidocs/org/apache/hadoop/hbase/rest/Constants.html
+++ b/apidocs/org/apache/hadoop/hbase/rest/Constants.html
@@ -182,86 +182,122 @@ public interface 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-REST_DNS_INTERFACE 
+REST_CONNECTOR_ACCEPT_QUEUE_SIZE 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-REST_DNS_NAMESERVER 
+REST_DNS_INTERFACE 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-REST_KERBEROS_PRINCIPAL 
+REST_DNS_NAMESERVER 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-REST_KEYTAB_FILE 
+REST_KERBEROS_PRINCIPAL 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+REST_KEYTAB_FILE 
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 REST_SSL_ENABLED 
 
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+REST_SSL_EXCLUDE_CIPHER_SUITES 
+
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-REST_SSL_KEYSTORE_KEYPASSWORD 
+REST_SSL_EXCLUDE_PROTOCOLS 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-REST_SSL_KEYSTORE_PASSWORD 
+REST_SSL_INCLUDE_CIPHER_SUITES 
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+REST_SSL_INCLUDE_PROTOCOLS 
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+REST_SSL_KEYSTORE_KEYPASSWORD 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+REST_SSL_KEYSTORE_PASSWORD 
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 REST_SSL_KEYSTORE_STORE 
 
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+REST_THREAD_POOL_TASK_QUEUE_SIZE 
+
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-ROW_KEYS_PARAM_NAME 
+REST_THREAD_POOL_THREAD_IDLE_TIMEOUT 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-SCAN_BATCH_SIZE 
+REST_THREAD_POOL_THREADS_MAX 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-SCAN_COLUMN 
+REST_THREAD_POOL_THREADS_MIN 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-SCAN_END_ROW 
+ROW_KEYS_PARAM_NAME 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-SCAN_END_TIME 
+SCAN_BATCH_SIZE 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-SCAN_FETCH_SIZE 
+SCAN_COLUMN 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-SCAN_FILTER 
+SCAN_END_ROW 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-SCAN_LIMIT 
+SCAN_END_TIME 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-SCAN_MAX_VERSIONS 
+SCAN_FETCH_SIZE 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-SCAN_REVERSED 
+SCAN_FILTER 
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-SCAN_START_ROW 
+SCAN_LIMIT 
 
 
 sta

[10/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/filter/ParseFilter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/ParseFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/ParseFilter.html
index 0a33b24..834129e 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/ParseFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/ParseFilter.html
@@ -169,708 +169,715 @@
 161  throw new 
IllegalArgumentException("Mismatched parenthesis");
 162}
 163ByteBuffer argumentOnTopOfStack = 
operatorStack.peek();
-164while 
(!(argumentOnTopOfStack.equals(ParseConstants.LPAREN_BUFFER))) {
-165  
filterStack.push(popArguments(operatorStack, filterStack));
-166  if (operatorStack.empty()) {
-167throw new 
IllegalArgumentException("Mismatched parenthesis");
-168  }
-169  argumentOnTopOfStack = 
operatorStack.pop();
-170}
-171  } else {
-172// SimpleFilterExpression found
-173byte [] filterSimpleExpression = 
extractFilterSimpleExpression(filterStringAsByteArray, i);
-174i+= 
(filterSimpleExpression.length - 1);
-175filter = 
parseSimpleFilterExpression(filterSimpleExpression);
-176filterStack.push(filter);
-177  }
-178}
-179
-180// Finished parsing filterString
-181while (!operatorStack.empty()) {
-182  
filterStack.push(popArguments(operatorStack, filterStack));
-183}
-184filter = filterStack.pop();
-185if (!filterStack.empty()) {
-186  throw new 
IllegalArgumentException("Incorrect Filter String");
+164if 
(argumentOnTopOfStack.equals(ParseConstants.LPAREN_BUFFER)) {
+165  operatorStack.pop();
+166  continue;
+167}
+168while 
(!(argumentOnTopOfStack.equals(ParseConstants.LPAREN_BUFFER))) {
+169  
filterStack.push(popArguments(operatorStack, filterStack));
+170  if (operatorStack.empty()) {
+171throw new 
IllegalArgumentException("Mismatched parenthesis");
+172  }
+173  argumentOnTopOfStack = 
operatorStack.pop();
+174}
+175  } else {
+176// SimpleFilterExpression found
+177byte [] filterSimpleExpression = 
extractFilterSimpleExpression(filterStringAsByteArray, i);
+178i+= 
(filterSimpleExpression.length - 1);
+179filter = 
parseSimpleFilterExpression(filterSimpleExpression);
+180filterStack.push(filter);
+181  }
+182}
+183
+184// Finished parsing filterString
+185while (!operatorStack.empty()) {
+186  
filterStack.push(popArguments(operatorStack, filterStack));
 187}
-188return filter;
-189  }
-190
-191/**
-192 * Extracts a simple filter expression 
from the filter string given by the user
-193 * 

-194 * A simpleFilterExpression is of the form: FilterName('arg', 'arg', 'arg') -195 * The user given filter string can have many simpleFilterExpressions combined -196 * using operators. -197 *

-198 * This function extracts a simpleFilterExpression from the -199 * larger filterString given the start offset of the simpler expression +188if (filterStack.empty()) { +189throw new IllegalArgumentException("Incorrect Filter String"); +190} +191filter = filterStack.pop(); +192if (!filterStack.empty()) { +193 throw new IllegalArgumentException("Incorrect Filter String"); +194} +195return filter; +196 } +197 +198/** +199 * Extracts a simple filter expression from the filter string given by the user 200 *

-201 * @param filterStringAsByteArray filter string given by the user -202 * @param filterExpressionStartOffset start index of the simple filter expression -203 * @return byte array containing the simple filter expression -204 */ -205 public byte [] extractFilterSimpleExpression (byte [] filterStringAsByteArray, -206 int filterExpressionStartOffset) -207throws CharacterCodingException { -208int quoteCount = 0; -209for (int i=filterExpressionStartOffset; i


[35/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/class-use/ConnectionFactory.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/client/class-use/ConnectionFactory.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/ConnectionFactory.html
index 947eb72..7b15919 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/ConnectionFactory.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/ConnectionFactory.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
index 0b0710d..4c3092f 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
@@ -213,6 +213,6 @@ the order they are declared.
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Delete.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
index 2dbc82b..76d7733 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
@@ -297,6 +297,18 @@
 Deletes the specified cells/rows in bulk.
 
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListCompletableFutureVoid>>
+AsyncTableBase.delete(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List deletes)
+Deletes the specified cells/rows in bulk.
+
+
+
+default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
+AsyncTableBase.deleteAll(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List deletes)
+A simple version of batch delete.
+
+
 
 
 
@@ -410,6 +422,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/class-use/DoNotRetryRegionException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/client/class-use/DoNotRetryRegionException.html
 
b/apidocs/org/apache/hadoop/hbase/client/class-use/DoNotRetryRegionException.html
index 65f063b..cf71d36 100644
--- 
a/apidocs/org/apache/hadoop/hbase/client/class-use/DoNotRetryRegionException.html
+++ 
b/apidocs/org/apache/hadoop/hbase/client/class-use/DoNotRetryRegionException.html
@@ -220,6 +220,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
index ad0f3ec..b7596f5 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
@@ -306,6 +306,6 @@ the order they are declared.
 
 
 
-Copyright © 2007–2016 https://www.apac

[04/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteAdmin.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteAdmin.html 
b/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteAdmin.html
index 554ec88..e2f2023 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteAdmin.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteAdmin.html
@@ -34,379 +34,401 @@
 026import javax.xml.bind.JAXBContext;
 027import javax.xml.bind.JAXBException;
 028import javax.xml.bind.Unmarshaller;
-029
-030import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-031import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-032import 
org.apache.hadoop.conf.Configuration;
-033
-034import 
org.apache.hadoop.hbase.HTableDescriptor;
-035import 
org.apache.hadoop.hbase.rest.Constants;
-036import 
org.apache.hadoop.hbase.rest.model.StorageClusterStatusModel;
-037import 
org.apache.hadoop.hbase.rest.model.StorageClusterVersionModel;
-038import 
org.apache.hadoop.hbase.rest.model.TableListModel;
-039import 
org.apache.hadoop.hbase.rest.model.TableSchemaModel;
-040import 
org.apache.hadoop.hbase.rest.model.VersionModel;
-041import 
org.apache.hadoop.hbase.util.Bytes;
-042
-043@InterfaceAudience.Public
-044@InterfaceStability.Stable
-045public class RemoteAdmin {
-046
-047  final Client client;
-048  final Configuration conf;
-049  final String accessToken;
-050  final int maxRetries;
-051  final long sleepTime;
-052
-053  // This unmarshaller is necessary for 
getting the /version/cluster resource.
-054  // This resource does not support 
protobufs. Therefore this is necessary to
-055  // request/interpret it as XML.
-056  private static volatile Unmarshaller 
versionClusterUnmarshaller;
-057
-058  /**
-059   * Constructor
-060   * 
-061   * @param client
-062   * @param conf
-063   */
-064  public RemoteAdmin(Client client, 
Configuration conf) {
-065this(client, conf, null);
-066  }
-067
-068  static Unmarshaller getUnmarsheller() 
throws JAXBException {
-069
-070if (versionClusterUnmarshaller == 
null) {
-071
-072  
RemoteAdmin.versionClusterUnmarshaller = JAXBContext.newInstance(
-073  
StorageClusterVersionModel.class).createUnmarshaller();
-074}
-075return 
RemoteAdmin.versionClusterUnmarshaller;
-076  }
-077
-078  /**
-079   * Constructor
-080   * @param client
-081   * @param conf
-082   * @param accessToken
-083   */
-084  public RemoteAdmin(Client client, 
Configuration conf, String accessToken) {
-085this.client = client;
-086this.conf = conf;
-087this.accessToken = accessToken;
-088this.maxRetries = 
conf.getInt("hbase.rest.client.max.retries", 10);
-089this.sleepTime = 
conf.getLong("hbase.rest.client.sleep", 1000);
-090  }
-091
-092  /**
-093   * @param tableName name of table to 
check
-094   * @return true if all regions of the 
table are available
-095   * @throws IOException if a remote or 
network exception occurs
-096   */
-097  public boolean isTableAvailable(String 
tableName) throws IOException {
-098return 
isTableAvailable(Bytes.toBytes(tableName));
-099  }
-100
-101  /**
-102   * @return string representing the rest 
api's version
-103   * @throws IOException
-104   *   if the endpoint does not 
exist, there is a timeout, or some other
-105   *   general failure mode
-106   */
-107  public VersionModel getRestVersion() 
throws IOException {
-108
-109StringBuilder path = new 
StringBuilder();
-110path.append('/');
-111if (accessToken != null) {
-112  path.append(accessToken);
-113  path.append('/');
-114}
-115
-116path.append("version/rest");
-117
-118int code = 0;
-119for (int i = 0; i < maxRetries; 
i++) {
-120  Response response = 
client.get(path.toString(),
-121  Constants.MIMETYPE_PROTOBUF);
-122  code = response.getCode();
-123  switch (code) {
-124  case 200:
-125
-126VersionModel v = new 
VersionModel();
-127return (VersionModel) 
v.getObjectFromMessage(response.getBody());
-128  case 404:
-129throw new IOException("REST 
version not found");
-130  case 509:
-131try {
-132  Thread.sleep(sleepTime);
-133} catch (InterruptedException e) 
{
-134  throw 
(InterruptedIOException)new InterruptedIOException().initCause(e);
-135}
-136break;
-137  default:
-138throw new IOException("get 
request to " + path.toString()
-139+ " returned " + code);
-140  }
-141}
-142throw new IOException("get request to 
" + path.toString() + " timed out");
-143  }
-144
-145  /**
-146   * @return string representing the 
cluster's version
-147   * @throws IOException if the endpoint 
does not exist, there is a timeout, or some other general failure mode
-148   */
-149  pub

[03/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/rest/client/Response.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/rest/client/Response.html 
b/apidocs/src-html/org/apache/hadoop/hbase/rest/client/Response.html
index 21936c2..226ddc3 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/rest/client/Response.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/rest/client/Response.html
@@ -30,150 +30,153 @@
 022import java.io.IOException;
 023import java.io.InputStream;
 024
-025import org.apache.http.Header;
-026import org.apache.http.HttpResponse;
+025import org.apache.commons.logging.Log;
+026import 
org.apache.commons.logging.LogFactory;
 027import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 028import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-029import org.mortbay.log.Log;
-030
-031/**
-032 * The HTTP result code, response 
headers, and body of a HTTP response.
-033 */
-034@InterfaceAudience.Public
-035@InterfaceStability.Stable
-036public class Response {
-037  private int code;
-038  private Header[] headers;
-039  private byte[] body;
-040  private HttpResponse resp;
-041  private InputStream stream;
-042
-043  /**
-044   * Constructor
-045   * @param code the HTTP response code
-046   */
-047  public Response(int code) {
-048this(code, null, null);
-049  }
-050
-051  /**
-052   * Constructor
-053   * @param code the HTTP response code
-054   * @param headers the HTTP response 
headers
-055   */
-056  public Response(int code, Header[] 
headers) {
-057this(code, headers, null);
-058  }
-059
-060  /**
-061   * Constructor
-062   * @param code the HTTP response code
-063   * @param headers the HTTP response 
headers
-064   * @param body the response body, can 
be null
-065   */
-066  public Response(int code, Header[] 
headers, byte[] body) {
-067this.code = code;
-068this.headers = headers;
-069this.body = body;
-070  }
-071  
-072  /**
-073   * Constructor
-074   * @param code the HTTP response code
-075   * @param headers headers the HTTP 
response headers
-076   * @param resp the response
-077   * @param in Inputstream if the 
response had one.
-078   * Note: this is not thread-safe
-079   */
-080  public Response(int code, Header[] 
headers, HttpResponse resp, InputStream in) {
-081this.code = code;
-082this.headers = headers;
-083this.body = null;
-084this.resp = resp;
-085this.stream = in;
-086  }
-087
-088  /**
-089   * @return the HTTP response code
-090   */
-091  public int getCode() {
-092return code;
-093  }
-094  
-095  /**
-096   * Gets the input stream instance.
-097   *
-098   * @return an instance of InputStream 
class.
-099   */
-100  public InputStream getStream(){
-101return this.stream;
-102  }
-103
-104  /**
-105   * @return the HTTP response headers
-106   */
-107  public Header[] getHeaders() {
-108return headers;
-109  }
-110
-111  public String getHeader(String key) {
-112for (Header header: headers) {
-113  if 
(header.getName().equalsIgnoreCase(key)) {
-114return header.getValue();
-115  }
-116}
-117return null;
-118  }
-119
-120  /**
-121   * @return the value of the Location 
header
-122   */
-123  public String getLocation() {
-124return getHeader("Location");
-125  }
-126
-127  /**
-128   * @return true if a response body was 
sent
-129   */
-130  public boolean hasBody() {
-131return body != null;
-132  }
-133
-134  /**
-135   * @return the HTTP response body
-136   */
-137  public byte[] getBody() {
-138if (body == null) {
-139  try {
-140body = 
Client.getResponseBody(resp);
-141  } catch (IOException ioe) {
-142Log.debug("encountered ioe when 
obtaining body", ioe);
-143  }
-144}
-145return body;
-146  }
-147
-148  /**
-149   * @param code the HTTP response code
-150   */
-151  public void setCode(int code) {
-152this.code = code;
-153  }
-154
-155  /**
-156   * @param headers the HTTP response 
headers
-157   */
-158  public void setHeaders(Header[] 
headers) {
-159this.headers = headers;
-160  }
-161
-162  /**
-163   * @param body the response body
-164   */
-165  public void setBody(byte[] body) {
-166this.body = body;
-167  }
-168}
+029import org.apache.http.Header;
+030import org.apache.http.HttpResponse;
+031
+032/**
+033 * The HTTP result code, response 
headers, and body of a HTTP response.
+034 */
+035@InterfaceAudience.Public
+036@InterfaceStability.Stable
+037public class Response {
+038  private static final Log LOG = 
LogFactory.getLog(Response.class);
+039
+040  private int code;
+041  private Header[] headers;
+042  private byte[] body;
+043  private HttpResponse resp;
+044  private InputStream stream;
+045
+046  /**
+047   * Constructor
+048   * @param code the HTTP response code
+049   */
+050  public Response(int code) {
+051this(code,

[47/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/index-all.html
--
diff --git a/apidocs/index-all.html b/apidocs/index-all.html
index 6dfd615..a64c70f 100644
--- a/apidocs/index-all.html
+++ b/apidocs/index-all.html
@@ -154,10 +154,6 @@
 
 Deprecated.
  
-add(long,
 long) - Method in class org.apache.hadoop.hbase.util.FastLongHistogram
-
-Adds a value to the histogram.
-
 addAllServers(Collection)
 - Method in class org.apache.hadoop.hbase.rsgroup.RSGroupInfo
 
 Adds a group of servers.
@@ -368,6 +364,7 @@
 
 addPeer(String,
 ReplicationPeerConfig) - Method in class 
org.apache.hadoop.hbase.client.replication.ReplicationAdmin
 
+Deprecated.
 Add a new remote slave cluster for replication.
 
 addRegionServer()
 - Method in class org.apache.hadoop.hbase.LocalHBaseCluster
@@ -376,6 +373,10 @@
  
 addRegionServer(Configuration,
 int, User) - Method in class org.apache.hadoop.hbase.LocalHBaseCluster
  
+addReplicationPeer(String,
 ReplicationPeerConfig) - Method in interface 
org.apache.hadoop.hbase.client.Admin
+
+Add a new replication peer for replicating data to slave 
cluster
+
 addServer(HostAndPort)
 - Method in class org.apache.hadoop.hbase.rsgroup.RSGroupInfo
 
 Adds the server to the group.
@@ -449,6 +450,7 @@
 
 appendPeerTableCFs(String,
 Map>) - Method 
in class org.apache.hadoop.hbase.client.replication.ReplicationAdmin
 
+Deprecated.
 Append the replicable table-cf config of the specified 
peer
 
 apply(byte)
 - Method in enum org.apache.hadoop.hbase.util.Order
@@ -497,6 +499,10 @@
  
 assign(byte[])
 - Method in interface org.apache.hadoop.hbase.client.Admin
  
+AsyncAdmin - Interface in org.apache.hadoop.hbase.client
+
+The asynchronous administrative API for HBase.
+
 AsyncConnection - Interface in org.apache.hadoop.hbase.client
 
 The asynchronous version of Connection.
@@ -509,6 +515,10 @@
 
 The base interface for asynchronous version of Table.
 
+AsyncTableBuilder - Interface in org.apache.hadoop.hbase.client
+
+For creating AsyncTable or RawAsyncTable.
+
 AsyncTableRegionLocator - Interface in org.apache.hadoop.hbase.client
 
 The asynchronous version of RegionLocator.
@@ -521,6 +531,10 @@
 
 Utility methods for helping with security tasks.
 
+await(long,
 TimeUnit) - Method in class 
org.apache.hadoop.hbase.client.locking.EntityLock
+ 
+await()
 - Method in class org.apache.hadoop.hbase.client.locking.EntityLock
+ 
 
 
 
@@ -549,6 +563,14 @@
 
 Invoke the balancer.
 
+balancer()
 - Method in interface org.apache.hadoop.hbase.client.AsyncAdmin
+
+Invoke the balancer.
+
+balancer(boolean)
 - Method in interface org.apache.hadoop.hbase.client.AsyncAdmin
+
+Invoke the balancer.
+
 base
 - Variable in class org.apache.hadoop.hbase.types.FixedLengthWrapper
  
 Base64 - 
Class in org.apache.hadoop.hbase.util
@@ -585,12 +607,20 @@
 
 BASE_NAMESPACE_DIR
 - Static variable in class org.apache.hadoop.hbase.HConstants
  
+batch(List) - Method in interface 
org.apache.hadoop.hbase.client.AsyncTableBase
+
+Method that does a batch call on Deletes, Gets, Puts, 
Increments and Appends.
+
 batch(List, Object[]) - Method in interface 
org.apache.hadoop.hbase.client.Table
 
 Method that does a batch call on Deletes, Gets, Puts, 
Increments and Appends.
 
 batch(List, Object[]) - Method in class 
org.apache.hadoop.hbase.rest.client.RemoteHTable
  
+batchAll(List) - Method in interface 
org.apache.hadoop.hbase.client.AsyncTableBase
+
+A simple version of batch.
+
 batchCallback(List, Object[], Batch.Callback) - Method in 
interface org.apache.hadoop.hbase.client.Table
 
 Same as Table.batch(List,
 Object[]), but with a callback.
@@ -724,8 +754,18 @@
 
 BufferedMutatorParams(TableName)
 - Constructor for class org.apache.hadoop.hbase.client.BufferedMutatorParams
  
+build()
 - Method in interface org.apache.hadoop.hbase.client.AsyncTableBuilder
+
+Create the AsyncTable or RawAsyncTable instance.
+
+build()
 - Method in interface org.apache.hadoop.hbase.client.TableBuilder
+
+Create the Table instance.
+
 build()
 - Method in class org.apache.hadoop.hbase.NamespaceDescriptor.Builder
  
+buildClientServiceCallable(Connection,
 TableName, byte[], Collection, 
boolean) - Method in class org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles
+ 
 buildDependencyClasspath(Configuration)
 - Static method in class org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil
 
 Returns a classpath string built from the content of the 
"tmpjars" value in conf.
@@ -882,6 +922,10 @@
  
 CACHE_INDEX_ON_WRITE
 - Static variable in class org.apache.hadoop.hbase.HColumnDescriptor
  
+call(S,
 RpcController, RpcCallback) - Method in interface 
org.apache.hadoop.hbase.client.RawAsyncTable.CoprocessorCallable
+
+Represent the actual protobuf rpc cal

[48/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apache_hbase_reference_guide.pdfmarks
--
diff --git a/apache_hbase_reference_guide.pdfmarks 
b/apache_hbase_reference_guide.pdfmarks
index 6fbd5cc..9cf49c9 100644
--- a/apache_hbase_reference_guide.pdfmarks
+++ b/apache_hbase_reference_guide.pdfmarks
@@ -2,8 +2,8 @@
   /Author (Apache HBase Team)
   /Subject ()
   /Keywords ()
-  /ModDate (D:20161207204746)
-  /CreationDate (D:20161207204746)
+  /ModDate (D:20170205144937)
+  /CreationDate (D:20170205144937)
   /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
   /Producer ()
   /DOCINFO pdfmark

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/allclasses-frame.html
--
diff --git a/apidocs/allclasses-frame.html b/apidocs/allclasses-frame.html
index 78b13e4..6fe32c0 100644
--- a/apidocs/allclasses-frame.html
+++ b/apidocs/allclasses-frame.html
@@ -15,9 +15,11 @@
 AccessDeniedException
 Admin
 Append
+AsyncAdmin
 AsyncConnection
 AsyncTable
 AsyncTableBase
+AsyncTableBuilder
 AsyncTableRegionLocator
 Attributes
 AuthUtil
@@ -95,6 +97,7 @@
 Encryption.Context
 EncryptionTest
 Encryptor
+EntityLock
 ExponentialClientBackoffPolicy
 Export
 ExportSnapshot
@@ -105,7 +108,6 @@
 FailedSyncBeforeLogCloseException
 FallbackDisallowedException
 FamilyFilter
-FastLongHistogram
 FatalConnectionException
 FileSystemVersionException
 Filter
@@ -166,8 +168,10 @@
 MasterNotRunningException
 MasterSwitchType
 MD5Hash
+MemoryCompactionPolicy
 MergeRegionException
 MiniZooKeeperCluster
+MobCompactPartitionPolicy
 MobConstants
 MultiActionResultTooLarge
 MultiHFileOutputFormat
@@ -233,6 +237,8 @@
 QuotaType
 RandomRowFilter
 RawAsyncTable
+RawAsyncTable.CoprocessorCallable
+RawAsyncTable.CoprocessorCallback
 RawByte
 RawBytes
 RawBytesFixedLength
@@ -266,6 +272,12 @@
 ReplicationAdmin
 ReplicationException
 ReplicationPeerConfig
+ReplicationPeerDescription
+ReplicationPeerNotFoundException
+RequestController
+RequestController.Checker
+RequestController.ReturnCode
+RequestControllerFactory
 RequestTooBigException
 Response
 RestoreSnapshotException
@@ -276,6 +288,7 @@
 RetriesExhaustedWithDetailsException
 RetryImmediatelyException
 Row
+RowAccess
 RowCounter
 RowCounter
 RowFilter
@@ -285,6 +298,7 @@
 RpcRetryingCaller
 RSGroupInfo
 Scan
+Scan.ReadType
 ScannerResetException
 ScannerTimeoutException
 ScanResultConsumer
@@ -315,6 +329,8 @@
 StructIterator
 SubstringComparator
 Table
+TableBuilder
+TableCFs
 TableExistsException
 TableInfoMissingException
 TableInputFormat

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/allclasses-noframe.html
--
diff --git a/apidocs/allclasses-noframe.html b/apidocs/allclasses-noframe.html
index 66c8b56..a28ed3e 100644
--- a/apidocs/allclasses-noframe.html
+++ b/apidocs/allclasses-noframe.html
@@ -15,9 +15,11 @@
 AccessDeniedException
 Admin
 Append
+AsyncAdmin
 AsyncConnection
 AsyncTable
 AsyncTableBase
+AsyncTableBuilder
 AsyncTableRegionLocator
 Attributes
 AuthUtil
@@ -95,6 +97,7 @@
 Encryption.Context
 EncryptionTest
 Encryptor
+EntityLock
 ExponentialClientBackoffPolicy
 Export
 ExportSnapshot
@@ -105,7 +108,6 @@
 FailedSyncBeforeLogCloseException
 FallbackDisallowedException
 FamilyFilter
-FastLongHistogram
 FatalConnectionException
 FileSystemVersionException
 Filter
@@ -166,8 +168,10 @@
 MasterNotRunningException
 MasterSwitchType
 MD5Hash
+MemoryCompactionPolicy
 MergeRegionException
 MiniZooKeeperCluster
+MobCompactPartitionPolicy
 MobConstants
 MultiActionResultTooLarge
 MultiHFileOutputFormat
@@ -233,6 +237,8 @@
 QuotaType
 RandomRowFilter
 RawAsyncTable
+RawAsyncTable.CoprocessorCallable
+RawAsyncTable.CoprocessorCallback
 RawByte
 RawBytes
 RawBytesFixedLength
@@ -266,6 +272,12 @@
 ReplicationAdmin
 ReplicationException
 ReplicationPeerConfig
+ReplicationPeerDescription
+ReplicationPeerNotFoundException
+RequestController
+RequestController.Checker
+RequestController.ReturnCode
+RequestControllerFactory
 RequestTooBigException
 Response
 RestoreSnapshotException
@@ -276,6 +288,7 @@
 RetriesExhaustedWithDetailsException
 RetryImmediatelyException
 Row
+RowAccess
 RowCounter
 RowCounter
 RowFilter
@@ -285,6 +298,7 @@
 RpcRetryingCaller
 RSGroupInfo
 Scan
+Scan.ReadType
 ScannerResetException
 ScannerTimeoutException
 ScanResultConsumer
@@ -315,6 +329,8 @@
 StructIterator
 SubstringComparator
 Table
+TableBuilder
+TableCFs
 TableExistsException
 TableInfoMissingException
 TableInputFormat

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/com/google/protobuf/package-summary.html
--
diff --git a/apidocs/com/google/protobuf/package-summary.html 
b/apidocs/com/google/protobuf/package-summary.html
index af2bb5b..a740d2c 1006

[44/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/org/apache/hadoop/hbase/HConstants.html
index d19b7e4..3f508f0 100644
--- a/apidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/org/apache/hadoop/hbase/HConstants.html
@@ -1013,10 +1013,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_RPC_TIMEOUT_KEY
-Deprecated. 
-Use HBASE_RPC_READ_TIMEOUT_KEY
 or HBASE_RPC_WRITE_TIMEOUT_KEY
- instead.
-
+timeout for each RPC
 
 
 
@@ -1608,6 +1605,16 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 REPLICATION_SOURCE_SERVICE_CLASSNAME 
 
 
+static int
+REPLICATION_SOURCE_TOTAL_BUFFER_DFAULT 
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+REPLICATION_SOURCE_TOTAL_BUFFER_KEY
+Max total size of buffered entries in all replication 
peers.
+
+
+
 static int[]
 RETRY_BACKOFF
 Retrying we multiply hbase.client.pause setting by what we 
have in this array until we
@@ -4496,10 +4503,7 @@ public static final http://docs.oracle.com/javase/8/docs/api/java/
 
 
 HBASE_RPC_TIMEOUT_KEY
-http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String HBASE_RPC_TIMEOUT_KEY
-Deprecated. Use HBASE_RPC_READ_TIMEOUT_KEY
 or HBASE_RPC_WRITE_TIMEOUT_KEY
- instead.
+public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String HBASE_RPC_TIMEOUT_KEY
 timeout for each RPC
 
 See Also:
@@ -4513,7 +4517,7 @@ public static final http://docs.oracle.com/javase/8/docs/api/java/
 
 
 HBASE_RPC_READ_TIMEOUT_KEY
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String HBASE_RPC_READ_TIMEOUT_KEY
+public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String HBASE_RPC_READ_TIMEOUT_KEY
 timeout for each read RPC
 
 See Also:
@@ -4527,7 +4531,7 @@ public static final http://docs.oracle.com/javase/8/docs/api/java/
 
 
 HBASE_RPC_WRITE_TIMEOUT_KEY
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String HBASE_RPC_WRITE_TIMEOUT_KEY
+public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String HBASE_RPC_WRITE_TIMEOUT_KEY
 timeout for each write RPC
 
 See Also:
@@ -4541,7 +4545,7 @@ public static final http://docs.oracle.com/javase/8/docs/api/java/
 
 
 DEFAULT_HBASE_RPC_TIMEOUT
-public static final int DEFAULT_HBASE_RPC_TIMEOUT
+public static final int DEFAULT_HBASE_RPC_TIMEOUT
 Default value of HBASE_RPC_TIMEOUT_KEY
 
 See Also:
@@ -4555,7 +4559,7 @@ public static final http://docs.oracle.com/javase/8/docs/api/java/
 
 
 HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY
+public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY
 timeout for short operation RPC
 
 See Also:
@@ -4569,7 +4573,7 @@ public static final http://docs.oracle.com/javase/8/docs/api/java/
 
 
 DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT
-public static final int DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT
+public static final int DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT
 Default value of HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY
 
 See Also:
@@ -4583,7 +4587,7 @@ public static final http://docs.oracle.com/javase/8/docs/api/java/
 
 
 NO_SEQNUM
-public static final long NO_SEQNUM
+public static final long NO_SEQNUM
 Value indicating the server name was saved with no sequence 
number.
 
 See Also:
@@ -4597,7 +4601,7 @@ public static final http://docs.oracle.com/javase/8/docs/api/java/
 
 
 REPLICATION_SOURCE_SERVICE_CLASSNAME
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String REPLICATION_SOURCE_SERVICE_CLASSNAME
+public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interfac

[25/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html 
b/apidocs/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
index d12a000..3119dcd 100644
--- a/apidocs/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
+++ b/apidocs/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames
@@ -150,51 +150,59 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 Method and Description
 
 
+long
+getBandwidth() 
+
+
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 getClusterKey() 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String>
 getConfiguration() 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true";
 title="class or interface in java.util">SetString>
 getNamespaces() 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in 
java.util">Map
 getPeerData() 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 getReplicationEndpointImpl() 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapListString>>
 getTableCFsMap() 
 
-
+
+ReplicationPeerConfig
+setBandwidth(long bandwidth) 
+
+
 ReplicationPeerConfig
 setClusterKey(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String clusterKey)
 Set the clusterKey which is the concatenation of the slave 
cluster's:
   
hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
 
 
-
+
 ReplicationPeerConfig
 setNamespaces(http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true";
 title="class or interface in java.util">SetString> namespaces) 
 
-
+
 ReplicationPeerConfig
 setReplicationEndpointImpl(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String replicationEndpointImpl)
 Sets the ReplicationEndpoint plugin class for this 
peer.
 
 
-
+
 ReplicationPeerConfig
 setTableCFsMap(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapCollectionString>> tableCFsMap) 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 toString() 
 
@@ -226,7 +234,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 ReplicationPeerConfig
-public ReplicationPeerConfig()
+public ReplicationPeerConfig()
 
 
 
@@ -243,7 +251,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 setClusterKey
-public ReplicationPeerConfig setClusterKey(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String clusterKey)
+public ReplicationPeerConfig setClusterKey(http://

[34/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/package-frame.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/package-frame.html 
b/apidocs/org/apache/hadoop/hbase/client/package-frame.html
index a2256c3..344e261 100644
--- a/apidocs/org/apache/hadoop/hbase/client/package-frame.html
+++ b/apidocs/org/apache/hadoop/hbase/client/package-frame.html
@@ -14,22 +14,30 @@
 Interfaces
 
 Admin
+AsyncAdmin
 AsyncConnection
 AsyncTable
 AsyncTableBase
+AsyncTableBuilder
 AsyncTableRegionLocator
 Attributes
 BufferedMutator
 BufferedMutator.ExceptionListener
 Connection
 RawAsyncTable
+RawAsyncTable.CoprocessorCallable
+RawAsyncTable.CoprocessorCallback
 RawScanResultConsumer
 RegionLocator
+RequestController
+RequestController.Checker
 ResultScanner
 Row
+RowAccess
 RpcRetryingCaller
 ScanResultConsumer
 Table
+TableBuilder
 
 Classes
 
@@ -47,6 +55,7 @@
 Put
 Query
 RegionLoadStats
+RequestControllerFactory
 Result
 RowMutations
 Scan
@@ -62,6 +71,9 @@
 Durability
 IsolationLevel
 MasterSwitchType
+MobCompactPartitionPolicy
+RequestController.ReturnCode
+Scan.ReadType
 SnapshotType
 
 Exceptions

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/package-summary.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/package-summary.html 
b/apidocs/org/apache/hadoop/hbase/client/package-summary.html
index ea0bd45..5a73a42 100644
--- a/apidocs/org/apache/hadoop/hbase/client/package-summary.html
+++ b/apidocs/org/apache/hadoop/hbase/client/package-summary.html
@@ -93,23 +93,35 @@
 
 
 
+AsyncAdmin
+
+The asynchronous administrative API for HBase.
+
+
+
 AsyncConnection
 
 The asynchronous version of Connection.
 
 
-
+
 AsyncTable
 
 The asynchronous table for normal users.
 
 
-
+
 AsyncTableBase
 
 The base interface for asynchronous version of Table.
 
 
+
+AsyncTableBuilder
+
+For creating AsyncTable 
or RawAsyncTable.
+
+
 
 AsyncTableRegionLocator
 
@@ -147,6 +159,18 @@
 
 
 
+RawAsyncTable.CoprocessorCallable
+
+Delegate to a protobuf rpc call.
+
+
+
+RawAsyncTable.CoprocessorCallback
+
+The callback when we want to execute a coprocessor call on 
a range of regions.
+
+
+
 RawScanResultConsumer
 
 Receives Result for an asynchronous 
scan.
@@ -159,6 +183,18 @@
 
 
 
+RequestController
+
+An interface for client request scheduling algorithm.
+
+
+
+RequestController.Checker
+
+Picks up the valid data.
+
+
+
 ResultScanner
 
 Interface for client-side scanning.
@@ -171,21 +207,33 @@
 
 
 
+RowAccess
+
+Provide a way to access the inner buffer.
+
+
+
 RpcRetryingCaller
  
 
-
+
 ScanResultConsumer
 
 Receives Result for an asynchronous 
scan.
 
 
-
+
 Table
 
 Used to communicate with a single HBase table.
 
 
+
+TableBuilder
+
+For creating Table 
instance.
+
+
 
 
 
@@ -276,36 +324,42 @@
 
 
 
+RequestControllerFactory
+
+A factory class that constructs an RequestController.
+
+
+
 Result
 
 Single row result of a Get or Scan query.
 
 
-
+
 RowMutations
 
 Performs multiple mutations atomically on a single 
row.
 
 
-
+
 Scan
 
 Used to perform Scan operations.
 
 
-
+
 SnapshotDescription
 
 The POJO equivalent of HBaseProtos.SnapshotDescription
 
 
-
+
 TableSnapshotScanner
 
 A Scanner which performs a scan over snapshot files.
 
 
-
+
 UnmodifyableHTableDescriptor
 
 Read-only table descriptor.
@@ -362,6 +416,20 @@
 
 
 
+MobCompactPartitionPolicy
+
+Enum describing the mob compact partition policy 
types.
+
+
+
+RequestController.ReturnCode
+ 
+
+
+Scan.ReadType
+ 
+
+
 SnapshotType
 
 POJO representing the snapshot type
@@ -674,6 +742,6 @@ public class MyLittleHBaseClient {
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/package-tree.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/package-tree.html 
b/apidocs/org/apache/hadoop/hbase/client/package-tree.html
index 7244039..2bac222 100644
--- a/apidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/apidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -86,7 +86,7 @@
 org.apache.hadoop.hbase.client.TableSnapshotScanner
 
 
-org.apache.hadoop.hbase.client.BufferedMutatorParams
+org.apache.hadoop.hbase.client.BufferedMutatorParams (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Cloneable.html?is-external=true";
 title="class or interface in java.lang">Cloneable)
 org.apache.hadoop.hbase.client.ConnectionFactory
 org.apache.hadoop.hbase.HTableDescriptor (implements java.lang.http://

[40/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/client/Admin.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Admin.html 
b/apidocs/org/apache/hadoop/hbase/client/Admin.html
index 1868dd7..6d71c8d 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Admin.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Admin.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":38,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":38,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":38,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":6,"i68":6,"i69":6,"i70":6,"i71":6,"i72":6,"i73":6,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":6,"i109":6,"i110":6,"i111":6,"i112":6,"i113":6,"i114":6,"i115":6,"i116":6,"i117":6,"i118":6,"i119":6,"i120":38
 
,"i121":6,"i122":6,"i123":38,"i124":6,"i125":6,"i126":6,"i127":6,"i128":6,"i129":6,"i130":6,"i131":6,"i132":6,"i133":6,"i134":6,"i135":6,"i136":6,"i137":6,"i138":6,"i139":6,"i140":6,"i141":6,"i142":6,"i143":6,"i144":6,"i145":6,"i146":6,"i147":6,"i148":6,"i149":6,"i150":6,"i151":6,"i152":6,"i153":6,"i154":6,"i155":6,"i156":6,"i157":6,"i158":6,"i159":6,"i160":6,"i161":6};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],32:["t6","Deprecated Methods"]};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":38,"i4":6,"i5":6,"i6":18,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":38,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":18,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":18,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":38,"i67":6,"i68":6,"i69":6,"i70":6,"i71":6,"i72":6,"i73":6,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":18,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":18,"i108":18,"i109":18,"i110":6,"i111":6,"i112":6,"i113":6,"i114":6,"i115":6,"i116":6,"i117":6,"i118":6,"i119":6,"i
 
120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6,"i127":6,"i128":6,"i129":6,"i130":6,"i131":6,"i132":6,"i133":38,"i134":6,"i135":6,"i136":38,"i137":6,"i138":6,"i139":6,"i140":6,"i141":6,"i142":6,"i143":6,"i144":6,"i145":6,"i146":6,"i147":18,"i148":6,"i149":6,"i150":6,"i151":6,"i152":6,"i153":6,"i154":6,"i155":6,"i156":6,"i157":6,"i158":6,"i159":6,"i160":6,"i161":6,"i162":6,"i163":6,"i164":6,"i165":6,"i166":6,"i167":6,"i168":6,"i169":6,"i170":6,"i171":6,"i172":6,"i173":6,"i174":6,"i175":6,"i176":6,"i177":6,"i178":6,"i179":18};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],16:["t5","Default 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -107,7 +107,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
  @InterfaceStability.Evolving
-public interface Admin
+public interface Admin
 extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true";
 title="class or interface in java.io">Closeable
 The administrative API for HBase. Obtain an instance from 
an Connection.getAdmin()
 and
  call close()
 afterwards.
@@ -134,7 +134,7 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 Method Summary
 
-All Methods Instance Methods Abstract Methods Deprecated Methods 
+All Methods Instance Methods Abstract Methods Default Methods Deprecated Methods 
 
 Modifier and Type
 Method and Description
@@ -187,36 +187,43 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 
+default void
+addReplicationPeer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String peerId,
+  ReplicationPeerConfig peerConfig)
+Add a new replication peer for replicating data to slave 
cluster
+
+
+
 void
 assign(byte[] regionName) 
 
-
+
 boole

[30/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/http/conf/package-use.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/http/conf/package-use.html 
b/apidocs/org/apache/hadoop/hbase/http/conf/package-use.html
index d4eb19d..a0180e2 100644
--- a/apidocs/org/apache/hadoop/hbase/http/conf/package-use.html
+++ b/apidocs/org/apache/hadoop/hbase/http/conf/package-use.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/http/jmx/package-summary.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/http/jmx/package-summary.html 
b/apidocs/org/apache/hadoop/hbase/http/jmx/package-summary.html
index a82d399..db80922 100644
--- a/apidocs/org/apache/hadoop/hbase/http/jmx/package-summary.html
+++ b/apidocs/org/apache/hadoop/hbase/http/jmx/package-summary.html
@@ -135,6 +135,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/http/jmx/package-tree.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/http/jmx/package-tree.html 
b/apidocs/org/apache/hadoop/hbase/http/jmx/package-tree.html
index 54455fd..6332cba 100644
--- a/apidocs/org/apache/hadoop/hbase/http/jmx/package-tree.html
+++ b/apidocs/org/apache/hadoop/hbase/http/jmx/package-tree.html
@@ -123,6 +123,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/http/jmx/package-use.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/http/jmx/package-use.html 
b/apidocs/org/apache/hadoop/hbase/http/jmx/package-use.html
index f0609f4..e251add 100644
--- a/apidocs/org/apache/hadoop/hbase/http/jmx/package-use.html
+++ b/apidocs/org/apache/hadoop/hbase/http/jmx/package-use.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/http/log/package-summary.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/http/log/package-summary.html 
b/apidocs/org/apache/hadoop/hbase/http/log/package-summary.html
index ebfe6b1..8ad93e0 100644
--- a/apidocs/org/apache/hadoop/hbase/http/log/package-summary.html
+++ b/apidocs/org/apache/hadoop/hbase/http/log/package-summary.html
@@ -119,6 +119,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/http/log/package-tree.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/http/log/package-tree.html 
b/apidocs/org/apache/hadoop/hbase/http/log/package-tree.html
index 6ac108a..bd93f20 100644
--- a/apidocs/org/apache/hadoop/hbase/http/log/package-tree.html
+++ b/apidocs/org/apache/hadoop/hbase/http/log/package-tree.html
@@ -123,6 +123,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/http/log/package-use.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/http/log/package-use.html 
b/apidocs/org/apache/hadoop/hbase/http/log/package-use.html
index b862a27..6e1208b 100644
--- a/apidocs/org/apache/hadoop/hbase/http/log/package-use.html
+++ b/apidocs/org/apache/hadoop/hbase/http/log/package-use.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright ©

[11/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

2017-02-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/errorhandling/ForeignException.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/errorhandling/ForeignException.html 
b/apidocs/src-html/org/apache/hadoop/hbase/errorhandling/ForeignException.html
index a746bcc..ab67bc0 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/errorhandling/ForeignException.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/errorhandling/ForeignException.html
@@ -189,7 +189,7 @@
 181   * the sender).
 182   */
 183  private static StackTraceElement[] 
toStackTrace(List traceList) {
-184if (traceList == null || 
traceList.size() == 0) {
+184if (traceList == null || 
traceList.isEmpty()) {
 185  return new StackTraceElement[0]; // 
empty array
 186}
 187StackTraceElement[] trace = new 
StackTraceElement[traceList.size()];

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
index bb0976f..52dbaf6 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
@@ -185,7 +185,7 @@
 177  
  " can only be used with EQUAL and NOT_EQUAL");
 178  }
 179}
-180ArrayList arguments = 
new ArrayList();
+180ArrayList arguments = 
new ArrayList(2);
 181arguments.add(compareOp);
 182arguments.add(comparator);
 183return arguments;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
index bb0976f..52dbaf6 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/CompareFilter.html
@@ -185,7 +185,7 @@
 177  
  " can only be used with EQUAL and NOT_EQUAL");
 178  }
 179}
-180ArrayList arguments = 
new ArrayList();
+180ArrayList arguments = 
new ArrayList(2);
 181arguments.add(compareOp);
 182arguments.add(comparator);
 183return arguments;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.html
index f44ae45..1293a7d 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.html
@@ -69,7 +69,7 @@
 061  }
 062
 063  public static Filter 
createFilterFromArguments(ArrayList filterArguments) {
-064
Preconditions.checkArgument(filterArguments.size() == 0,
+064
Preconditions.checkArgument(filterArguments.isEmpty(),
 065"Expected 
0 but got: %s", filterArguments.size());
 066return new FirstKeyOnlyFilter();
 067  }

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
index 3703791..d1dabd3 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
@@ -83,7 +83,7 @@
 075  }
 076  
 077  public static Filter 
createFilterFromArguments(ArrayList filterArguments) {
-078
Preconditions.checkArgument((filterArguments.size() == 0 || 
filterArguments.size() == 1),
+078
Preconditions.checkArgument((filterArguments.isEmpty() || 
filterArguments.size() == 1),
 079
"Expected: 0 or 1 but got: %s", filterArguments.size());
 080KeyOnlyFilter filter = new 
KeyOnlyFilter();
 081if (filterArguments.size() == 1) {

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RowRange.html
---

[46/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/AuthUtil.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/AuthUtil.html 
b/apidocs/org/apache/hadoop/hbase/AuthUtil.html
index cf0cb40..f377747 100644
--- a/apidocs/org/apache/hadoop/hbase/AuthUtil.html
+++ b/apidocs/org/apache/hadoop/hbase/AuthUtil.html
@@ -281,6 +281,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/CallDroppedException.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/CallDroppedException.html 
b/apidocs/org/apache/hadoop/hbase/CallDroppedException.html
index 2120614..9fb6b2e 100644
--- a/apidocs/org/apache/hadoop/hbase/CallDroppedException.html
+++ b/apidocs/org/apache/hadoop/hbase/CallDroppedException.html
@@ -279,6 +279,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/IOException.ht
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/CallQueueTooBigException.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/CallQueueTooBigException.html 
b/apidocs/org/apache/hadoop/hbase/CallQueueTooBigException.html
index 4fd2711..39e9fcf 100644
--- a/apidocs/org/apache/hadoop/hbase/CallQueueTooBigException.html
+++ b/apidocs/org/apache/hadoop/hbase/CallQueueTooBigException.html
@@ -277,6 +277,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/IOException.ht
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/Cell.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/Cell.html 
b/apidocs/org/apache/hadoop/hbase/Cell.html
index fbf171c..0d73a43 100644
--- a/apidocs/org/apache/hadoop/hbase/Cell.html
+++ b/apidocs/org/apache/hadoop/hbase/Cell.html
@@ -580,6 +580,6 @@ public interface Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/CellUtil.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/CellUtil.html 
b/apidocs/org/apache/hadoop/hbase/CellUtil.html
index 120bec7..22c98a3 100644
--- a/apidocs/org/apache/hadoop/hbase/CellUtil.html
+++ b/apidocs/org/apache/hadoop/hbase/CellUtil.html
@@ -2592,6 +2592,6 @@ public static http://docs.oracle.com/javase/8/docs/api/java/util/I
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/ChoreService.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/ChoreService.html 
b/apidocs/org/apache/hadoop/hbase/ChoreService.html
index 6d889f9..db6705f 100644
--- a/apidocs/org/apache/hadoop/hbase/ChoreService.html
+++ b/apidocs/org/apache/hadoop/hbase/ChoreService.html
@@ -400,6 +400,6 @@ public Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/ClockOutOfSyncException.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/ClockOutOfSyncException.html 
b/apidocs/org/apache/hadoop/hbase/ClockOutOfSyncException.html
index 71cdb30..acc9c5d 100644
--- a/apidocs/org/apache/hadoop/hbase/ClockOutOfSyncException.html
+++ b/apidocs/org/apache/hadoop/hbase/ClockOutOfSyncException.html
@@ -267,6 +267,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/IOException.ht
 
 
 
-Copyrig

[31/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/filter/ParseFilter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/ParseFilter.html 
b/apidocs/org/apache/hadoop/hbase/filter/ParseFilter.html
index 43b2165..7993db3 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/ParseFilter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/ParseFilter.html
@@ -406,7 +406,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 extractFilterSimpleExpression
-public byte[] extractFilterSimpleExpression(byte[] filterStringAsByteArray,
+public byte[] extractFilterSimpleExpression(byte[] filterStringAsByteArray,
 
int filterExpressionStartOffset)
  throws http://docs.oracle.com/javase/8/docs/api/java/nio/charset/CharacterCodingException.html?is-external=true";
 title="class or interface in 
java.nio.charset">CharacterCodingException
 Extracts a simple filter expression from the filter string 
given by the user
@@ -435,7 +435,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 parseSimpleFilterExpression
-public Filter parseSimpleFilterExpression(byte[] filterStringAsByteArray)
+public Filter parseSimpleFilterExpression(byte[] filterStringAsByteArray)
throws http://docs.oracle.com/javase/8/docs/api/java/nio/charset/CharacterCodingException.html?is-external=true";
 title="class or interface in 
java.nio.charset">CharacterCodingException
 Constructs a filter object given a simple filter expression
  
@@ -455,7 +455,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getFilterName
-public static byte[] getFilterName(byte[] filterStringAsByteArray)
+public static byte[] getFilterName(byte[] filterStringAsByteArray)
 Returns the filter name given a simple filter expression
  
 
@@ -472,7 +472,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getFilterArguments
-public static http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in java.util">ArrayList getFilterArguments(byte[] filterStringAsByteArray)
+public static http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in java.util">ArrayList getFilterArguments(byte[] filterStringAsByteArray)
 Returns the arguments of the filter from the filter string
  
 
@@ -489,7 +489,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 reduce
-public void reduce(http://docs.oracle.com/javase/8/docs/api/java/util/Stack.html?is-external=true";
 title="class or interface in java.util">StackByteBuffer> operatorStack,
+public void reduce(http://docs.oracle.com/javase/8/docs/api/java/util/Stack.html?is-external=true";
 title="class or interface in java.util">StackByteBuffer> operatorStack,
http://docs.oracle.com/javase/8/docs/api/java/util/Stack.html?is-external=true";
 title="class or interface in java.util">Stack filterStack,
http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer operator)
 This function is called while parsing the filterString and 
an operator is parsed
@@ -508,7 +508,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 popArguments
-public static Filter popArguments(http://docs.oracle.com/javase/8/docs/api/java/util/Stack.html?is-external=true";
 title="class or interface in java.util">StackByteBuffer> operatorStack,
+public static Filter popArguments(http://docs.oracle.com/javase/8/docs/api/java/util/Stack.html?is-external=true";
 title="class or interface in java.util">StackByteBuffer> operatorStack,
   http://docs.oracle.com/javase/8/docs/api/java/util/Stack.html?is-external=true";
 title="class or interface in java.util">Stack filterStack)
 Pops an argument from the operator stack and the number of 
arguments required by the operator
  from the filterStack and evaluates them
@@ -528,7 +528,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 hasHigherPriority
-public boolean hasHigherPriority(http://docs.oracle.com/javase/8/docs/api/java/nio/Byte

[22/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/types/class-use/RawByte.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/types/class-use/RawByte.html 
b/apidocs/org/apache/hadoop/hbase/types/class-use/RawByte.html
index fb459b7..ddc1f76 100644
--- a/apidocs/org/apache/hadoop/hbase/types/class-use/RawByte.html
+++ b/apidocs/org/apache/hadoop/hbase/types/class-use/RawByte.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/types/class-use/RawBytes.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/types/class-use/RawBytes.html 
b/apidocs/org/apache/hadoop/hbase/types/class-use/RawBytes.html
index ba4be10..648682a 100644
--- a/apidocs/org/apache/hadoop/hbase/types/class-use/RawBytes.html
+++ b/apidocs/org/apache/hadoop/hbase/types/class-use/RawBytes.html
@@ -168,6 +168,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/types/class-use/RawBytesFixedLength.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/types/class-use/RawBytesFixedLength.html 
b/apidocs/org/apache/hadoop/hbase/types/class-use/RawBytesFixedLength.html
index e217a1e..1c880b1 100644
--- a/apidocs/org/apache/hadoop/hbase/types/class-use/RawBytesFixedLength.html
+++ b/apidocs/org/apache/hadoop/hbase/types/class-use/RawBytesFixedLength.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/types/class-use/RawBytesTerminated.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/types/class-use/RawBytesTerminated.html 
b/apidocs/org/apache/hadoop/hbase/types/class-use/RawBytesTerminated.html
index 7b4d371..bbe1d33 100644
--- a/apidocs/org/apache/hadoop/hbase/types/class-use/RawBytesTerminated.html
+++ b/apidocs/org/apache/hadoop/hbase/types/class-use/RawBytesTerminated.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/types/class-use/RawDouble.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/types/class-use/RawDouble.html 
b/apidocs/org/apache/hadoop/hbase/types/class-use/RawDouble.html
index ec92df8..71f9514 100644
--- a/apidocs/org/apache/hadoop/hbase/types/class-use/RawDouble.html
+++ b/apidocs/org/apache/hadoop/hbase/types/class-use/RawDouble.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/types/class-use/RawFloat.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/types/class-use/RawFloat.html 
b/apidocs/org/apache/hadoop/hbase/types/class-use/RawFloat.html
index cb7a8ef..699747b 100644
--- a/apidocs/org/apache/hadoop/hbase/types/class-use/RawFloat.html
+++ b/apidocs/org/apache/hadoop/hbase/types/class-use/RawFloat.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2016 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
+Copyright © 2007–2017 https://www.apache.org/";>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/org/apache/hadoop/hbase/types/class-use/RawInteger.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/types/class-use/RawInteger.html 
b/apidocs/org/apache/hadoop/hbase/types/class-use/RawInteger.html
index 85b64e7..d0e549f 100644
--- a/apidocs/org/apache/hadoop/hbase/types/class-use/RawInteger.html
+++ b/api

[12/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
index 0094b10..e237805 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
@@ -37,641 +37,525 @@
 029import java.util.HashSet;
 030import java.util.List;
 031import java.util.Map;
-032import java.util.Map.Entry;
-033import java.util.Set;
-034
-035import org.apache.commons.logging.Log;
-036import 
org.apache.commons.logging.LogFactory;
-037import 
org.apache.hadoop.conf.Configuration;
-038import 
org.apache.hadoop.hbase.Abortable;
-039import 
org.apache.hadoop.hbase.HColumnDescriptor;
-040import 
org.apache.hadoop.hbase.HConstants;
-041import 
org.apache.hadoop.hbase.HTableDescriptor;
-042import 
org.apache.hadoop.hbase.TableName;
-043import 
org.apache.hadoop.hbase.TableNotFoundException;
-044import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-045import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-046import 
org.apache.hadoop.hbase.client.Admin;
-047import 
org.apache.hadoop.hbase.client.Connection;
-048import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-049import 
org.apache.hadoop.hbase.client.RegionLocator;
-050import 
org.apache.hadoop.hbase.replication.ReplicationException;
-051import 
org.apache.hadoop.hbase.replication.ReplicationFactory;
-052import 
org.apache.hadoop.hbase.replication.ReplicationPeer;
-053import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-054import 
org.apache.hadoop.hbase.replication.ReplicationPeerZKImpl;
-055import 
org.apache.hadoop.hbase.replication.ReplicationPeers;
-056import 
org.apache.hadoop.hbase.replication.ReplicationQueuesClient;
-057import 
org.apache.hadoop.hbase.replication.ReplicationQueuesClientArguments;
-058import 
org.apache.hadoop.hbase.util.Pair;
-059import 
org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
-060
-061/**
-062 * 

-063 * This class provides the administrative interface to HBase cluster -064 * replication. In order to use it, the cluster and the client using -065 * ReplicationAdmin must be configured with hbase.replication -066 * set to true. -067 *

-068 *

-069 * Adding a new peer results in creating new outbound connections from every -070 * region server to a subset of region servers on the slave cluster. Each -071 * new stream of replication will start replicating from the beginning of the -072 * current WAL, meaning that edits from that past will be replicated. -073 *

-074 *

-075 * Removing a peer is a destructive and irreversible operation that stops -076 * all the replication streams for the given cluster and deletes the metadata -077 * used to keep track of the replication state. -078 *

-079 *

-080 * To see which commands are available in the shell, type -081 * replication. -082 *

-083 */ -084@InterfaceAudience.Public -085@InterfaceStability.Evolving -086public class ReplicationAdmin implements Closeable { -087 private static final Log LOG = LogFactory.getLog(ReplicationAdmin.class); -088 -089 public static final String TNAME = "tableName"; -090 public static final String CFNAME = "columnFamilyName"; -091 -092 // only Global for now, can add other type -093 // such as, 1) no global replication, or 2) the table is replicated to this cluster, etc. -094 public static final String REPLICATIONTYPE = "replicationType"; -095 public static final String REPLICATIONGLOBAL = -096 Integer.toString(HConstants.REPLICATION_SCOPE_GLOBAL); -097 public static final String REPLICATIONSERIAL = -098 Integer.toString(HConstants.REPLICATION_SCOPE_SERIAL); -099 -100 private final Connection connection; -101 // TODO: replication should be managed by master. All the classes except ReplicationAdmin should -102 // be moved to hbase-server. Resolve it in HBASE-11392. -103 private final ReplicationQueuesClient replicationQueuesClient; -104 private final ReplicationPeers replicationPeers; -105 /** -106 * A watcher used by replicationPeers and replicationQueuesClient. Keep reference so can dispose -107 * on {@link #close()}. -108 */ -109 private final ZooKeeperWatcher zkw; -110 -111 /** -112 * Constructor that creates a connection to the local ZooKeeper ensemble. -113 * @param conf Configuration to use -114 * @throws IOException if an internal replication error occurs -115 * @throws RuntimeException if replication isn't enabled. -116 */ -117 public ReplicationAdmin(Configuration conf) throws IOException { -118this.connection = ConnectionFactory.createCon

[01/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 0b74b2633 -> 2f960d3f3


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/bulk-loads.html
--
diff --git a/bulk-loads.html b/bulk-loads.html
index b49f44a..a5c364f 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase –  
   Bulk Loads in Apache HBase (TM)
@@ -319,11 +319,11 @@ under the License. -->
 
 
   
-  Copyright ©
2007–2016
+  Copyright ©
2007–2017
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-12-07
+  Last Published: 
2017-02-05
 
 
 



[02/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/book.html
--
diff --git a/book.html b/book.html
index f8bb073..af2676f 100644
--- a/book.html
+++ b/book.html
@@ -172,113 +172,112 @@
 89. Loading Coprocessors
 90. Examples
 91. Guidelines For 
Deploying A Coprocessor
-92. Monitor Time Spent in 
Coprocessors
-93. Restricting Coprocessor 
Usage
+92. Restricting Coprocessor 
Usage
 
 
 Apache HBase Performance Tuning
 
-94. Operating System
-95. Network
-96. Java
-97. HBase Configurations
-98. ZooKeeper
-99. Schema Design
-100. HBase General Patterns
-101. Writing to HBase
-102. Reading from HBase
-103. Deleting from HBase
-104. HDFS
-105. Amazon EC2
-106. Collocating HBase and 
MapReduce
-107. Case Studies
+93. Operating System
+94. Network
+95. Java
+96. HBase Configurations
+97. ZooKeeper
+98. Schema Design
+99. HBase General Patterns
+100. Writing to HBase
+101. Reading from HBase
+102. Deleting from HBase
+103. HDFS
+104. Amazon EC2
+105. Collocating HBase and 
MapReduce
+106. Case Studies
 
 
 Troubleshooting and Debugging Apache HBase
 
-108. General Guidelines
-109. Logs
-110. Resources
-111. Tools
-112. Client
-113. MapReduce
-114. NameNode
-115. Network
-116. RegionServer
-117. Master
-118. ZooKeeper
-119. Amazon EC2
-120. HBase and Hadoop version issues
-121. IPC Configuration 
Conflicts with Hadoop
-122. HBase and HDFS
-123. Running unit or integration tests
-124. Case Studies
-125. Cryptographic Features
-126. Operating System 
Specific Issues
-127. JDK Issues
+107. General Guidelines
+108. Logs
+109. Resources
+110. Tools
+111. Client
+112. MapReduce
+113. NameNode
+114. Network
+115. RegionServer
+116. Master
+117. ZooKeeper
+118. Amazon EC2
+119. HBase and Hadoop version issues
+120. IPC Configuration 
Conflicts with Hadoop
+121. HBase and HDFS
+122. Running unit or integration tests
+123. Case Studies
+124. Cryptographic Features
+125. Operating System 
Specific Issues
+126. JDK Issues
 
 
 Apache HBase Case Studies
 
-128. Overview
-129. Schema Design
-130. Performance/Troubleshooting
+127. Overview
+128. Schema Design
+129. Performance/Troubleshooting
 
 
 Apache HBase Operational Management
 
-131. HBase Tools and Utilities
-132. Region Management
-133. Node Management
-134. HBase Metrics
-135. HBase Monitoring
-136. Cluster Replication
-137. Running 
Multiple Workloads On a Single Cluster
-138. HBase Backup
-139. HBase Snapshots
-140. Storing Snapshots in Microsoft Azure Blob 
Storage
-141. Capacity Planning and Region Sizing
-142. Table Rename
+130. HBase Tools and Utilities
+131. Region Management
+132. Node Management
+133. HBase Metrics
+134. HBase Monitoring
+135. Cluster Replication
+136. Running 
Multiple Workloads On a Single Cluster
+137. HBase Backup
+138. HBase Snapshots
+139. Storing Snapshots in Microsoft Azure Blob 
Storage
+140. Capacity Planning and Region Sizing
+141. Table Rename
 
 
 Building and Developing Apache HBase
 
-143. Getting Involved
-144. Apache HBase Repositories
-145. IDEs
-146. Building Apache HBase
-147. Releasing Apache HBase
-148. Voting on Release Candidates
-149. Generating the HBase Reference Guide
-150. Updating http://hbase.apache.org";>hbase.apache.org
-151. Tests
-152. Developer Guidelines
+142. Getting Involved
+143. Apache HBase Repositories
+144. IDEs
+145. Building Apache HBase
+146. Releasing Apache HBase
+147. Voting on Release Candidates
+148. Generating the HBase Reference Guide
+149. Updating http://hbase.apache.org";>hbase.apache.org
+150. Tests
+151. Developer Guidelines
 
 
 Unit Testing HBase Applications
 
-153. JUnit
-154. Mockito
-155. MRUnit
-156. 
Integration Testing with an HBase Mini-Cluster
+152. JUnit
+153. Mockito
+154. MRUnit
+155. 
Integration Testing with an HBase Mini-Cluster
 
 
 Protobuf in HBase
 
-157. Protobuf
+156. Protobuf
 
 
 ZooKeeper
 
-158. Using existing 
ZooKeeper ensemble
-159. SASL Authentication with ZooKeeper
+157. Using existing 
ZooKeeper ensemble
+158. SASL Authentication with ZooKeeper
 
 
 Community
 
-160. Decisions
-161. Community Roles
-162. Commit Message format
+159. Decisions
+160. Community Roles
+161. Commit Message format
 
 
 Appendix
@@ -288,7 +287,7 @@
 Appendix C: hbck In Depth
 Appendix D: Access Control Matrix
 Appendix E: Compression and Data Block Encoding In 
HBase
-163. Enable Data Block 
Encoding
+162. Enable Data Block 
Encoding
 Appendix F: SQL over HBase
 Appendix G: YCSB
 Appendix H: HFile format
@@ -297,8 +296,8 @@
 Appendix K: HBase and the Apache Software 
Foundation
 Appendix L: Apache HBase Orca
 Appendix M: Enabling Dapper-like Tracing in 
HBase
-164. Client Modifications
-165. Tracing from HBase Shell
+163. Client Modifications
+164. Tracing from HBase Shell
 Appendix N: 0.95 RPC Specification
 
 
@@ -1768,6 +1767,24 @@ HBase 0.98 drops support for Hadoop 1.0, deprecates use 
of Hadoop 1.1+, and HBas
 
 
 
+Hadoop Pre-2.6.1 and JDK 1.8 Kerberos
+
+When using pre-2.6.1 Hadoop versions 

[06/52] [partial] hbase-site git commit: Published site at 26a94844f533b95db1f0a58d6a7cc3dc4a7a7098.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/62e361eb/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
index 9a593f9..a8ad4e5 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
@@ -53,1235 +53,1251 @@
 045import 
java.util.concurrent.LinkedBlockingQueue;
 046import 
java.util.concurrent.ThreadPoolExecutor;
 047import java.util.concurrent.TimeUnit;
-048
-049import 
org.apache.commons.lang.mutable.MutableInt;
-050import org.apache.commons.logging.Log;
-051import 
org.apache.commons.logging.LogFactory;
-052import 
org.apache.hadoop.conf.Configuration;
-053import 
org.apache.hadoop.conf.Configured;
-054import org.apache.hadoop.fs.FileStatus;
-055import org.apache.hadoop.fs.FileSystem;
-056import org.apache.hadoop.fs.Path;
-057import 
org.apache.hadoop.fs.permission.FsPermission;
-058import 
org.apache.hadoop.hbase.HBaseConfiguration;
-059import 
org.apache.hadoop.hbase.HColumnDescriptor;
-060import 
org.apache.hadoop.hbase.HConstants;
-061import 
org.apache.hadoop.hbase.HTableDescriptor;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotFoundException;
-064import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-065import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-066import 
org.apache.hadoop.hbase.client.Admin;
-067import 
org.apache.hadoop.hbase.client.ClientServiceCallable;
-068import 
org.apache.hadoop.hbase.client.Connection;
-069import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-070import 
org.apache.hadoop.hbase.client.RegionLocator;
-071import 
org.apache.hadoop.hbase.client.RpcRetryingCallerFactory;
-072import 
org.apache.hadoop.hbase.client.SecureBulkLoadClient;
-073import 
org.apache.hadoop.hbase.client.Table;
-074import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-075import 
org.apache.hadoop.hbase.io.HFileLink;
-076import 
org.apache.hadoop.hbase.io.HalfStoreFileReader;
-077import 
org.apache.hadoop.hbase.io.Reference;
-078import 
org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
-079import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-080import 
org.apache.hadoop.hbase.io.hfile.HFile;
-081import 
org.apache.hadoop.hbase.io.hfile.HFileContext;
-082import 
org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
-083import 
org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
-084import 
org.apache.hadoop.hbase.io.hfile.HFileScanner;
-085import 
org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-086import 
org.apache.hadoop.hbase.regionserver.BloomType;
-087import 
org.apache.hadoop.hbase.regionserver.HStore;
-088import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-089import 
org.apache.hadoop.hbase.regionserver.StoreFileWriter;
-090import 
org.apache.hadoop.hbase.security.UserProvider;
-091import 
org.apache.hadoop.hbase.security.token.FsDelegationToken;
-092import 
org.apache.hadoop.hbase.util.Bytes;
-093import 
org.apache.hadoop.hbase.util.FSHDFSUtils;
-094import 
org.apache.hadoop.hbase.util.Pair;
-095import org.apache.hadoop.util.Tool;
-096import 
org.apache.hadoop.util.ToolRunner;
-097
-098import 
com.google.common.collect.HashMultimap;
-099import 
com.google.common.collect.Multimap;
-100import 
com.google.common.collect.Multimaps;
-101import 
com.google.common.util.concurrent.ThreadFactoryBuilder;
-102
-103/**
-104 * Tool to load the output of 
HFileOutputFormat into an existing table.
-105 */
-106@InterfaceAudience.Public
-107@InterfaceStability.Stable
-108public class LoadIncrementalHFiles 
extends Configured implements Tool {
-109  private static final Log LOG = 
LogFactory.getLog(LoadIncrementalHFiles.class);
-110  private boolean initalized = false;
-111
-112  public static final String NAME = 
"completebulkload";
-113  public static final String 
MAX_FILES_PER_REGION_PER_FAMILY
-114= 
"hbase.mapreduce.bulkload.max.hfiles.perRegion.perFamily";
-115  private static final String 
ASSIGN_SEQ_IDS = "hbase.mapreduce.bulkload.assign.sequenceNumbers";
-116  public final static String 
CREATE_TABLE_CONF_KEY = "create.table";
-117  public final static String 
SILENCE_CONF_KEY = "ignore.unmatched.families";
-118  public final static String 
ALWAYS_COPY_FILES = "always.copy.files";
-119
-120  // We use a '.' prefix which is ignored 
when walking directory trees
-121  // above. It is invalid family name.
-122  final static String TMP_DIR = ".tmp";
-123
-124  private int 
maxFilesPerRegionPerFamily;
-125  private boolean assignSeqIds;
-126  private Set 
unmatchedFamilies = new HashSet();
-127
-128  // Source filesystem
-129  private FileSystem fs;
-130  // Source delegation