hbase git commit: HBASE-18418 Remove apache_hbase_topology from dev-support

2017-10-18 Thread dimaspivak
Repository: hbase
Updated Branches:
  refs/heads/master c16eb7881 -> 3acb08178


HBASE-18418 Remove apache_hbase_topology from dev-support


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3acb0817
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3acb0817
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3acb0817

Branch: refs/heads/master
Commit: 3acb081787a4289d86d977db26bedaf6a42172ce
Parents: c16eb78
Author: Dima Spivak 
Authored: Thu Jul 20 10:08:11 2017 -0700
Committer: Dima Spivak 
Committed: Wed Oct 18 14:08:26 2017 -0700

--
 dev-support/apache_hbase_topology/Dockerfile|  24 --
 dev-support/apache_hbase_topology/README.md |  49 ---
 dev-support/apache_hbase_topology/__init__.py   |  15 -
 dev-support/apache_hbase_topology/actions.py| 421 ---
 .../apache_hbase_topology/configurations.cfg|  80 
 dev-support/apache_hbase_topology/profile.cfg   |  82 
 dev-support/apache_hbase_topology/ssh/id_rsa|  44 --
 .../apache_hbase_topology/ssh/id_rsa.pub|  18 -
 8 files changed, 733 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3acb0817/dev-support/apache_hbase_topology/Dockerfile
--
diff --git a/dev-support/apache_hbase_topology/Dockerfile b/dev-support/apache_hbase_topology/Dockerfile
deleted file mode 100644
index 714a55c..000
--- a/dev-support/apache_hbase_topology/Dockerfile
+++ /dev/null
@@ -1,24 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-FROM debian:wheezy
-
-ENV TOPOLOGY_NAME=apache_hbase
-ADD . /root/clusterdock/clusterdock/topologies/${TOPOLOGY_NAME}
-
-RUN find /root -type f -name id_rsa -exec chmod 600 {} \;
-
-VOLUME /root/clusterdock/clusterdock/topologies/${TOPOLOGY_NAME}
-CMD ["/true"]

http://git-wip-us.apache.org/repos/asf/hbase/blob/3acb0817/dev-support/apache_hbase_topology/README.md
--
diff --git a/dev-support/apache_hbase_topology/README.md b/dev-support/apache_hbase_topology/README.md
deleted file mode 100644
index 018ee99..000
--- a/dev-support/apache_hbase_topology/README.md
+++ /dev/null
@@ -1,49 +0,0 @@
-
-# apache_hbase clusterdock topology
-
-## Overview
-*clusterdock* is a framework for creating Docker-based container clusters. Unlike regular Docker
-containers, which tend to run single processes and then exit once the process terminates, these
-container clusters are characterized by the execution of an init process in daemon mode. As such,
-the containers act more like "fat containers" or "light VMs;" entities with accessible IP addresses
-which emulate standalone hosts.
-
-*clusterdock* relies upon the notion of a topology to define how clusters should be built into
-images and then what to do with those images to start Docker container clusters.
-
-## Usage
-The *clusterdock* framework is designed to be run out of its own container while affecting
-operations on the host. To avoid problems that might result from incorrectly
-formatting this framework invocation, a Bash helper script (`clusterdock.sh`) can be sourced on a
-host that has Docker installed. Afterwards, running any of the binaries intended to carry
-out *clusterdock* actions can be done using the `clusterdock_run` command.
-```
-wget https://raw.githubusercontent.com/cloudera/clusterdock/master/clusterdock.sh
-# ALWAYS INSPECT SCRIPTS FROM THE INTERNET BEFORE SOURCING THEM.
-source clusterdock.sh
-```
-
-Since the *clusterdock* framework itself lives outside of Apache HBase, an environmental variable
-is used to let the helper script know where to find an image of the *apache_hbase* topology. To
-start a four-node Apache HBase cluster with default versions, you would simply run
-```
-CLUSTERDOCK_TOPOLOGY_IMAGE=apache_hbase_topology_location clusterdock_run \
-./bin/start_cluster apache_hbase --secondary-nodes='node-{2..4}'
-```


[45/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/org/apache/hadoop/hbase/client/Scan.html
--
[Generated Javadoc diff for apidocs/org/apache/hadoop/hbase/client/Scan.html; the archive stripped the HTML markup, so only a summary of the recoverable changes is kept. The update lists loadColumnFamiliesOnDemand among the fields Scan inherits from Query, removes doLoadColumnFamiliesOnDemand() and getLoadColumnFamiliesOnDemandValue() from Scan's own method summary (they now live on Query), and renumbers the remaining method-summary anchors. The rest of the message is truncated by the archive.]

[44/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/org/apache/hadoop/hbase/client/class-use/AsyncTableRegionLocator.html
--
[Generated Javadoc class-use diffs; the archive stripped the HTML markup, so only a summary is kept. A new page, apidocs/org/apache/hadoop/hbase/client/class-use/AsyncTableRegionLocator.html, documents AsyncConnection.getRegionLocator(TableName), which retrieves an AsyncTableRegionLocator for inspecting region information on a table. class-use/Delete.html gains a row for AsyncTable.delete(Delete), returning CompletableFuture<Void>. class-use/Get.html gains rows for Get.setLoadColumnFamiliesOnDemand(boolean) and for the new AsyncTable methods that take a Get and return a CompletableFuture; the remaining changes are anchor renumbering. The rest of the message is truncated by the archive.]

[36/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.Operator.html
--
[Annotated-source Javadoc diff for apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.Operator.html, i.e. the rendered FilterList source; the archive stripped the HTML markup and generics, so only a summary is kept. FilterList now extends FilterBase instead of Filter, its internal List<Filter> field becomes final rather than being initialized to a fresh ArrayList, the List-taking constructor now documents that all filters are cloned into the internal list, and the bodies of the constructors, getOperator/getFilters/addFilter, reset, and filterRowKey are rewritten in the replacement hunk. The rest of the message is truncated by the archive.]

[52/52] hbase-site git commit: Empty commit

2016-10-17 Thread dimaspivak
Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/9d13f2d9
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/9d13f2d9
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/9d13f2d9

Branch: refs/heads/asf-site
Commit: 9d13f2d98f82b59344783be0545b30e1bcc767d7
Parents: f07ee53
Author: Dima Spivak 
Authored: Mon Oct 17 14:26:32 2016 -0700
Committer: Dima Spivak 
Committed: Mon Oct 17 14:26:32 2016 -0700

--

--




[35/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.html
--
[Annotated-source Javadoc diff for apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.html; this is the same rendered FilterList source as in [36/52] above (FilterList extends FilterBase, the final List<Filter> field, the cloned-filters constructor note, and the rewritten method bodies), duplicated for the class page. The rest of the message is truncated by the archive.]

[41/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/org/apache/hadoop/hbase/util/class-use/Pair.html
--
[Generated Javadoc diffs; the archive stripped the HTML markup, so only a summary is kept. apidocs/org/apache/hadoop/hbase/util/class-use/Pair.html reflects that LoadIncrementalHFiles.groupOrSplit(...) now returns Pair<List<LoadQueueItem>, String> instead of List<LoadQueueItem> ("Attempt to assign the given load queue item into its target region group"). apidocs/overview-frame.html and apidocs/overview-summary.html drop the org.apache.hadoop.hbase.mob.mapreduce package; the remaining changes are row-color renumbering. The rest of the message is truncated by the archive.]
[47/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/org/apache/hadoop/hbase/client/AsyncTable.html
--
[New generated Javadoc page apidocs/org/apache/hadoop/hbase/client/AsyncTable.html; the archive stripped the HTML markup, so only a summary is kept. The page documents the new @InterfaceAudience.Public / @InterfaceStability.Unstable interface AsyncTable, the asynchronous version of Table, obtained from an AsyncConnection. The implementation is not required to be thread safe, so it must not be accessed from multiple threads concurrently, and implementations usually do not throw exceptions directly; failures are delivered through the returned CompletableFuture. The method summary lists delete(Delete) and put(Put) returning CompletableFuture<Void>, exists(Get) returning CompletableFuture<Boolean>, get(Get) returning CompletableFuture<Result>, getConfiguration(), getName(), getOperationTimeout/getReadRpcTimeout/getWriteRpcTimeout(TimeUnit), and setOperationTimeout(long, TimeUnit). The rest of the message is truncated by the archive.]
[51/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/f07ee53f
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/f07ee53f
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/f07ee53f

Branch: refs/heads/asf-site
Commit: f07ee53f300b8457f08319a9e1b546b53a0a69a9
Parents: 344fa32
Author: jenkins 
Authored: Mon Oct 17 14:55:12 2016 +
Committer: Dima Spivak 
Committed: Mon Oct 17 14:20:11 2016 -0700

--
 acid-semantics.html |  4 +-
 apache_hbase_reference_guide.pdf| 722473 +---
 apache_hbase_reference_guide.pdfmarks   | 14 +-
 apidocs/allclasses-frame.html   |  4 +-
 apidocs/allclasses-noframe.html |  4 +-
 apidocs/constant-values.html| 19 +
 apidocs/index-all.html  |144 +-
 .../org/apache/hadoop/hbase/ProcedureInfo.html  | 21 +-
 .../hadoop/hbase/class-use/HRegionLocation.html | 13 +
 .../hadoop/hbase/class-use/TableName.html   | 26 +-
 .../hbase/class-use/TableNotFoundException.html |  2 +-
 .../org/apache/hadoop/hbase/client/Append.html  |  4 +-
 .../hadoop/hbase/client/AsyncConnection.html|294 +
 .../apache/hadoop/hbase/client/AsyncTable.html  |454 +
 .../hbase/client/AsyncTableRegionLocator.html   |278 +
 .../apache/hadoop/hbase/client/Attributes.html  |  4 +-
 .../BufferedMutator.ExceptionListener.html  |  4 +-
 .../hadoop/hbase/client/BufferedMutator.html| 36 +-
 .../hadoop/hbase/client/ConnectionFactory.html  |167 +-
 apidocs/org/apache/hadoop/hbase/client/Get.html |131 +-
 .../org/apache/hadoop/hbase/client/Query.html   |111 +-
 .../org/apache/hadoop/hbase/client/Scan.html|211 +-
 .../hadoop/hbase/client/class-use/Admin.html|  2 +-
 .../hbase/client/class-use/AsyncConnection.html |183 +
 .../hbase/client/class-use/AsyncTable.html  |169 +
 .../class-use/AsyncTableRegionLocator.html  |169 +
 .../hadoop/hbase/client/class-use/Delete.html   |  6 +
 .../hadoop/hbase/client/class-use/Get.html  | 28 +-
 .../hadoop/hbase/client/class-use/Put.html  |  8 +-
 .../hadoop/hbase/client/class-use/Query.html|  7 +
 .../hbase/client/class-use/RegionLocator.html   |  2 +-
 .../hadoop/hbase/client/class-use/Result.html   | 15 +
 .../hadoop/hbase/client/class-use/Scan.html |  5 +-
 .../hadoop/hbase/client/class-use/Table.html|  4 +-
 .../hadoop/hbase/client/package-frame.html  |  3 +
 .../hadoop/hbase/client/package-summary.html| 34 +-
 .../hadoop/hbase/client/package-tree.html   |  3 +
 .../apache/hadoop/hbase/client/package-use.html | 81 +-
 .../org/apache/hadoop/hbase/filter/Filter.html  |  2 +-
 .../apache/hadoop/hbase/filter/FilterList.html  |262 +-
 .../hadoop/hbase/filter/class-use/Filter.html   |  6 +-
 .../hadoop/hbase/filter/package-tree.html   |  2 +-
 .../hbase/mapreduce/LoadIncrementalHFiles.html  | 68 +-
 .../hbase/mob/compactions/package-summary.html  |  4 +-
 .../hbase/mob/compactions/package-tree.html |  4 +-
 .../hadoop/hbase/mob/mapreduce/Sweeper.html |333 -
 .../hbase/mob/mapreduce/class-use/Sweeper.html  |125 -
 .../hbase/mob/mapreduce/package-frame.html  | 20 -
 .../hbase/mob/mapreduce/package-summary.html|145 -
 .../hbase/mob/mapreduce/package-tree.html   |142 -
 .../hadoop/hbase/mob/mapreduce/package-use.html |125 -
 .../hadoop/hbase/namespace/package-summary.html |  4 +-
 .../hadoop/hbase/namespace/package-tree.html|  4 +-
 .../hadoop/hbase/security/class-use/User.html   |  9 +-
 .../org/apache/hadoop/hbase/util/Counter.html   | 36 +-
 .../hadoop/hbase/util/class-use/Pair.html   | 11 +-
 apidocs/overview-frame.html |  1 -
 apidocs/overview-summary.html   | 52 +-
 apidocs/overview-tree.html  |  8 +-
 apidocs/package-list|  1 -
 .../org/apache/hadoop/hbase/ProcedureInfo.html  |  2 +-
 .../org/apache/hadoop/hbase/client/Admin.html   |  2 +-
 .../hadoop/hbase/client/AsyncConnection.html|134 +
 .../apache/hadoop/hbase/client/AsyncTable.html  |198 +
 .../hbase/client/AsyncTableRegionLocator.html   |132 +
 .../BufferedMutator.ExceptionListener.html  | 26 +-
 .../hadoop/hbase/client/BufferedMutator.html| 26 +-
 .../hadoop/hbase/client/ConnectionFactory.html  |461 +-
 .../org/apache/hadoop/hbase/client/Get.html |783 +-
 ...ableMultiplexer.HTableMultiplexerStatus.html |410 +-
 .../hadoop/hbase/client/HTableMultiplexer.html  |410 +-
 

[43/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/org/apache/hadoop/hbase/filter/FilterList.html
--
[Generated Javadoc diff for apidocs/org/apache/hadoop/hbase/filter/FilterList.html; the archive stripped the HTML markup, so only a summary is kept. The class hierarchy now shows org.apache.hadoop.hbase.filter.FilterBase between Filter and FilterList, and the declaration reads "public final class FilterList extends FilterBase" instead of "extends Filter". FilterList is still described as an implementation of Filter representing an ordered List of Filters evaluated with a specified boolean operator, FilterList.Operator.MUST_PASS_ALL (AND) or FilterList.Operator.MUST_PASS_ONE (OR). The method summary gains addFilter(List<Filter> filters), and the descriptions of filterAllRemaining, filterRow, filterRowKey, getNextCellHint, hasFilterRow, isFamilyEssential, reset, toByteArray, and toString now show the inherited FilterBase default wording. The rest of the message is truncated by the archive.]

[40/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/src-html/org/apache/hadoop/hbase/client/ConnectionFactory.html
--
[Annotated-source Javadoc diff for apidocs/src-html/org/apache/hadoop/hbase/client/ConnectionFactory.html; the archive stripped the HTML markup, so only a summary is kept. The class gains an import of org.apache.hadoop.hbase.util.ReflectionUtils and a new public constant HBASE_CLIENT_ASYNC_CONNECTION_IMPL = "hbase.client.async.connection.impl"; the rest of the visible hunk is the existing class Javadoc shifted down. That Javadoc still describes ConnectionFactory as a non-instantiable class that manages creation of Connections, with the caller responsible for the connection's lifecycle: obtain a Table with Connection.getTable(TableName), use it for a single operation and a single thread, and close both the table and the connection; Admin and RegionLocator implementations are obtained the same way, and createConnection() with no arguments uses a default HBaseConfiguration. The rest of the message is truncated by the archive.]
[37/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/src-html/org/apache/hadoop/hbase/client/Query.html
--
[Annotated-source Javadoc diff for apidocs/src-html/org/apache/hadoop/hbase/client/Query.html; the archive stripped the HTML markup, so only a summary is kept. Query gains a protected Boolean loadColumnFamiliesOnDemand field (default null) and three methods: setLoadColumnFamiliesOnDemand(boolean), getLoadColumnFamiliesOnDemandValue() for the raw, possibly-null setting, and doLoadColumnFamiliesOnDemand() for the effective value. The new Javadoc explains that on-demand column-family loading (cluster default false) defers loading a column family until it is actually needed, e.g. when filtering on one column, the other families are loaded only for rows that end up in the result. With column-specific filters such as SingleColumnValueFilter with filterIfMissing == true this can be a large performance win when one family holds a lot of data, but it can also return inconsistent results: a concurrent update to both families can surface a row state that never existed, and a concurrent split on a table with more than two column families can leave some rows missing families. The existing setColumnFamilyTimeRange/getColumnFamilyTimeRange methods are unchanged and merely shift down in the rendered source.]
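To make the trade-off concrete, a short sketch of a scan that opts in to on-demand column-family loading; Scan extends Query, so it picks up the new setter. The family and qualifier names and values are placeholders, and the filter mirrors the SingleColumnValueFilter-with-filterIfMissing case called out in the Javadoc.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class OnDemandCfLoadingSketch {
  static Scan buildScan() {
    // Filter on a small "meta" family; with on-demand loading enabled, values in
    // the large "blob" family are only fetched for rows that pass the filter.
    SingleColumnValueFilter filter = new SingleColumnValueFilter(
        Bytes.toBytes("meta"), Bytes.toBytes("state"), CompareOp.EQUAL, Bytes.toBytes("READY"));
    filter.setFilterIfMissing(true); // the filterIfMissing == true case from the Javadoc

    Scan scan = new Scan();
    scan.addFamily(Bytes.toBytes("meta"));
    scan.addFamily(Bytes.toBytes("blob"));
    scan.setFilter(filter);
    // The setter now lives on Query, so Scan and Get both expose it.
    scan.setLoadColumnFamiliesOnDemand(true);
    return scan;
  }
}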
 
 
 


[25/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/KeyValue.RawBytesComparator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/KeyValue.RawBytesComparator.html 
b/devapidocs/org/apache/hadoop/hbase/KeyValue.RawBytesComparator.html
index f600bb9..9dd3cf6 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeyValue.RawBytesComparator.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeyValue.RawBytesComparator.html
@@ -126,7 +126,7 @@ var activeTableTab = "activeTableTab";
 
 
 http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
-public static class KeyValue.RawBytesComparator
+public static class KeyValue.RawBytesComparator
 extends KeyValue.KVComparator
 
 
@@ -250,7 +250,7 @@ extends 
 
 RawBytesComparator
-publicRawBytesComparator()
+publicRawBytesComparator()
 Deprecated.
 
 
@@ -268,7 +268,7 @@ extends 
 
 getLegacyKeyComparatorName
-publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetLegacyKeyComparatorName()
+publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetLegacyKeyComparatorName()
 Deprecated.
 The HFileV2 file format's trailer contains this class name. 
 We reinterpret this and
  instantiate the appropriate comparator.
@@ -288,7 +288,7 @@ extends 
 compareFlatKey
 http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
-publicintcompareFlatKey(byte[]left,
+publicintcompareFlatKey(byte[]left,
   intloffset,
   intllength,
   byte[]right,
@@ -312,7 +312,7 @@ publicint
 
 compare
-publicintcompare(Cellleft,
+publicintcompare(Cellleft,
Cellright)
 Deprecated.
 Description copied from 
class:KeyValue.KVComparator
@@ -332,7 +332,7 @@ publicint
 
 compareOnlyKeyPortion
-publicintcompareOnlyKeyPortion(Cellleft,
+publicintcompareOnlyKeyPortion(Cellleft,
  Cellright)
 Deprecated.
 
@@ -347,7 +347,7 @@ publicint
 
 calcIndexKey
-publicbyte[]calcIndexKey(byte[]lastKeyOfPreviousBlock,
+publicbyte[]calcIndexKey(byte[]lastKeyOfPreviousBlock,
byte[]firstKeyInBlock)
 Deprecated.
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/KeyValue.RowOnlyComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValue.RowOnlyComparator.html 
b/devapidocs/org/apache/hadoop/hbase/KeyValue.RowOnlyComparator.html
index ddee06b..dfd9d4e 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeyValue.RowOnlyComparator.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeyValue.RowOnlyComparator.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class KeyValue.RowOnlyComparator
+public static class KeyValue.RowOnlyComparator
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">ComparatorKeyValue
 Comparator that compares row component only of a 
KeyValue.
@@ -215,7 +215,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Comparato
 
 
 comparator
-finalKeyValue.KVComparator comparator
+finalKeyValue.KVComparator comparator
 
 
 
@@ -232,7 +232,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Comparato
 
 
 RowOnlyComparator
-publicRowOnlyComparator(KeyValue.KVComparatorc)
+publicRowOnlyComparator(KeyValue.KVComparatorc)
 
 
 
@@ -249,7 +249,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Comparato
 
 
 compare
-public int compare(KeyValue left,
+public int compare(KeyValue left,
    KeyValue right)
 
 Specified by:

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html 
b/devapidocs/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html
index 14fa975..8f6c085 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static interface KeyValue.SamePrefixComparator<T>
+public static interface KeyValue.SamePrefixComparator<T>
 Avoids redundant comparisons for better 

[49/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 41cd630..6603fac 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -1,30 +1,28 @@
 %PDF-1.4
 %����
 1 0 obj
-<< /Title 

+<< /Title (Apache HBase  Reference Guide)
 /Author (Apache HBase Team)
-/Creator (Asciidoctor PDF 1.5.0.alpha.11, based on Prawn 1.3.0)
+/Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
 /Producer (Apache HBase Team)
-/CreationDate (D:20161009074353+00'00')
-/ModDate (D:20161009074353+00'00')
+/CreationDate (D:20161017144413+00'00')
+/ModDate (D:20161017144413+00'00')
 >>
 endobj
 2 0 obj
 << /Type /Catalog
 /Pages 3 0 R
-/Names 24 0 R
-/Outlines 4237 0 R
-/PageLabels 4443 0 R
+/Names 25 0 R
+/Outlines 4048 0 R
+/PageLabels 4255 0 R
 /PageMode /UseOutlines
-/OpenAction [7 0 R /FitH 842.89]
-/ViewerPreferences << /DisplayDocTitle true
->>
+/ViewerPreferences [/FitWindow]
 >>
 endobj
 3 0 obj
 << /Type /Pages
-/Count 638
-/Kids [7 0 R 10 0 R 12 0 R ...]

[14/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.html
index d91f532..3dfcedd 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":9,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":9,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10};
+var methods = 
{"i0":9,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":9,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -49,7 +49,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-PrevClass
+PrevClass
 NextClass
 
 
@@ -315,29 +315,37 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 numTries
 
 
+protected int
+operationTimeout
+
+
 protected long
 pause
 
-
+
 protected http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ExecutorService
 pool
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 PRIMARY_CALL_TIMEOUT_KEY
 
-
+
 protected long
 primaryCallTimeoutMicroseconds
 
-
+
 protected RpcRetryingCallerFactory
 rpcCallerFactory
 
-
+
 protected RpcControllerFactory
 rpcFactory
 
+
+protected int
+rpcTimeout
+
 
 protected int
 serverTrackerTimeout
@@ -376,10 +384,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 protected int
 thresholdToLogUndoneTaskDetails
 
-
-protected int
-timeout
-
 
 
 
@@ -395,13 +399,14 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 Constructor and Description
 
 
-AsyncProcess(ClusterConnection hc,
+AsyncProcess(ClusterConnection hc,
             org.apache.hadoop.conf.Configuration conf,
             ExecutorService pool,
             RpcRetryingCallerFactory rpcCaller,
             boolean useGlobalErrors,
             RpcControllerFactory rpcFactory,
-            int rpcTimeout)
+            int rpcTimeout,
+            int operationTimeout)
 
 
 
@@ -438,11 +443,12 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 Object[] results,
 boolean needResults,
 CancellableRegionServerCallable callable,
-int curTimeout)
+int rpcTimeout)


 protected RpcRetryingCaller<AbstractResponse>
-createCaller(CancellableRegionServerCallable callable)
+createCaller(CancellableRegionServerCallable callable,
+             int rpcTimeout)
 Create a caller.
 
 
@@ -495,6 +501,14 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 ActionRowaction)
 
 
+void
+setOperationTimeout(int operationTimeout)
+
+
+void
+setRpcTimeout(int rpcTimeout)
+
+
 <CResult> AsyncRequestFuture
 submit(ExecutorService pool,
        TableName tableName,
@@ -505,7 +519,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 See submit(ExecutorService,
 TableName, RowAccess, boolean, Batch.Callback, boolean).
 
 
-
+
 <CResult> AsyncRequestFuture
 submit(ExecutorService pool,
        TableName tableName,
@@ -516,7 +530,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 Extract from the rows list what we can submit.
 
 
-
+
 <CResult> AsyncRequestFuture
 submit(TableName tableName,
        RowAccess<? extends Row> rows,
@@ -526,7 +540,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 See #submit(ExecutorService, TableName, RowAccess, boolean, 
Batch.Callback, boolean).
 
 
-
+
 <CResult> AsyncRequestFuture
 submitAll(ExecutorService pool,

[34/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
index 07f6210..6d25806 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
@@ -370,881 +370,904 @@
 362   * @param regionLocator region 
locator
 363   * @param silence true to ignore 
unmatched column families
 364   * @param copyFile always copy hfiles 
if true
-365   * @throws TableNotFoundException if 
table does not yet exist
-366   */
-367  public void doBulkLoad(Mapbyte[], 
ListPath map, final Admin admin, Table table,
-368  RegionLocator regionLocator, 
boolean silence, boolean copyFile)
-369  throws 
TableNotFoundException, IOException {
-370if 
(!admin.isTableAvailable(regionLocator.getName())) {
-371  throw new 
TableNotFoundException("Table " + table.getName() + " is not currently 
available.");
-372}
-373// LQI queue does not need to be 
threadsafe -- all operations on this queue
-374// happen in this thread
-375    Deque<LoadQueueItem> queue = new LinkedList<>();
-376ExecutorService pool = null;
-377SecureBulkLoadClient secureClient = 
null;
-378try {
-379  prepareHFileQueue(map, table, 
queue, silence);
-380  if (queue.isEmpty()) {
-381LOG.warn("Bulk load operation did 
not get any files to load");
-382return;
-383  }
-384  pool = createExecutorService();
-385  secureClient = new 
SecureBulkLoadClient(table.getConfiguration(), table);
-386      for (Map.Entry<byte[], List<Path>> entry : map.entrySet()) {
-387for (Path p : entry.getValue()) 
{
-388  fs = 
p.getFileSystem(table.getConfiguration());
-389  break;
-390}
-391  }
-392  performBulkLoad(admin, table, 
regionLocator, queue, pool, secureClient, copyFile);
-393} finally {
-394  cleanup(admin, queue, pool, 
secureClient);
-395}
-396  }
-397
-398  /**
-399   * Perform a bulk load of the given 
directory into the given
-400   * pre-existing table.  This method is 
not threadsafe.
-401   *
-402   * @param hfofDir the directory that 
was provided as the output path
-403   *   of a job using HFileOutputFormat
-404   * @param admin the Admin
-405   * @param table the table to load 
into
-406   * @param regionLocator region 
locator
-407   * @param silence true to ignore 
unmatched column families
-408   * @param copyFile always copy hfiles 
if true
-409   * @throws TableNotFoundException if 
table does not yet exist
-410   */
-411  public void doBulkLoad(Path hfofDir, 
final Admin admin, Table table,
-412  RegionLocator regionLocator, 
boolean silence, boolean copyFile)
-413  throws TableNotFoundException, 
IOException {
-414if 
(!admin.isTableAvailable(regionLocator.getName())) {
-415  throw new 
TableNotFoundException("Table " + table.getName() + " is not currently 
available.");
-416}
-417
-418/*
-419 * Checking hfile format is a 
time-consuming operation, we should have an option to skip
-420 * this step when bulkloading 
millions of HFiles. See HBASE-13985.
-421 */
-422boolean validateHFile = 
getConf().getBoolean("hbase.loadincremental.validate.hfile", true);
-423if (!validateHFile) {
-424  LOG.warn("You are skipping HFiles 
validation, it might cause some data loss if files " +
-425  "are not correct. If you fail 
to read data from your table after using this " +
-426  "option, consider removing the 
files and bulkload again without this option. " +
-427  "See HBASE-13985");
-428}
-429// LQI queue does not need to be 
threadsafe -- all operations on this queue
-430// happen in this thread
-431    Deque<LoadQueueItem> queue = new LinkedList<>();
-432ExecutorService pool = null;
-433SecureBulkLoadClient secureClient = 
null;
-434try {
-435  prepareHFileQueue(hfofDir, table, 
queue, validateHFile, silence);
-436
-437  if (queue.isEmpty()) {
-438LOG.warn("Bulk load operation did 
not find any files to load in " +
-439"directory " + hfofDir != 
null ? hfofDir.toUri() : "" + ".  Does it contain files in " +
-440"subdirectories that 
correspond to column family names?");
-441return;
-442  }
-443  pool = createExecutorService();
-444  secureClient = new 
SecureBulkLoadClient(table.getConfiguration(), table);
-445  performBulkLoad(admin, table, 
regionLocator, queue, pool, secureClient, copyFile);
-446} finally {
-447  cleanup(admin, queue, pool, 
secureClient);
-448}
-449  }
-450
-451  void performBulkLoad(final Admin admin, 
Table table, 
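
The removed hunk above documents the public bulk-load entry points of LoadIncrementalHFiles; only the removed side of the hunk is visible in this message, so the exact current signatures may differ slightly. As a hedged usage sketch of the Path-based overload shown above (the table name and HFile directory are placeholders):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;

public class BulkLoadExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    TableName tn = TableName.valueOf("example_table");
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Admin admin = conn.getAdmin();
         Table table = conn.getTable(tn);
         RegionLocator locator = conn.getRegionLocator(tn)) {
      LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
      // hfofDir is the HFileOutputFormat output directory; silence=false keeps the
      // unmatched-column-family check, copyFile=false moves files rather than copying.
      loader.doBulkLoad(new Path("/tmp/hfiles"), admin, table, locator, false, false);
    }
  }
}
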

[30/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index ee02ddd..ec2440d 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -280,10 +280,10 @@
 Warnings
 Errors
 
-2014
+2023
 0
 0
-13783
+13747
 
 Files
 
@@ -626,7 +626,7 @@
 org/apache/hadoop/hbase/client/AsyncProcess.java
 0
 0
-10
+9
 
 org/apache/hadoop/hbase/client/AsyncRequestFuture.java
 0
@@ -636,82 +636,92 @@
 org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
 0
 0
-24
+22
 
-org/apache/hadoop/hbase/client/BatchErrors.java
+org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.java
 0
 0
 1
 
-org/apache/hadoop/hbase/client/BufferedMutator.java
+org/apache/hadoop/hbase/client/BatchErrors.java
 0
 0
 1
 
-org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
+org/apache/hadoop/hbase/client/BufferedMutator.java
 0
 0
-2
+1
 
+org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
+0
+0
+1
+
 org/apache/hadoop/hbase/client/CancellableRegionServerCallable.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ClientIdGenerator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ClientScanner.java
 0
 0
 206
-
+
 org/apache/hadoop/hbase/client/ClientServiceCallable.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ClientSimpleScanner.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ClientSmallReversedScanner.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/ClientSmallScanner.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/ClusterStatusListener.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/CompactType.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ConnectionConfiguration.java
 0
 0
 1
+
+org/apache/hadoop/hbase/client/ConnectionFactory.java
+0
+0
+1
 
 org/apache/hadoop/hbase/client/ConnectionImplementation.java
 0
 0
-8
+3
 
 org/apache/hadoop/hbase/client/ConnectionUtils.java
 0
@@ -761,7 +771,7 @@
 org/apache/hadoop/hbase/client/HTableMultiplexer.java
 0
 0
-6
+5
 
 org/apache/hadoop/hbase/client/HTableWrapper.java
 0
@@ -926,7 +936,7 @@
 org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
 0
 0
-3
+2
 
 org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
 0
@@ -2436,7 +2446,7 @@
 org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
 0
 0
-4
+7
 
 org/apache/hadoop/hbase/mapreduce/MultiHFileOutputFormat.java
 0
@@ -2686,7 +2696,7 @@
 org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
 0
 0
-10
+12
 
 org/apache/hadoop/hbase/master/MasterDumpServlet.java
 0
@@ -3006,7 +3016,7 @@
 org/apache/hadoop/hbase/master/procedure/DispatchMergingRegionsProcedure.java
 0
 0
-57
+58
 
 org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
 0
@@ -3021,12 +3031,12 @@
 org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java
 0
 0
-1
+2
 
 org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
 0
 0
-21
+17
 
 org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java
 0
@@ -3148,26 +3158,6 @@
 0
 1
 
-org/apache/hadoop/hbase/mob/mapreduce/MemStoreWrapper.java
-0
-0
-5
-
-org/apache/hadoop/hbase/mob/mapreduce/SweepJob.java
-0
-0
-8
-
-org/apache/hadoop/hbase/mob/mapreduce/SweepReducer.java
-0
-0
-9
-
-org/apache/hadoop/hbase/mob/mapreduce/Sweeper.java
-0
-0
-6
-
 org/apache/hadoop/hbase/monitoring/LogMonitoring.java
 0
 0
@@ -3298,20 +3288,30 @@
 0
 10
 
+org/apache/hadoop/hbase/procedure2/AbstractProcedureScheduler.java
+0
+0
+4
+
+org/apache/hadoop/hbase/procedure2/BadProcedureException.java
+0
+0
+1
+
 org/apache/hadoop/hbase/procedure2/Procedure.java
 0
 0
-3
+2
 
 org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
 0
 0
-11
+10
 
 org/apache/hadoop/hbase/procedure2/ProcedureUtil.java
 0
 0
-2
+3
 
 org/apache/hadoop/hbase/procedure2/RemoteProcedureException.java
 0
@@ -3353,25 +3353,15 @@
 0
 9
 
-org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java
-0
-0
-1
-
 org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java
 0
 0
-8
-
+9
+
 org/apache/hadoop/hbase/procedure2/util/StringUtils.java
 0
 0
 4
-
-org/apache/hadoop/hbase/procedure2/util/TimeoutBlockingQueue.java
-0
-0
-5
 
 org/apache/hadoop/hbase/protobuf/ProtobufMagic.java
 0
@@ -3416,3263 +3406,3268 @@
 org/apache/hadoop/hbase/quotas/NoopQuotaLimiter.java
 0
 0
-1
+2
 
 org/apache/hadoop/hbase/quotas/QuotaCache.java
 0
 0
 2
 
-org/apache/hadoop/hbase/quotas/QuotaLimiterFactory.java
+org/apache/hadoop/hbase/quotas/QuotaLimiter.java
 0
 0
 1
 
+org/apache/hadoop/hbase/quotas/QuotaLimiterFactory.java
+0
+0
+1
+
 org/apache/hadoop/hbase/quotas/QuotaRetriever.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/quotas/QuotaSettings.java
 0
 0
 6
-
+
 

[33/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/src-html/org/apache/hadoop/hbase/mob/mapreduce/Sweeper.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mob/mapreduce/Sweeper.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mob/mapreduce/Sweeper.html
deleted file mode 100644
index 2c22213..000
--- a/apidocs/src-html/org/apache/hadoop/hbase/mob/mapreduce/Sweeper.html
+++ /dev/null
@@ -1,198 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-Source code
-
-
-
-
-001/**
-002 *
-003 * Licensed to the Apache Software 
Foundation (ASF) under one
-004 * or more contributor license 
agreements.  See the NOTICE file
-005 * distributed with this work for 
additional information
-006 * regarding copyright ownership.  The 
ASF licenses this file
-007 * to you under the Apache License, 
Version 2.0 (the
-008 * "License"); you may not use this file 
except in compliance
-009 * with the License.  You may obtain a 
copy of the License at
-010 *
-011 * 
http://www.apache.org/licenses/LICENSE-2.0
-012 *
-013 * Unless required by applicable law or 
agreed to in writing, software
-014 * distributed under the License is 
distributed on an "AS IS" BASIS,
-015 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-016 * See the License for the specific 
language governing permissions and
-017 * limitations under the License.
-018 */
-019package 
org.apache.hadoop.hbase.mob.mapreduce;
-020
-021import java.io.IOException;
-022
-023import 
org.apache.hadoop.conf.Configuration;
-024import 
org.apache.hadoop.conf.Configured;
-025import org.apache.hadoop.fs.FileSystem;
-026import 
org.apache.hadoop.hbase.HBaseConfiguration;
-027import 
org.apache.hadoop.hbase.HColumnDescriptor;
-028import 
org.apache.hadoop.hbase.HTableDescriptor;
-029import 
org.apache.hadoop.hbase.TableName;
-030import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-031import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-032import 
org.apache.hadoop.hbase.client.Admin;
-033import 
org.apache.hadoop.hbase.client.Connection;
-034import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-035import 
org.apache.hadoop.hbase.client.HBaseAdmin;
-036import 
org.apache.hadoop.hbase.util.Bytes;
-037import org.apache.hadoop.util.Tool;
-038import 
org.apache.hadoop.util.ToolRunner;
-039import 
org.apache.zookeeper.KeeperException;
-040
-041/**
-042 * The sweep tool. It deletes the mob 
files that are not used and merges the small mob files to
-043 * bigger ones. Each run of this sweep 
tool only handles one column family. The runs on
-044 * the same column family are mutually 
exclusive. And the major compaction and sweep tool on the
-045 * same column family are mutually 
exclusive too.
-046 */
-047@InterfaceAudience.Public
-048@InterfaceStability.Evolving
-049public class Sweeper extends Configured 
implements Tool {
-050
-051  /**
-052   * Sweeps the mob files on one column 
family. It deletes the unused mob files and merges
-053   * the small mob files into bigger 
ones.
-054   * @param tableName The current table 
name in string format.
-055   * @param familyName The column family 
name.
-056   * @return 0 if success, 2 if job 
aborted with an exception, 3 if unable to start due to
-057   *   other compaction,4 if mr job was 
unsuccessful
-058   * @throws IOException
-059   * @throws InterruptedException
-060   * @throws ClassNotFoundException
-061   * @throws KeeperException
-062   * @throws ServiceException
-063   */
-064  int sweepFamily(String tableName, 
String familyName) throws IOException, InterruptedException,
-065  ClassNotFoundException, 
KeeperException {
-066Configuration conf = getConf();
-067// make sure the target HBase 
exists.
-068HBaseAdmin.available(conf);
-069Connection connection = 
ConnectionFactory.createConnection(getConf());
-070Admin admin = 
connection.getAdmin();
-071try {
-072  FileSystem fs = 
FileSystem.get(conf);
-073  TableName tn = 
TableName.valueOf(tableName);
-074  HTableDescriptor htd = 
admin.getTableDescriptor(tn);
-075  HColumnDescriptor family = 
htd.getFamily(Bytes.toBytes(familyName));
-076  if (family == null || 
!family.isMobEnabled()) {
-077  throw new IOException("Column 
family " + familyName + " is not a MOB column family");
-078  }
-079  SweepJob job = new SweepJob(conf, 
fs);
-080  // Run the sweeping
-081  return job.sweep(tn, family);
-082} catch (Exception e) {
-083  System.err.println("Job aborted due 
to exception " + e);
-084  return 2; // job failed
-085} finally {
-086  try {
-087admin.close();
-088  } catch (IOException e) {
-089System.out.println("Failed to 
close the HBaseAdmin: " + e.getMessage());
-090  }
-091  try {
-092connection.close();
-093  } catch (IOException e) {
-094System.out.println("Failed to 
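
This page is being removed together with the MOB sweep tool, but the structure visible in the source above, a Hadoop Tool wired up through ToolRunner and HBaseConfiguration, is still the standard way to package an HBase admin utility. A minimal, illustrative skeleton of that pattern (ExampleTool is not an HBase class):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

// Illustrative skeleton only: getConf() supplies the cluster configuration and
// run(args) returns the process exit code, as the removed Sweeper did.
public class ExampleTool extends Configured implements Tool {
  @Override
  public int run(String[] args) throws Exception {
    if (args.length != 2) {
      System.err.println("Usage: ExampleTool <tableName> <familyName>");
      return 1;
    }
    Configuration conf = getConf();
    System.out.println("Would operate on " + args[0] + "/" + args[1]
        + " using quorum " + conf.get("hbase.zookeeper.quorum"));
    return 0;
  }

  public static void main(String[] args) throws Exception {
    System.exit(ToolRunner.run(HBaseConfiguration.create(), new ExampleTool(), args));
  }
}
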

[26/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/Abortable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/Abortable.html 
b/devapidocs/org/apache/hadoop/hbase/Abortable.html
index ceb5289..411d627 100644
--- a/devapidocs/org/apache/hadoop/hbase/Abortable.html
+++ b/devapidocs/org/apache/hadoop/hbase/Abortable.html
@@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab";
 
 
 All Known Implementing Classes:
-ConnectionImplementation, 
ConnectionUtils.MasterlessConnection, CoprocessorHConnection, DumpReplicationQueues.WarnOnlyAbortable,
 HBaseAdmin, HBaseAdmin.ThrowableAbortable, HBaseInterClusterReplicationEndpoint,
 HBaseReplicationEndpoint, HMaster, HMasterCommandLine.LocalHMaster, HRegionServer, RegionReplicaReplicationEndpoint,
 ReplicationHFileCleaner.WarnOnlyAbortable,
 ReplicationLogCleaner.WarnOnlyAbortable,
 ReplicationPeerZKImpl, ReplicationSyncUp.DummyServer,
 SweepJob.DummyMobAbortable, ZooKeeperKeepAliveConnection, ZooKeeperWatcher
+ConnectionImplementation, 
ConnectionUtils.MasterlessConnection, CoprocessorHConnection, DumpReplicationQueues.WarnOnlyAbortable,
 HBaseAdmin, HBaseAdmin.ThrowableAbortable, HBaseInterClusterReplicationEndpoint,
 HBaseReplicationEndpoint, HMaster, HMasterCommandLine.LocalHMaster, HRegionServer, RegionReplicaReplicationEndpoint,
 ReplicationHFileCleaner.WarnOnlyAbortable,
 ReplicationLogCleaner.WarnOnlyAbortable,
 ReplicationPeerZKImpl, ReplicationSyncUp.DummyServer,
 ZooKeeperKeepAliveConnection, ZooKeeperWatcher
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/HBaseIOException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/HBaseIOException.html 
b/devapidocs/org/apache/hadoop/hbase/HBaseIOException.html
index 6443399..d7cabbf 100644
--- a/devapidocs/org/apache/hadoop/hbase/HBaseIOException.html
+++ b/devapidocs/org/apache/hadoop/hbase/HBaseIOException.html
@@ -122,7 +122,7 @@
 
 
 Direct Known Subclasses:
-CallCancelledException, CallTimeoutException, CellScannerButNoCodecException, ClusterSchemaException, CodecException, CorruptedWALProcedureStoreException,
 DamagedWALException, DoNotRetryIOException, FailedServerException, FallbackDisallowedException, 
LeaseNotRecoveredException, 
PleaseHoldException, RegionException, ServiceNotRunningException, StoppedRpcClientException, TableInfoMissingException, UnexpectedStateException, WrongRowIOException
+BadProcedureException, 
CallCancelledException, CallTimeoutException, CellScannerButNoCodecException, ClusterSchemaException, CodecException, CorruptedWALProcedureStoreException,
 DamagedWALException, DoNotRetryIOException, FailedServerException, FallbackDisallowedException, 
LeaseNotRecoveredException, 
PleaseHoldException, RegionException, ServiceNotRunningException, StoppedRpcClientException, TableInfoMissingException, UnexpectedStateException, WrongRowIOException
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/KeyValue.KVComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValue.KVComparator.html 
b/devapidocs/org/apache/hadoop/hbase/KeyValue.KVComparator.html
index 9af407e..769c286 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeyValue.KVComparator.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeyValue.KVComparator.html
@@ -125,7 +125,7 @@ var activeTableTab = "activeTableTab";
 
 
 http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
-public static class KeyValue.KVComparator
+public static class KeyValue.KVComparator
 extends Object
 implements org.apache.hadoop.io.RawComparator<Cell>, KeyValue.SamePrefixComparator<byte[]>
 Compare KeyValues.  When we compare KeyValues, we only 
compare the Key
@@ -446,7 +446,7 @@ implements org.apache.hadoop.io.RawComparator
 
 KVComparator
-public KVComparator()
+public KVComparator()
 Deprecated.
 
 
@@ -464,7 +464,7 @@ implements org.apache.hadoop.io.RawComparator
 
 getLegacyKeyComparatorName
-public String getLegacyKeyComparatorName()
+public String getLegacyKeyComparatorName()
 Deprecated.
 The HFileV2 file format's trailer contains this class name. 
 We reinterpret this and
  instantiate the appropriate comparator.
@@ -481,7 +481,7 @@ implements org.apache.hadoop.io.RawComparator
 
 compare
-public int compare(byte[] l,

[15/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.RowCheckerHost.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.RowCheckerHost.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.RowCheckerHost.html
index a8ade35..3087937 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.RowCheckerHost.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.RowCheckerHost.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class AsyncProcess.RowCheckerHost
+static class AsyncProcess.RowCheckerHost
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 Collect all advices from checkers and make the final 
decision.
 
@@ -211,7 +211,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 checkers
-private final List<AsyncProcess.RowChecker> checkers
+private final List<AsyncProcess.RowChecker> checkers
 
 
 
@@ -220,7 +220,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 isEnd
-private boolean isEnd
+private boolean isEnd
 
 
 
@@ -237,7 +237,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 RowCheckerHost
-RowCheckerHost(List<AsyncProcess.RowChecker> checkers)
+RowCheckerHost(List<AsyncProcess.RowChecker> checkers)
 
 
 
@@ -254,7 +254,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 reset
-void reset()
+void reset()
 throws http://docs.oracle.com/javase/8/docs/api/java/io/InterruptedIOException.html?is-external=true;
 title="class or interface in java.io">InterruptedIOException
 
 Throws:
@@ -268,7 +268,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 canTakeOperation
-AsyncProcess.RowChecker.ReturnCode canTakeOperation(HRegionLocation loc,
+AsyncProcess.RowChecker.ReturnCode canTakeOperation(HRegionLocation loc,
                                                     long rowSize)
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.SubmittedSizeChecker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.SubmittedSizeChecker.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.SubmittedSizeChecker.html
index e29304a..b6ea032 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.SubmittedSizeChecker.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.SubmittedSizeChecker.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class AsyncProcess.SubmittedSizeChecker
+static class AsyncProcess.SubmittedSizeChecker
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements AsyncProcess.RowChecker
 limit the heapsize of total submitted data.
@@ -243,7 +243,7 @@ implements 
 
 maxHeapSizeSubmit
-private final long maxHeapSizeSubmit
+private final long maxHeapSizeSubmit
 
 
 
@@ -252,7 +252,7 @@ implements 
 
 heapSize
-private long heapSize
+private long heapSize
 
 
 
@@ -269,7 +269,7 @@ implements 
 
 SubmittedSizeChecker
-SubmittedSizeChecker(long maxHeapSizeSubmit)
+SubmittedSizeChecker(long maxHeapSizeSubmit)
 
 
 
@@ -286,7 +286,7 @@ implements 
 
 canTakeOperation
-public AsyncProcess.RowChecker.ReturnCode canTakeOperation(HRegionLocation loc,
+public AsyncProcess.RowChecker.ReturnCode canTakeOperation(HRegionLocation loc,
                                                            long rowSize)
 
 Specified by:
@@ -300,7 +300,7 @@ implements 
 
 notifyFinal
-public void notifyFinal(AsyncProcess.RowChecker.ReturnCode code,
+public void notifyFinal(AsyncProcess.RowChecker.ReturnCode code,
                         HRegionLocation loc,
                         long rowSize)
 Description copied from 
interface:AsyncProcess.RowChecker
@@ -319,7 +319,7 @@ implements 
 
 reset
-public void reset()
+public void reset()
 Description copied from 
interface:AsyncProcess.RowChecker
 Reset the inner state.
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.TaskCountChecker.html
--
diff --git 
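
RowCheckerHost above "collects all advices from checkers and makes the final decision", with SubmittedSizeChecker (limiting the heap size of submitted data) and TaskCountChecker as concrete checkers. The composition idea, sketched with simplified, illustrative types rather than the real AsyncProcess inner classes (the real methods take an HRegionLocation, not a location string):

import java.util.List;

// Simplified stand-ins for AsyncProcess.RowChecker and its ReturnCode.
enum ReturnCode { INCLUDE, SKIP, END }

interface RowChecker {
  ReturnCode canTakeOperation(String location, long rowSize);
  void notifyFinal(ReturnCode code, String location, long rowSize);
  void reset();
}

final class RowCheckerHost {
  private final List<RowChecker> checkers;

  RowCheckerHost(List<RowChecker> checkers) {
    this.checkers = checkers;
  }

  ReturnCode canTakeOperation(String location, long rowSize) {
    // Ask every checker, keep the most restrictive advice (END > SKIP > INCLUDE),
    // then tell all checkers what was finally decided.
    ReturnCode result = ReturnCode.INCLUDE;
    for (RowChecker checker : checkers) {
      ReturnCode code = checker.canTakeOperation(location, rowSize);
      if (code.ordinal() > result.ordinal()) {
        result = code;
      }
    }
    for (RowChecker checker : checkers) {
      checker.notifyFinal(result, location, rowSize);
    }
    return result;
  }
}
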

[48/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apache_hbase_reference_guide.pdfmarks
--
diff --git a/apache_hbase_reference_guide.pdfmarks 
b/apache_hbase_reference_guide.pdfmarks
index 43c257d..1aa9061 100644
--- a/apache_hbase_reference_guide.pdfmarks
+++ b/apache_hbase_reference_guide.pdfmarks
@@ -1,9 +1,9 @@
-[ /Title 

+[ /Title (Apache HBase  Reference Guide)
   /Author (Apache HBase Team)
-  /Subject null
-  /Keywords null
-  /ModDate (D:20161009074600)
-  /CreationDate (D:20161009074600)
-  /Creator (Asciidoctor PDF 1.5.0.alpha.11, based on Prawn 1.3.0)
-  /Producer null
+  /Subject ()
+  /Keywords ()
+  /ModDate (D:20161017144547)
+  /CreationDate (D:20161017144547)
+  /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
+  /Producer ()
   /DOCINFO pdfmark

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/allclasses-frame.html
--
diff --git a/apidocs/allclasses-frame.html b/apidocs/allclasses-frame.html
index 492f79c..cb4fc54 100644
--- a/apidocs/allclasses-frame.html
+++ b/apidocs/allclasses-frame.html
@@ -15,6 +15,9 @@
 AccessDeniedException
 Admin
 Append
+AsyncConnection
+AsyncTable
+AsyncTableRegionLocator
 Attributes
 AuthUtil
 BadAuthException
@@ -306,7 +309,6 @@
 StructBuilder
 StructIterator
 SubstringComparator
-Sweeper
 Table
 TableExistsException
 TableInfoMissingException

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/allclasses-noframe.html
--
diff --git a/apidocs/allclasses-noframe.html b/apidocs/allclasses-noframe.html
index 0efd518..053863f 100644
--- a/apidocs/allclasses-noframe.html
+++ b/apidocs/allclasses-noframe.html
@@ -15,6 +15,9 @@
 AccessDeniedException
 Admin
 Append
+AsyncConnection
+AsyncTable
+AsyncTableRegionLocator
 Attributes
 AuthUtil
 BadAuthException
@@ -306,7 +309,6 @@
 StructBuilder
 StructIterator
 SubstringComparator
-Sweeper
 Table
 TableExistsException
 TableInfoMissingException

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/constant-values.html
--
diff --git a/apidocs/constant-values.html b/apidocs/constant-values.html
index 07faf6f..54bf2e4 100644
--- a/apidocs/constant-values.html
+++ b/apidocs/constant-values.html
@@ -2757,6 +2757,25 @@
 
 
 
+org.apache.hadoop.hbase.client.ConnectionFactory
+
+Modifier and Type
+Constant Field
+Value
+
+
+
+
+
+publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+HBASE_CLIENT_ASYNC_CONNECTION_IMPL
+"hbase.client.async.connection.impl"
+
+
+
+
+
+
 org.apache.hadoop.hbase.client.HTableMultiplexer
 
 Modifier and Type

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/index-all.html
--
diff --git a/apidocs/index-all.html b/apidocs/index-all.html
index 0a9fd51..da0c7c8 100644
--- a/apidocs/index-all.html
+++ b/apidocs/index-all.html
@@ -307,6 +307,8 @@
 Delete all columns of the specified family with a timestamp 
equal to
  the specified timestamp.
 
+addFilter(List<Filter>)
 - Method in class org.apache.hadoop.hbase.filter.FilterList
+
 addFilter(Filter)
 - Method in class org.apache.hadoop.hbase.filter.FilterList
 
 Add a filter.
@@ -487,6 +489,18 @@
 
 assign(byte[])
 - Method in interface org.apache.hadoop.hbase.client.Admin
 
+AsyncConnection - Interface in org.apache.hadoop.hbase.client
+
+The asynchronous version of Connection.
+
+AsyncTable - Interface in org.apache.hadoop.hbase.client
+
+The asynchronous version of Table.
+
+AsyncTableRegionLocator - Interface in org.apache.hadoop.hbase.client
+
+The asynchronous version of RegionLocator.
+
 ATTRIBUTE_SEPERATOR_CONF_KEY
 - Static variable in class org.apache.hadoop.hbase.mapreduce.ImportTsv
 
 Attributes - Interface in org.apache.hadoop.hbase.client
@@ -1897,6 +1911,19 @@
 
 CREATE_TABLE_CONF_KEY
 - Static variable in class org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles
 
+createAsyncConnection()
 - Static method in class org.apache.hadoop.hbase.client.ConnectionFactory
+
+Call ConnectionFactory.createAsyncConnection(Configuration)
 using default HBaseConfiguration.
+
+createAsyncConnection(Configuration)
 - Static method in class org.apache.hadoop.hbase.client.ConnectionFactory
+
+Call ConnectionFactory.createAsyncConnection(Configuration,
 User) using the given conf and a
+ User object created by UserProvider.
+
+createAsyncConnection(Configuration,
 User) - Static method in class org.apache.hadoop.hbase.client.ConnectionFactory
+
+Create a new AsyncConnection instance using the passed 
conf and user.
+
 createCell(byte[],
 byte[], byte[], long, byte, byte[]) - Static method in class 
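
The same index introduces the first cut of the asynchronous client: AsyncConnection, AsyncTable, AsyncTableRegionLocator and the ConnectionFactory.createAsyncConnection overloads, alongside the hbase.client.async.connection.impl constant seen earlier. A hedged sketch of how these pieces are meant to fit together; the return types and the AsyncTable methods used below are assumptions drawn from the index entries, and this API was still evolving at the time.

import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.AsyncTable;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

// Assumed shape of the early async client: createAsyncConnection(conf) returns an
// AsyncConnection, and AsyncTable#get returns a CompletableFuture<Result>.
public class AsyncGetExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (AsyncConnection conn = ConnectionFactory.createAsyncConnection(conf)) {
      AsyncTable table = conn.getTable(TableName.valueOf("example_table"));
      CompletableFuture<Result> future = table.get(new Get(Bytes.toBytes("row1")));
      Result result = future.join(); // blocking here only to keep the example short
      System.out.println("result: " + result);
    }
  }
}
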

[50/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/acid-semantics.html
--
diff --git a/acid-semantics.html b/acid-semantics.html
index e1a3825..dc87cb1 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase   
   Apache HBase (TM) ACID Properties
@@ -600,7 +600,7 @@ under the License. -->
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-10-09
+  Last Published: 
2016-10-17
 
 
 



[04/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/Get.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Get.html 
b/devapidocs/org/apache/hadoop/hbase/client/Get.html
index 0ede724..c75f7db 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Get.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Get.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":42,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":42,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":42,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":42,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -218,7 +218,7 @@ implements 
 
 Fields inherited from classorg.apache.hadoop.hbase.client.Query
-colFamTimeRangeMap,
 consistency,
 filter,
 targetReplicaId
+colFamTimeRangeMap,
 consistency,
 filter,
 loadColumnFamiliesOnDemand,
 targetReplicaId
 
 
 
@@ -445,35 +445,42 @@ implements 
 
 Get
+setLoadColumnFamiliesOnDemand(boolean value)
+Set the value indicating whether loading CFs on demand 
should be allowed (cluster
+ default is false).
+
+
+
+Get
 setMaxResultsPerColumnFamily(int limit)
 Set the maximum number of values to return per row per 
Column Family
 
 
-
+
 Get
 setMaxVersions()
 Get all available versions.
 
 
-
+
 Get
 setMaxVersions(int maxVersions)
 Get up to the specified number of versions of each 
column.
 
 
-
+
 Get
 setReplicaId(int Id)
 Specify region replica id where Query will fetch data 
from.
 
 
-
+
 Get
 setRowOffsetPerColumnFamily(int offset)
 Set offset for the row per Column Family.
 
 
-
+
 Get
 setTimeRange(long minStamp,
              long maxStamp)
@@ -481,13 +488,13 @@ implements 
 
 
-
+
 Get
 setTimeStamp(long timestamp)
 Get versions of columns with the specified timestamp.
 
 
-
+
 Map<String,Object>
 toMap(int maxCols)
 Compile the details beyond the scope of getFingerprint 
(row, columns,
@@ -500,7 +507,7 @@ implements 
 
 Methods inherited from classorg.apache.hadoop.hbase.client.Query
-getACL,
 getAuthorizations,
 getColumnFamilyTimeRange,
 getConsistency,
 getFilter,
 getIsolationLevel,
 getReplicaId
+doLoadColumnFamiliesOnDemand,
 getACL,
 getAuthorizations,
 getColumnFamilyTimeRange,
 getConsistency,
 getFilter,
 getIsolationLevel,
 getLoadColumnFamiliesOnDemandValue,
 get
 ReplicaId
 
 
 
@@ -680,7 +687,7 @@ implements 
 
 isCheckExistenceOnly
-public boolean isCheckExistenceOnly()
+public boolean isCheckExistenceOnly()
 
 
 
@@ -689,7 +696,7 @@ implements 
 
 setCheckExistenceOnly
-public Get setCheckExistenceOnly(boolean checkExistenceOnly)
+public Get setCheckExistenceOnly(boolean checkExistenceOnly)
 
 
 
@@ -699,7 +706,7 @@ implements 
 isClosestRowBefore
 http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
-public boolean isClosestRowBefore()
+public boolean isClosestRowBefore()
 Deprecated.since 2.0.0 and will be removed in 3.0.0
 This will always return the default value which is false as 
client cannot set the value to this
  property any more.
@@ -712,7 +719,7 @@ publicboolean
 setClosestRowBefore
 http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
-public Get setClosestRowBefore(boolean closestRowBefore)
+public Get setClosestRowBefore(boolean closestRowBefore)
 Deprecated.since 2.0.0 and will be removed in 3.0.0
 This is not used any more and does nothing. Use reverse 
scan instead.
 
@@ -723,7 +730,7 @@ public
 
 addFamily
-public Get addFamily(byte[] family)
+public Get addFamily(byte[] family)
 Get all columns from the specified family.
  
  Overrides previous calls to addColumn for this family.
@@ -741,7 +748,7 @@ public
 
 addColumn
-public Get addColumn(byte[] family,
+public Get addColumn(byte[] family,
   byte[] qualifier)
 Get the column from the specific family 
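
The only behavioural addition to Get in this hunk is setLoadColumnFamiliesOnDemand(boolean), inherited plumbing from Query; the rest of the churn is re-numbered anchors. A short, hedged example that strings together the methods listed above (row, family and qualifier names are placeholders):

import java.io.IOException;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.util.Bytes;

public class GetExample {
  public static Get buildGet() throws IOException {
    Get get = new Get(Bytes.toBytes("row1"));
    get.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q1")); // one column
    get.addFamily(Bytes.toBytes("meta"));                    // plus a whole family
    get.setTimeRange(0L, System.currentTimeMillis());        // half-open [minStamp, maxStamp)
    get.setMaxVersions(3);
    // New in this snapshot: allow loading column families on demand for this Get
    // (the cluster default is false, per the method description above).
    get.setLoadColumnFamiliesOnDemand(true);
    return get;
  }
}
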

[07/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/ClusterRegistryFactory.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/ClusterRegistryFactory.html 
b/devapidocs/org/apache/hadoop/hbase/client/ClusterRegistryFactory.html
new file mode 100644
index 000..bded8a8
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/client/ClusterRegistryFactory.html
@@ -0,0 +1,320 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+ClusterRegistryFactory (Apache HBase 2.0.0-SNAPSHOT API)
+
+
+
+
+
+var methods = {"i0":9};
+var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],8:["t4","Concrete Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+PrevClass
+NextClass
+
+
+Frames
+NoFrames
+
+
+AllClasses
+
+
+
+
+
+
+
+Summary:
+Nested|
+Field|
+Constr|
+Method
+
+
+Detail:
+Field|
+Constr|
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.client
+Class 
ClusterRegistryFactory
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.client.ClusterRegistryFactory
+
+
+
+
+
+
+
+
+@InterfaceAudience.Private
+final class ClusterRegistryFactory
+extends Object
+Get instance of configured Registry.
+
+
+
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+(package private) static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+REGISTRY_IMPL_CONF_KEY
+
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Modifier
+Constructor and Description
+
+
+private 
+ClusterRegistryFactory()
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All MethodsStatic MethodsConcrete Methods
+
+Modifier and Type
+Method and Description
+
+
+(package private) static ClusterRegistry
+getRegistry(org.apache.hadoop.conf.Configuration conf)
+
+
+
+
+
+
+Methods inherited from classjava.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or interface in java.lang">hashCode, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, http://docs.oracle.com/javase/8/docs/api/java/lang
 /Object.html?is-external=true#notifyAll--" title="class or interface in 
java.lang">notifyAll, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--;
 title="class or interface in java.lang">toString, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--;
 title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-;
 title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-;
 title="class or interface in java.lang">wait
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Field Detail
+
+
+
+
+
+REGISTRY_IMPL_CONF_KEY
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String REGISTRY_IMPL_CONF_KEY
+
+See Also:
+Constant
 Field Values
+
+
+
+
+
+
+
+
+
+
+Constructor Detail
+
+
+
+
+
+ClusterRegistryFactory
+privateClusterRegistryFactory()
+
+
+
+
+
+
+
+
+
+Method Detail
+
+
+
+
+
+getRegistry
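
ClusterRegistryFactory is documented only as "Get instance of configured Registry", keyed by REGISTRY_IMPL_CONF_KEY. The page does not show the implementation, so the following is just a guess at the usual shape of such a configuration-keyed factory (read a class name from the Configuration, fall back to a default, instantiate reflectively); the key string, class and method signatures below are placeholders, not the real ones.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;

// Guessed shape of a configuration-keyed factory; the key value and the generic
// signature are placeholders, not copied from ClusterRegistryFactory.
final class RegistryFactorySketch {
  static final String REGISTRY_IMPL_CONF_KEY = "hbase.client.registry.impl"; // placeholder

  private RegistryFactorySketch() {
  }

  static <T> T getRegistry(Configuration conf, Class<? extends T> defaultImpl, Class<T> baseType) {
    Class<? extends T> clazz = conf.getClass(REGISTRY_IMPL_CONF_KEY, defaultImpl, baseType);
    // ReflectionUtils.newInstance also injects the Configuration into Configurable implementations.
    return ReflectionUtils.newInstance(clazz, conf);
  }
}
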

[39/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/src-html/org/apache/hadoop/hbase/client/Get.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Get.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Get.html
index cf4fb8f..bec00e7 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Get.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Get.html
@@ -118,418 +118,423 @@
 110this.storeOffset = 
get.getRowOffsetPerColumnFamily();
 111this.tr = get.getTimeRange();
 112this.checkExistenceOnly = 
get.isCheckExistenceOnly();
-113    Map<byte[], NavigableSet<byte[]>> fams = get.getFamilyMap();
-114    for (Map.Entry<byte[], NavigableSet<byte[]>> entry : fams.entrySet()) {
-115  byte [] fam = entry.getKey();
-116  NavigableSetbyte[] cols = 
entry.getValue();
-117  if (cols != null  
cols.size()  0) {
-118for (byte[] col : cols) {
-119  addColumn(fam, col);
-120}
-121  } else {
-122addFamily(fam);
-123  }
-124}
-125    for (Map.Entry<String, byte[]> attr : get.getAttributesMap().entrySet()) {
-126  setAttribute(attr.getKey(), 
attr.getValue());
-127}
-128    for (Map.Entry<byte[], TimeRange> entry : get.getColumnFamilyTimeRange().entrySet()) {
-129  TimeRange tr = entry.getValue();
-130  
setColumnFamilyTimeRange(entry.getKey(), tr.getMin(), tr.getMax());
-131}
-132  }
-133
-134  public boolean isCheckExistenceOnly() 
{
-135return checkExistenceOnly;
-136  }
-137
-138  public Get 
setCheckExistenceOnly(boolean checkExistenceOnly) {
-139this.checkExistenceOnly = 
checkExistenceOnly;
-140return this;
-141  }
-142
-143  /**
-144   * This will always return the default 
value which is false as client cannot set the value to this
-145   * property any more.
-146   * @deprecated since 2.0.0 and will be 
removed in 3.0.0
-147   */
-148  @Deprecated
-149  public boolean isClosestRowBefore() {
-150return closestRowBefore;
-151  }
-152
-153  /**
-154   * This is not used any more and does 
nothing. Use reverse scan instead.
-155   * @deprecated since 2.0.0 and will be 
removed in 3.0.0
-156   */
-157  @Deprecated
-158  public Get setClosestRowBefore(boolean 
closestRowBefore) {
-159// do Nothing
-160return this;
-161  }
-162
-163  /**
-164   * Get all columns from the specified 
family.
-165   * p
-166   * Overrides previous calls to 
addColumn for this family.
-167   * @param family family name
-168   * @return the Get object
-169   */
-170  public Get addFamily(byte [] family) 
{
-171familyMap.remove(family);
-172familyMap.put(family, null);
-173return this;
-174  }
-175
-176  /**
-177   * Get the column from the specific 
family with the specified qualifier.
-178   * p
-179   * Overrides previous calls to 
addFamily for this family.
-180   * @param family family name
-181   * @param qualifier column qualifier
-182   * @return the Get objec
-183   */
-184  public Get addColumn(byte [] family, 
byte [] qualifier) {
-185    NavigableSet<byte []> set = familyMap.get(family);
-186    if(set == null) {
-187      set = new TreeSet<byte []>(Bytes.BYTES_COMPARATOR);
-188}
-189if (qualifier == null) {
-190  qualifier = 
HConstants.EMPTY_BYTE_ARRAY;
-191}
-192set.add(qualifier);
-193familyMap.put(family, set);
-194return this;
-195  }
-196
-197  /**
-198   * Get versions of columns only within 
the specified timestamp range,
-199   * [minStamp, maxStamp).
-200   * @param minStamp minimum timestamp 
value, inclusive
-201   * @param maxStamp maximum timestamp 
value, exclusive
-202   * @throws IOException
-203   * @return this for invocation 
chaining
-204   */
-205  public Get setTimeRange(long minStamp, 
long maxStamp) throws IOException {
-206tr = new TimeRange(minStamp, 
maxStamp);
-207return this;
-208  }
-209
-210  /**
-211   * Get versions of columns with the 
specified timestamp.
-212   * @param timestamp version timestamp
-213   * @return this for invocation 
chaining
-214   */
-215  public Get setTimeStamp(long 
timestamp)
-216  throws IOException {
-217try {
-218  tr = new TimeRange(timestamp, 
timestamp+1);
-219} catch(Exception e) {
-220  // This should never happen, unless 
integer overflow or something extremely wrong...
-221  LOG.error("TimeRange failed, likely 
caused by integer overflow. ", e);
-222  throw e;
-223}
-224return this;
-225  }
-226
-227  @Override public Get 
setColumnFamilyTimeRange(byte[] cf, long minStamp, long maxStamp) {
-228return (Get) 
super.setColumnFamilyTimeRange(cf, minStamp, maxStamp);
-229  }
-230
-231  /**
-232   * Get all available versions.
-233   * @return this for invocation 
chaining
-234   */
-235  public Get setMaxVersions() {
-236this.maxVersions = 
Integer.MAX_VALUE;
-237return this;
-238  }
-239
-240  /**
-241   * Get up to the specified number of 
versions of 

[29/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/coc.html
--
diff --git a/coc.html b/coc.html
index cd135fe..a188015 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Code of Conduct Policy
@@ -331,7 +331,7 @@ For flagrant violations requiring a firm response the PMC 
may opt to skip early
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-10-09
+  Last Published: 
2016-10-17
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/cygwin.html
--
diff --git a/cygwin.html b/cygwin.html
index 7a9259b..16df94a 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Installing Apache HBase (TM) on Windows using 
Cygwin
 
@@ -673,7 +673,7 @@ Now your HBase server is running, start 
coding and build that next
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-10-09
+  Last Published: 
2016-10-17
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index c4feba0..17060df 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependencies
 
@@ -518,7 +518,7 @@
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-10-09
+  Last Published: 
2016-10-17
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 88f7d2a..d132935 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Reactor Dependency Convergence
 
@@ -1775,7 +1775,7 @@
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-10-09
+  Last Published: 
2016-10-17
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/dependency-info.html
--
diff --git a/dependency-info.html b/dependency-info.html
index dd228d3..a9ef8b6 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Dependency Information
 
@@ -312,7 +312,7 @@
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-10-09
+  Last Published: 
2016-10-17
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/dependency-management.html
--
diff --git a/dependency-management.html b/dependency-management.html
index 6e000ae..ddcb074 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependency Management
 
@@ -828,7 +828,7 @@
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-10-09
+  Last Published: 
2016-10-17
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/allclasses-frame.html
--
diff --git a/devapidocs/allclasses-frame.html b/devapidocs/allclasses-frame.html
index 756908b..0671430 100644
--- a/devapidocs/allclasses-frame.html
+++ b/devapidocs/allclasses-frame.html
@@ -29,6 +29,7 @@
 AbstractMultiFileWriter.WriterFactory
 AbstractMultiOutputCompactor
 AbstractPositionedByteRange
+AbstractProcedureScheduler
 AbstractProtobufLogWriter
 AbstractResponse
 AbstractResponse.ResponseType
@@ -74,6 +75,9 @@
 AssignmentManagerStatusTmpl.Intf
 AssignmentManagerStatusTmplImpl
 AssignmentVerificationReport
+AsyncConnection

[13/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
index 1a50c3f..c68b2f9 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private final class AsyncRequestFutureImpl.ReplicaCallIssuingRunnable
+private final class AsyncRequestFutureImpl.ReplicaCallIssuingRunnable
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable
 Runnable (that can be submitted to thread pool) that waits 
for when it's time
@@ -229,7 +229,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 startTime
-private finallong startTime
+private finallong startTime
 
 
 
@@ -238,7 +238,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 initialActions
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListActionRow initialActions
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListActionRow initialActions
 
 
 
@@ -255,7 +255,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 ReplicaCallIssuingRunnable
-publicReplicaCallIssuingRunnable(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListActionRowinitialActions,
+publicReplicaCallIssuingRunnable(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListActionRowinitialActions,
   longstartTime)
 
 
@@ -273,7 +273,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 run
-publicvoidrun()
+publicvoidrun()
 
 Specified by:
 http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true#run--;
 title="class or interface in java.lang">runin 
interfacehttp://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable
@@ -286,7 +286,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 addReplicaActions
-privatevoidaddReplicaActions(intindex,
+privatevoidaddReplicaActions(intindex,
http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,MultiActionRowactionsByServer,
http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListActionRowunknownReplicaActions)
 Add replica actions to action map by server.
@@ -303,7 +303,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
 
 
 addReplicaActionsAgain
-privatevoidaddReplicaActionsAgain(ActionRowaction,
+privatevoidaddReplicaActionsAgain(ActionRowaction,
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,MultiActionRowactionsByServer)
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
index 77a7c12..4952610 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class AsyncRequestFutureImpl.ReplicaResultState
+private static class AsyncRequestFutureImpl.ReplicaResultState
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 Sync point for calls to multiple replicas for the same user 
request (Get).
  Created and put in the results array (we assume 

[12/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
index daa4efb..73f6f31 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-class AsyncRequestFutureImplCResult
+class AsyncRequestFutureImplCResult
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements AsyncRequestFuture
 The context, and return value, for a single 
submit/submitAll call.
@@ -205,33 +205,33 @@ implements currentCallable
 
 
-private int
-currentCallTotalTimeout
-
-
 private BatchErrors
 errors
 
-
+
 private ConnectionImplementation.ServerErrorTracker
 errorsByServer
 
-
+
 private boolean
 hasAnyReplicaGets
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
 heapSizesByServer
 
-
+
 private static 
org.apache.commons.logging.Log
 LOG
 
-
+
 private long
 nonceGroup
 
+
+private int
+operationTimeout
+
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ExecutorService
 pool
@@ -255,9 +255,17 @@ implements 
+private int
+rpcTimeout
+
+
 private TableName
 tableName
 
+
+private RetryingTimeTracker
+tracker
+
 
 
 
@@ -273,7 +281,7 @@ implements Constructor and Description
 
 
-AsyncRequestFutureImpl(TableNametableName,
+AsyncRequestFutureImpl(TableNametableName,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListActionRowactions,
   longnonceGroup,
   http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ExecutorServicepool,
@@ -281,7 +289,8 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object[]results,
   Batch.CallbackCResultcallback,
   CancellableRegionServerCallablecallable,
-  inttimeout,
+  intoperationTimeout,
+  intrpcTimeout,
   AsyncProcessasyncProcess)
 
 
@@ -537,7 +546,16 @@ implements 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.apache.commons.logging.Log LOG
+
+
+
+
+
+
+
+tracker
+privateRetryingTimeTracker tracker
 
 
 
@@ -546,7 +564,7 @@ implements 
 
 callback
-private finalBatch.CallbackCResult callback
+private finalBatch.CallbackCResult callback
 
 
 
@@ -555,7 +573,7 @@ implements 
 
 errors
-private finalBatchErrors errors
+private finalBatchErrors errors
 
 
 
@@ -564,7 +582,7 @@ implements 
 
 errorsByServer
-private finalConnectionImplementation.ServerErrorTracker 
errorsByServer
+private finalConnectionImplementation.ServerErrorTracker 
errorsByServer
 
 
 
@@ -573,7 +591,7 @@ implements 
 
 pool
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in java.util.concurrent">ExecutorService pool
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in java.util.concurrent">ExecutorService pool
 
 
 
@@ -582,7 +600,7 @@ implements 
 
 callsInProgress
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">SetCancellableRegionServerCallable callsInProgress
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">SetCancellableRegionServerCallable callsInProgress
 
 
 
@@ -591,7 +609,7 @@ implements 
 
 tableName
-private finalTableName tableName
+private finalTableName tableName
 
 
 
@@ -600,7 +618,7 @@ implements 
 
 actionsInProgress
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicLong actionsInProgress
+private 

[31/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/bulk-loads.html
--
diff --git a/bulk-loads.html b/bulk-loads.html
index a41fa28..0469490 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase   
   Bulk Loads in Apache HBase (TM)
@@ -305,7 +305,7 @@ under the License. -->
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-10-09
+  Last Published: 
2016-10-17
 
 
 



[17/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html
 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html
index 6351eea..305ceba 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html
@@ -185,62 +185,58 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-org.apache.hadoop.hbase.mob.mapreduce
-
-
-
 org.apache.hadoop.hbase.quotas
 
 
-
+
 org.apache.hadoop.hbase.regionserver
 
 
-
+
 org.apache.hadoop.hbase.regionserver.wal
 
 
-
+
 org.apache.hadoop.hbase.replication
 
 Multi Cluster Replication
 
 
-
+
 org.apache.hadoop.hbase.rest
 
 HBase REST
 
 
-
+
 org.apache.hadoop.hbase.rest.client
 
 
-
+
 org.apache.hadoop.hbase.rsgroup
 
 
-
+
 org.apache.hadoop.hbase.security
 
 
-
+
 org.apache.hadoop.hbase.security.access
 
 
-
+
 org.apache.hadoop.hbase.security.token
 
 
-
+
 org.apache.hadoop.hbase.security.visibility
 
 
-
+
 org.apache.hadoop.hbase.snapshot
 
 
-
+
 org.apache.hadoop.hbase.types
 
 
@@ -248,11 +244,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  extensible data type API.
 
 
-
+
 org.apache.hadoop.hbase.util
 
 
-
+
 org.apache.hadoop.hbase.zookeeper
 
 
@@ -629,34 +625,52 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 interface
-Attributes
+AsyncConnection
+The asynchronous version of Connection.
+
+
+
+interface
+AsyncTable
+The asynchronous version of Table.
+
+
+
+interface
+AsyncTableRegionLocator
+The asynchronous version of RegionLocator.
+
 
 
 interface
+Attributes
+
+
+interface
 BufferedMutator
 Used to communicate with a single HBase table similar to Table but meant for
  batched, asynchronous puts (see the usage sketch after this listing).
 
 
-
+
 static interface
 BufferedMutator.ExceptionListener
 Listens for asynchronous exceptions on a BufferedMutator.
 
 
-
+
 class
 BufferedMutatorParams
 Parameters for instantiating a BufferedMutator.
 
 
-
+
 class
 CompactionState
 POJO representing the compaction state
 
 
-
+
 class
 CompactType
 Currently, there are only two compact types:
@@ -664,148 +678,148 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
  MOB means do mob files compaction.
 
 
-
+
 interface
 Connection
 A cluster connection encapsulating lower level individual 
connections to actual servers and
  a connection to zookeeper.
 
 
-
+
 class
 ConnectionFactory
 A non-instantiable class that manages creation of Connections.
 
 
-
+
 class
 Consistency
 Consistency defines the expected consistency level for an 
operation.
 
 
-
+
 class
 Delete
 Used to perform Delete operations on a single row.
 
 
-
+
 class
 DoNotRetryRegionException
 Similar to RegionException, but disables retries.
 
 
-
+
 class
 Durability
 Enum describing the durability guarantees for tables and Mutations
  Note that the items must be sorted in order of increasing durability
 
 
-
+
 class
 Get
 Used to perform Get operations on a single row.
 
 
-
+
 class
 HTableMultiplexer
 HTableMultiplexer provides a thread-safe non blocking PUT 
API across all the tables.
 
 
-
+
 static class
 HTableMultiplexer.HTableMultiplexerStatus
 HTableMultiplexerStatus keeps track of the current status 
of the HTableMultiplexer.
 
 
-
+
 class
 Increment
 Used to perform Increment operations on a single row.
 
 
-
+
 class
 IsolationLevel
 Specify Isolation levels in Scan operations.
 
 
-
+
 class
 MasterSwitchType
 Represents the master switch type
 
 
-
+
 class
 Mutation
 
-
+
 class
 NoServerForRegionException
 Thrown when no region server can be found for a region
 
 
-
+
 class
 Operation
 Superclass for any type that maps to a potentially 
application-level query.
 
 
-
+
 class
 OperationWithAttributes
 
-
+
 class
 Put
 Used to perform Put operations for a single row.
 
 
-
+
 class
 Query
 
-
+
 class
 RegionLoadStats
 POJO representing region server load
 
 
-
+
 interface
 RegionLocator
 Used to view region location information for a single HBase 
table.
 
 
-
+
 class
 RegionOfflineException
 Thrown when a table can not be located
 
 
-
+
 class
 Result
 Single row result of a Get or Scan query.
 
 
-
+
 interface
 ResultScanner
 Interface for client-side scanning.
 
 
-
+
 class
 RetriesExhaustedException
 Exception thrown by HTable methods when an attempt to do 
something (like
  commit changes) fails after a bunch of retries.
 
 
-
+
 class
 RetriesExhaustedWithDetailsException
 This subclass of RetriesExhaustedException
@@ -813,25 +827,25 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  exceptions on what 
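
A minimal BufferedMutator usage sketch, as referenced in the class listing above. It assumes
a reachable cluster; the table and column names are placeholders:

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class BufferedMutatorExample {
  public static void main(String[] args) throws IOException {
    try (Connection conn = ConnectionFactory.createConnection();
         BufferedMutator mutator = conn.getBufferedMutator(TableName.valueOf("my_table"))) {
      for (int i = 0; i < 100; i++) {
        Put put = new Put(Bytes.toBytes("row-" + i));
        put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("value-" + i));
        mutator.mutate(put);   // buffered locally and flushed to the region servers in batches
      }
      mutator.flush();         // push anything still sitting in the local buffer
    }
  }
}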

[23/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/KeyValueUtil.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValueUtil.html 
b/devapidocs/org/apache/hadoop/hbase/KeyValueUtil.html
index dd14b89..7076ece 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeyValueUtil.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeyValueUtil.html
@@ -171,9 +171,10 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 static KeyValue
-copyCellTo(Cellcell,
+copyCellTo(Cellcell,
   byte[]buf,
-  intoffset)
+  intoffset,
+  intlen)
 Write the given cell in KeyValue serialization format into 
the given buf and return a new
  KeyValue object around that.
 
@@ -401,7 +402,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-static void
+static int
 oswrite(Cellcell,
http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true;
 title="class or interface in java.io">OutputStreamout,
booleanwithTags)
@@ -1136,25 +1137,26 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/L
 
 
 oswrite
-public staticvoidoswrite(Cellcell,
-   http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true;
 title="class or interface in java.io">OutputStreamout,
-   booleanwithTags)
-throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
+public staticintoswrite(Cellcell,
+  http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true;
 title="class or interface in java.io">OutputStreamout,
+  booleanwithTags)
+   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:
 http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 
 
-
+
 
 
 
 
 copyCellTo
-public staticKeyValuecopyCellTo(Cellcell,
+public staticKeyValuecopyCellTo(Cellcell,
   byte[]buf,
-  intoffset)
+  intoffset,
+  intlen)
 Write the given cell in KeyValue serialization format into 
the given buf and return a new
  KeyValue object around that.
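
For reference, a hedged sketch against the copyCellTo signature shown in this hunk.
KeyValueUtil is an @InterfaceAudience.Private helper, so the exact signature varies by
release; KeyValueUtil.length(cell) is assumed here to size the destination buffer:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class CopyCellToExample {
  public static void main(String[] args) {
    Cell cell = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"),
        Bytes.toBytes("q"), Bytes.toBytes("value"));
    int len = KeyValueUtil.length(cell);                         // serialized KeyValue length of the cell
    byte[] buf = new byte[len];
    KeyValue copy = KeyValueUtil.copyCellTo(cell, buf, 0, len);  // copy into buf and wrap the copy
    System.out.println(copy);
  }
}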
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/ProcedureInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ProcedureInfo.html 
b/devapidocs/org/apache/hadoop/hbase/ProcedureInfo.html
index 901474e..b0e126a 100644
--- a/devapidocs/org/apache/hadoop/hbase/ProcedureInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/ProcedureInfo.html
@@ -283,7 +283,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Cloneable
 hasClientAckTime()
 
 
-private boolean
+boolean
 hasOwner()
 
 
@@ -519,7 +519,7 @@ public
 
 hasOwner
-privatebooleanhasOwner()
+publicbooleanhasOwner()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
index 9deb4da..0361c8c 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
@@ -109,40 +109,36 @@
 
 
 
-org.apache.hadoop.hbase.mob.mapreduce
-
-
-
 org.apache.hadoop.hbase.procedure.flush
 
 
-
+
 org.apache.hadoop.hbase.regionserver
 
 
-
+
 org.apache.hadoop.hbase.regionserver.snapshot
 
 
-
+
 org.apache.hadoop.hbase.replication
 
 Multi Cluster Replication
 
 
-
+
 org.apache.hadoop.hbase.replication.master
 
 
-
+
 org.apache.hadoop.hbase.replication.regionserver
 
 
-
+
 org.apache.hadoop.hbase.util.hbck
 
 
-
+
 org.apache.hadoop.hbase.zookeeper
 
 
@@ -441,24 +437,6 @@
 
 
 
-
-
-
-Uses of Abortable in org.apache.hadoop.hbase.mob.mapreduce
-
-Classes in org.apache.hadoop.hbase.mob.mapreduce
 that implement Abortable
-
-Modifier and Type
-Class and Description
-
-
-
-static class
-SweepJob.DummyMobAbortable
-
-
-
-
 
 
 



[10/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.html
new file mode 100644
index 000..5a41be6
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.html
@@ -0,0 +1,637 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+AsyncSingleRequestRpcRetryingCaller (Apache HBase 2.0.0-SNAPSHOT 
API)
+
+
+
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.client
+Class 
AsyncSingleRequestRpcRetryingCallerT
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.client.AsyncSingleRequestRpcRetryingCallerT
+
+
+
+
+
+
+
+
+@InterfaceAudience.Private
+class AsyncSingleRequestRpcRetryingCallerT
+extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+Retry caller for a single request, such as get, put, 
delete, etc.
+
+
+
+
+
+
+
+
+
+
+
+Nested Class Summary
+
+Nested Classes
+
+Modifier and Type
+Class and Description
+
+
+static interface
+AsyncSingleRequestRpcRetryingCaller.CallableT
+
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+private AsyncSingleRequestRpcRetryingCaller.CallableT
+callable
+
+
+private AsyncConnectionImpl
+conn
+
+
+private HBaseRpcController
+controller
+
+
+private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRetriesExhaustedException.ThrowableWithExtraContext
+exceptions
+
+
+private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureT
+future
+
+
+private static 
org.apache.commons.logging.Log
+LOG
+
+
+private int
+maxAttempts
+
+
+private long
+operationTimeoutNs
+
+
+private long
+pauseNs
+
+
+private io.netty.util.HashedWheelTimer
+retryTimer
+
+
+private byte[]
+row
+
+
+private long
+rpcTimeoutNs
+
+
+private int
+startLogErrorsCnt
+
+
+private long
+startNs
+
+
+private TableName
+tableName
+
+
+private int
+tries
+
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Constructor and Description
+
+
+AsyncSingleRequestRpcRetryingCaller(io.netty.util.HashedWheelTimerretryTimer,
+   AsyncConnectionImplconn,
+   TableNametableName,
+   byte[]row,
+   AsyncSingleRequestRpcRetryingCaller.CallableTcallable,
+   longpauseNs,
+   intmaxRetries,
+   longoperationTimeoutNs,
+   longrpcTimeoutNs,
+   
intstartLogErrorsCnt)
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All MethodsStatic MethodsInstance MethodsConcrete Methods
+
+Modifier and Type
+Method and Description
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureT
+call()
+
+
+private void
+call(HRegionLocationloc)
+
+
+private void
+completeExceptionally()
+
+
+private long
+elapsedMs()
+
+
+private void
+locateThenCall()
+
+
+private void
+onError(http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">Throwableerror,
+   

[20/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index c24f9b9..03c8b00 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -133,52 +133,48 @@
 
 
 
-org.apache.hadoop.hbase.mob.mapreduce
-
-
-
 org.apache.hadoop.hbase.regionserver
 
 
-
+
 org.apache.hadoop.hbase.regionserver.handler
 
 
-
+
 org.apache.hadoop.hbase.replication
 
 Multi Cluster Replication
 
 
-
+
 org.apache.hadoop.hbase.replication.regionserver
 
 
-
+
 org.apache.hadoop.hbase.rsgroup
 
 
-
+
 org.apache.hadoop.hbase.security.access
 
 
-
+
 org.apache.hadoop.hbase.tmpl.master
 
 
-
+
 org.apache.hadoop.hbase.tool
 
 
-
+
 org.apache.hadoop.hbase.util
 
 
-
+
 org.apache.hadoop.hbase.wal
 
 
-
+
 org.apache.hadoop.hbase.zookeeper
 
 
@@ -839,6 +835,10 @@
  intstopped)
 
 
+private 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.Interface
+AsyncConnectionImpl.createRegionServerStub(ServerNameserverName)
+
+
 static ClusterConnection
 ConnectionUtils.createShortCircuitConnection(org.apache.hadoop.conf.Configurationconf,
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ExecutorServicepool,
@@ -850,59 +850,63 @@
  deserialization, networking, etc..) when talking to a local server.
 
 
-
+
 protected void
 AsyncProcess.decTaskCounters(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in 
java.util">Collectionbyte[]regions,
ServerNamesn)
 Decrements the counters for a given region and the region 
server.
 
 
-
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface
 ClusterConnection.getAdmin(ServerNameserverName)
 Establishes a connection to the region server at the 
specified address.
 
 
-
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface
 ConnectionImplementation.getAdmin(ServerNameserverName)
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
 AsyncRequestFutureImpl.getBackoff(ServerNameserver,
   byte[]regionName)
 
-
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.BlockingInterface
 ClusterConnection.getClient(ServerNameserverName)
 Establishes a connection to the region server at the 
specified address, and returns
  a region client protocol.
 
 
-
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.BlockingInterface
 ConnectionImplementation.getClient(ServerNamesn)
 
-
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.BlockingInterface
 CoprocessorHConnection.getClient(ServerNameserverName)
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">Collection? extends http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable
 AsyncRequestFutureImpl.getNewMultiActionRunnable(ServerNameserver,
  MultiActionRowmultiAction,
  intnumAttempt)
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionInfo
 Admin.getOnlineRegions(ServerNamesn)
 Get all the online regions on a region server.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionInfo
 HBaseAdmin.getOnlineRegions(ServerNamesn)
 
+
+(package private) 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.Interface
+AsyncConnectionImpl.getRegionServerStub(ServerNameserverName)
+
 
 (package private) ServerStatistics
 ServerStatisticTracker.getServerStatsForTesting(ServerNameserver)
@@ -912,39 +916,47 @@
 ServerStatisticTracker.getStats(ServerNameserver)
 
 
+(package private) static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+ConnectionUtils.getStubKey(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringserviceName,
+  ServerNameserverName,
+  booleanhostnameCanChange)
+Get a unique key for the rpc stub to the given server.
+
+
+
 protected void
 PreemptiveFastFailInterceptor.handleFailureToServer(ServerNameserverName,
 

[02/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.html 
b/devapidocs/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.html
index 0a7f43d..e65933f 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.html
@@ -106,9 +106,6 @@ var activeTableTab = "activeTableTab";
 org.apache.hadoop.hbase.client.ClientServiceCallableT
 
 
-org.apache.hadoop.hbase.client.CancellableRegionServerCallableT
-
-
 org.apache.hadoop.hbase.client.NoncedRegionServerCallableT
 
 
@@ -117,8 +114,6 @@ var activeTableTab = "activeTableTab";
 
 
 
-
-
 
 
 
@@ -128,18 +123,18 @@ var activeTableTab = "activeTableTab";
 
 
 All Implemented Interfaces:
-Cancellable, RetryingCallableT
+RetryingCallableT
 
 
 
 @InterfaceAudience.Private
 public abstract class NoncedRegionServerCallableT
-extends CancellableRegionServerCallableT
+extends ClientServiceCallableT
 Implementations make an rpc call against a RegionService 
via a protobuf Service.
- Implement #rpcCall(RpcController) and then call CancellableRegionServerCallable.call(int)
 to
- trigger the rpc. The CancellableRegionServerCallable.call(int)
 eventually invokes your
+ Implement #rpcCall(RpcController) and then call RegionServerCallable.call(int)
 to
+ trigger the rpc. The RegionServerCallable.call(int)
 eventually invokes your
  #rpcCall(RpcController) meanwhile saving you having to write a bunch of
- boilerplate. The CancellableRegionServerCallable.call(int)
 implementation is from RpcRetryingCaller so rpcs are
+ boilerplate. The RegionServerCallable.call(int)
 implementation is from RpcRetryingCaller so rpcs are
  retried on fail.
 
  TODO: this class is actually tied to one region, because most of the paths 
make use of
@@ -221,25 +216,18 @@ extends 
-
-
-
-Methods inherited from classorg.apache.hadoop.hbase.client.CancellableRegionServerCallable
-call,
 cancel,
 doBulkLoadHFile,
 doCleanupBulkLoad,
 doMulti,
 doPrepareBulkLoad,
 doScan,
 isCancelled,
 prepare,
 setStubByServiceName
-
-
 
 
 
 Methods inherited from classorg.apache.hadoop.hbase.client.ClientServiceCallable
-doGet,
 doMutate
+doGet,
 doMutate,
 setStubByServiceName
 
 
 
 
 
 Methods inherited from classorg.apache.hadoop.hbase.client.RegionServerCallable
-getConnection,
 getExceptionMessageAdditionalDetail,
 getHRegionInfo,
 getLocation,
 getRow,
 getRpcController,
 getRpcControllerCellScanner,
 getStub,
 getTableName,
 rpcCall,
 setLocation,
 setRpcControllerCellScanner,
 setStub,
 sleep,
 throwable
+call,
 getConnection,
 getExceptionMessageAdditionalDetail,
 getHRegionInfo,
 getLocation,
 getRow,
 getRpcController,
 getRpcControllerCellScanner,
 getStub,
 getTableName,
 prepare,
 rpcCall,
 setLocation,
 setRpcControllerCellScanner,
 setStub,
 sleep,
 throwable
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/Query.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Query.html 
b/devapidocs/org/apache/hadoop/hbase/client/Query.html
index e5c81db..1c40f21 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Query.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Query.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -166,6 +166,10 @@ extends ISOLATION_LEVEL
 
 
+protected http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+loadColumnFamiliesOnDemand
+
+
 protected int
 targetReplicaId
 
@@ -209,53 +213,65 @@ extends Method and Description
 
 
+boolean
+doLoadColumnFamiliesOnDemand()
+Get the logical value indicating whether on-demand CF 
loading should be allowed.
+
+
+
 byte[]
 getACL()
 
-
+
 Authorizations
 getAuthorizations()
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],TimeRange
 getColumnFamilyTimeRange()
 
-
+
 Consistency
 getConsistency()
 Returns the consistency level for this operation
 
 
-
+
 Filter
 getFilter()
 
-
+
 IsolationLevel
 getIsolationLevel()
 
-
+
+http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface 

[16/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html
new file mode 100644
index 000..eddf9e3
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html
@@ -0,0 +1,469 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+AsyncConnectionConfiguration (Apache HBase 2.0.0-SNAPSHOT API)
+
+
+
+
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.client
+Class 
AsyncConnectionConfiguration
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.client.AsyncConnectionConfiguration
+
+
+
+
+
+
+
+
+@InterfaceAudience.Private
+class AsyncConnectionConfiguration
+extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+Timeout configs.
+
+
+
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+private int
+maxRetries
+
+
+private long
+metaOperationTimeoutNs
+
+
+private long
+operationTimeoutNs
+
+
+private long
+pauseNs
+
+
+private long
+readRpcTimeoutNs
+
+
+private int
+startLogErrorsCnt
+How many retries are allowed before we start to log
+
+
+
+private long
+writeRpcTimeoutNs
+
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Constructor and Description
+
+
+AsyncConnectionConfiguration(org.apache.hadoop.conf.Configurationconf)
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All MethodsInstance MethodsConcrete Methods
+
+Modifier and Type
+Method and Description
+
+
+(package private) int
+getMaxRetries()
+
+
+(package private) long
+getMetaOperationTimeoutNs()
+
+
+(package private) long
+getOperationTimeoutNs()
+
+
+(package private) long
+getPauseNs()
+
+
+(package private) long
+getReadRpcTimeoutNs()
+
+
+(package private) int
+getStartLogErrorsCnt()
+
+
+(package private) long
+getWriteRpcTimeoutNs()
+
+
+
+
+
+
+Methods inherited from classjava.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or interface in java.lang">hashCode, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, http://docs.oracle.com/javase/8/docs/api/java/lang
 /Object.html?is-external=true#notifyAll--" title="class or interface in 
java.lang">notifyAll, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--;
 title="class or interface in java.lang">toString, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--;
 title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-;
 title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-;
 title="class or 
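
The AsyncConnectionConfiguration class added in this hunk reads its timeout and retry
values from the client Configuration. A hedged sketch of the corresponding settings,
assuming the usual HConstants key names (values are in milliseconds; the async client
converts them to nanoseconds internally):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class ClientTimeoutConfig {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    conf.setLong("hbase.client.operation.timeout", 60000L);       // total budget for one operation
    conf.setLong("hbase.client.meta.operation.timeout", 60000L);  // budget for hbase:meta operations
    conf.setLong("hbase.rpc.read.timeout", 20000L);               // per-RPC timeout for reads
    conf.setLong("hbase.rpc.write.timeout", 20000L);              // per-RPC timeout for writes
    conf.setLong("hbase.client.pause", 100L);                     // base pause between retries
    conf.setInt("hbase.client.retries.number", 15);               // maximum retries
    conf.setInt("hbase.client.start.log.errors.counter", 9);      // retries before errors are logged
  }
}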

[27/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 5b130a5..c8e7e05 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -94,18 +94,12 @@
 
 abort(String,
 Throwable) - Method in class org.apache.hadoop.hbase.master.HMaster
 
-abort(MasterProcedureEnv)
 - Method in class org.apache.hadoop.hbase.master.procedure.DispatchMergingRegionsProcedure
-
 abort(MasterProcedureEnv)
 - Method in class org.apache.hadoop.hbase.master.procedure.RestoreSnapshotProcedure
 
 abort(MasterProcedureEnv)
 - Method in class org.apache.hadoop.hbase.master.procedure.ServerCrashProcedure
 
 abort(MasterProcedureEnv)
 - Method in class org.apache.hadoop.hbase.master.procedure.TruncateTableProcedure
 
-abort
 - Variable in class org.apache.hadoop.hbase.mob.mapreduce.SweepJob.DummyMobAbortable
-
-abort(String,
 Throwable) - Method in class 
org.apache.hadoop.hbase.mob.mapreduce.SweepJob.DummyMobAbortable
-
 abort(String)
 - Method in interface org.apache.hadoop.hbase.monitoring.MonitoredTask
 
 abort(String)
 - Method in class org.apache.hadoop.hbase.monitoring.MonitoredTaskImpl
@@ -380,6 +374,10 @@
 
 AbstractPositionedByteRange()
 - Constructor for class org.apache.hadoop.hbase.util.AbstractPositionedByteRange
 
+AbstractProcedureScheduler - Class in org.apache.hadoop.hbase.procedure2
+
+AbstractProcedureScheduler()
 - Constructor for class org.apache.hadoop.hbase.procedure2.AbstractProcedureScheduler
+
 AbstractProtobufLogWriter - Class in org.apache.hadoop.hbase.regionserver.wal
 
 Base class for Protobuf log writer.
@@ -438,8 +436,6 @@
 
 accept(Class?)
 - Method in class org.apache.hadoop.hbase.mapreduce.ResultSerialization
 
-accept(Path)
 - Method in class org.apache.hadoop.hbase.mob.mapreduce.SweepReducer.PathPrefixFilter
-
 accept(Path,
 Boolean) - Method in class org.apache.hadoop.hbase.util.AbstractFileStatusFilter
 
 Filters out a path.
@@ -718,6 +714,8 @@
 
 action - 
Variable in class org.apache.hadoop.hbase.client.Action
 
+action(AsyncSingleRequestRpcRetryingCaller.CallableT)
 - Method in class org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder
+
 action
 - Variable in class org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler.BufferCallEvent
 
 Action(BaseLoadBalancer.Cluster.Action.Type)
 - Constructor for class org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action
@@ -918,8 +916,6 @@
 
 add(ProcedureProtos.Procedure)
 - Method in class org.apache.hadoop.hbase.procedure2.store.wal.ProcedureWALFormatReader.WalProcedureMap
 
-add(E)
 - Method in class org.apache.hadoop.hbase.procedure2.util.TimeoutBlockingQueue
-
 add(IterableCell)
 - Method in class org.apache.hadoop.hbase.regionserver.AbstractMemStore
 
 add(Cell)
 - Method in class org.apache.hadoop.hbase.regionserver.AbstractMemStore
@@ -931,9 +927,13 @@
 add(Cell)
 - Method in class org.apache.hadoop.hbase.regionserver.CellSet
 
 add(Cell)
 - Method in class org.apache.hadoop.hbase.regionserver.HStore
-
+
+Adds a value to the memstore
+
 add(IterableCell)
 - Method in class org.apache.hadoop.hbase.regionserver.HStore
-
+
+Adds the specified value to the memstore
+
 add(Cell)
 - Method in interface org.apache.hadoop.hbase.regionserver.MemStore
 
 Write an update
@@ -954,14 +954,6 @@
 
 Add the specified KeyValue to the list of deletes to check 
against for this row operation.
 
-add(Cell)
 - Method in interface org.apache.hadoop.hbase.regionserver.Store
-
-Adds a value to the memstore
-
-add(IterableCell)
 - Method in interface org.apache.hadoop.hbase.regionserver.Store
-
-Adds the specified value to the memstore
-
 add(Cell)
 - Method in class org.apache.hadoop.hbase.regionserver.wal.WALEdit
 
 add(String)
 - Method in class org.apache.hadoop.hbase.rest.client.Cluster
@@ -1157,16 +1149,14 @@
 
 Adds all the attributes into the Operation object
 
-addBack(Procedure)
 - Method in class org.apache.hadoop.hbase.master.procedure.MasterProcedureScheduler
-
 addBack(Procedure)
 - Method in class org.apache.hadoop.hbase.master.procedure.MasterProcedureScheduler.QueueImpl
 
-addBack(Procedure)
 - Method in interface org.apache.hadoop.hbase.procedure2.ProcedureRunnableSet
+addBack(Procedure)
 - Method in class org.apache.hadoop.hbase.procedure2.AbstractProcedureScheduler
+
+addBack(Procedure)
 - Method in interface org.apache.hadoop.hbase.procedure2.ProcedureScheduler
 
 Inserts the specified element at the end of this 
queue.
 
-addBack(Procedure)
 - Method in class org.apache.hadoop.hbase.procedure2.ProcedureSimpleRunQueue
-
 addBlock(ClientProtocol,
 String, String, ExtendedBlock, DatanodeInfo[], long, String[]) - 
Method in interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.BlockAdder
 
 addBloomFilter(BloomFilterWriter,
 BlockType) - Method in class 

[38/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
index e22025b..87668ac 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
@@ -450,222 +450,226 @@
 442private final int maxRetryInQueue;
 443private final AtomicInteger 
retryInQueue = new AtomicInteger(0);
 444private final int writeRpcTimeout; // 
needed to pass in through AsyncProcess constructor
-445
-446public FlushWorker(Configuration 
conf, ClusterConnection conn, HRegionLocation addr,
-447HTableMultiplexer 
htableMultiplexer, int perRegionServerBufferQueueSize,
-448ExecutorService pool, 
ScheduledExecutorService executor) {
-449  this.addr = addr;
-450  this.multiplexer = 
htableMultiplexer;
-451  this.queue = new 
LinkedBlockingQueue(perRegionServerBufferQueueSize);
-452  RpcRetryingCallerFactory 
rpcCallerFactory = RpcRetryingCallerFactory.instantiate(conf);
-453  RpcControllerFactory 
rpcControllerFactory = RpcControllerFactory.instantiate(conf);
-454  this.writeRpcTimeout = 
conf.getInt(HConstants.HBASE_RPC_WRITE_TIMEOUT_KEY,
-455  
conf.getInt(HConstants.HBASE_RPC_TIMEOUT_KEY,
-456  
HConstants.DEFAULT_HBASE_RPC_TIMEOUT));
-457  this.ap = new AsyncProcess(conn, 
conf, pool, rpcCallerFactory, false, rpcControllerFactory, writeRpcTimeout);
-458  this.executor = executor;
-459  this.maxRetryInQueue = 
conf.getInt(TABLE_MULTIPLEXER_MAX_RETRIES_IN_QUEUE, 1);
-460}
-461
-462protected 
LinkedBlockingQueuePutStatus getQueue() {
-463  return this.queue;
+445private final int operationTimeout;
+446
+447public FlushWorker(Configuration 
conf, ClusterConnection conn, HRegionLocation addr,
+448HTableMultiplexer 
htableMultiplexer, int perRegionServerBufferQueueSize,
+449ExecutorService pool, 
ScheduledExecutorService executor) {
+450  this.addr = addr;
+451  this.multiplexer = 
htableMultiplexer;
+452  this.queue = new 
LinkedBlockingQueue(perRegionServerBufferQueueSize);
+453  RpcRetryingCallerFactory 
rpcCallerFactory = RpcRetryingCallerFactory.instantiate(conf);
+454  RpcControllerFactory 
rpcControllerFactory = RpcControllerFactory.instantiate(conf);
+455  this.writeRpcTimeout = 
conf.getInt(HConstants.HBASE_RPC_WRITE_TIMEOUT_KEY,
+456  
conf.getInt(HConstants.HBASE_RPC_TIMEOUT_KEY,
+457  
HConstants.DEFAULT_HBASE_RPC_TIMEOUT));
+458  this.operationTimeout = 
conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,
+459  
HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);
+460  this.ap = new AsyncProcess(conn, 
conf, pool, rpcCallerFactory, false, rpcControllerFactory,
+461  writeRpcTimeout, 
operationTimeout);
+462  this.executor = executor;
+463  this.maxRetryInQueue = 
conf.getInt(TABLE_MULTIPLEXER_MAX_RETRIES_IN_QUEUE, 1);
 464}
 465
-466public long getTotalFailedCount() {
-467  return totalFailedPutCount.get();
+466protected 
LinkedBlockingQueuePutStatus getQueue() {
+467  return this.queue;
 468}
 469
-470public long getTotalBufferedCount() 
{
-471  return queue.size() + 
currentProcessingCount.get();
+470public long getTotalFailedCount() {
+471  return totalFailedPutCount.get();
 472}
 473
-474public AtomicAverageCounter 
getAverageLatencyCounter() {
-475  return this.averageLatency;
+474public long getTotalBufferedCount() 
{
+475  return queue.size() + 
currentProcessingCount.get();
 476}
 477
-478public long getMaxLatency() {
-479  return 
this.maxLatency.getAndSet(0);
+478public AtomicAverageCounter 
getAverageLatencyCounter() {
+479  return this.averageLatency;
 480}
 481
-482boolean resubmitFailedPut(PutStatus 
ps, HRegionLocation oldLoc) throws IOException {
-483  // Decrease the retry count
-484  final int retryCount = 
ps.maxAttempCount - 1;
+482public long getMaxLatency() {
+483  return 
this.maxLatency.getAndSet(0);
+484}
 485
-486  if (retryCount = 0) {
-487// Update the failed counter and 
no retry any more.
-488return false;
-489  }
-490
-491  int cnt = 
getRetryInQueue().incrementAndGet();
-492  if (cnt  getMaxRetryInQueue()) 
{
-493// Too many Puts in queue for 
resubmit, give up this
-494
getRetryInQueue().decrementAndGet();
-495return false;
-496  }
-497
-498  final Put failedPut = ps.put;
-499  // The currentPut is 
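
The hunk above threads an operation timeout through HTableMultiplexer's internal FlushWorker
and AsyncProcess. For context, a minimal HTableMultiplexer usage sketch; the table name,
column names, and the per-region-server queue size are illustrative only:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTableMultiplexer;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class MultiplexerExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    HTableMultiplexer multiplexer = new HTableMultiplexer(conf, 1000);  // 1000 = per-RS buffer size
    Put put = new Put(Bytes.toBytes("row-1"));
    put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
    // put() is non-blocking; false means the local buffer was full and the Put was not queued.
    boolean queued = multiplexer.put(TableName.valueOf("my_table"), put);
    System.out.println("queued=" + queued);
  }
}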

[42/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html 
b/apidocs/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
index f9579b2..a220262 100644
--- a/apidocs/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
+++ b/apidocs/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
@@ -207,7 +207,7 @@ implements org.apache.hadoop.util.Tool
 
 
 
-void
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 doBulkLoad(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.fs.Pathmap,
   Adminadmin,
   Tabletable,
@@ -241,7 +241,7 @@ implements org.apache.hadoop.util.Tool
 
 
 
-protected http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem
+protected Pairhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 groupOrSplit(com.google.common.collect.Multimaphttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in 
java.nio">ByteBuffer,org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItemregionGroups,
 
org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItemitem,
 Tabletable,
@@ -315,7 +315,7 @@ implements org.apache.hadoop.util.Tool
 run(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">String[]args)
 
 
-int
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 run(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringdirPath,
http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.fs.Pathmap,
TableNametableName)
@@ -507,14 +507,14 @@ implements org.apache.hadoop.util.Tool
 
 
 doBulkLoad
-publicvoiddoBulkLoad(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.fs.Pathmap,
-   Adminadmin,
-   Tabletable,
-   RegionLocatorregionLocator,
-   booleansilence,
-   booleancopyFile)
-throws TableNotFoundException,
-   http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
+publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringdoBulkLoad(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.fs.Pathmap,
+   Adminadmin,
+   Tabletable,
+   RegionLocatorregionLocator,
+   booleansilence,
+   booleancopyFile)
+throws TableNotFoundException,
+   http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Perform a bulk load of the given directory into the given
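
The doBulkLoad change above makes the client report the families/paths it could not load.
A hedged sketch of driving a bulk load programmatically, assuming the directory-based
overload that takes Admin, Table and RegionLocator; the HFile directory and table name are
placeholders, and the same load can typically be run from the command line as
hbase org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles <hfile-dir> <table>:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;

public class BulkLoadExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    TableName tableName = TableName.valueOf("my_table");   // placeholder table
    Path hfileDir = new Path("/tmp/hfiles");                // placeholder HFile output directory
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Admin admin = conn.getAdmin();
         Table table = conn.getTable(tableName);
         RegionLocator locator = conn.getRegionLocator(tableName)) {
      LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
      loader.doBulkLoad(hfileDir, admin, table, locator);   // moves the HFiles into the table's regions
    }
  }
}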
  

[18/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
index fcd1bb3..2cf6bde 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
@@ -142,7 +142,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-void
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 LoadIncrementalHFiles.doBulkLoad(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.fs.Pathmap,
   Adminadmin,
   Tabletable,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
index 0afbdd7..89a1f85 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
In the list of packages with @InterfaceAudience.Private type uses, the entry for org.apache.hadoop.hbase.mob.mapreduce is removed. The remaining packages (org.apache.hadoop.hbase.monitoring, namespace, nio, procedure, procedure.flush, procedure2, procedure2.store, procedure2.store.wal, procedure2.util, quotas, regionserver and its subpackages, replication and its subpackages, rest and its subpackages, rsgroup, security and its subpackages, snapshot, thrift, thrift2, trace, types, util and its subpackages, wal, zookeeper, zookeeper.lock, and org.apache.hadoop.metrics2.impl/lib/util) are unchanged; only the alternating row markers of the generated table shift.
New package-private client classes are added to this list:
  (package private) class AsyncConnectionConfiguration - Timeout configs.
  (package private) class AsyncConnectionImpl - The implementation of AsyncConnection.
  (package private) class AsyncRegionLocator - TODO: reimplement using async ...
The existing entry for AsyncProcess ("This class allows a continuous flow of requests.") is unchanged.

[01/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 344fa3264 -> 9d13f2d98


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/Scan.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Scan.html 
b/devapidocs/org/apache/hadoop/hbase/client/Scan.html
index 73c7ec3..64e7f6f 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Scan.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Scan.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
The generated JavaScript method index for Scan shrinks from 63 entries (i0..i62) to 61 (i0..i60).
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
In the Scan field summary, the private Boolean loadColumnFamiliesOnDemand field is removed; it now lives in Query, and the "Fields inherited from class org.apache.hadoop.hbase.client.Query" line becomes: colFamTimeRangeMap, consistency, filter, loadColumnFamiliesOnDemand, targetReplicaId. The other fields (LOG, maxResultSize, maxVersions, RAW_ATTR, reversed, the deprecated SCAN_ATTRIBUTES_METRICS_DATA and SCAN_ATTRIBUTES_METRICS_ENABLE, SCAN_ATTRIBUTES_TABLE_NAME, small, startRow, stopRow, storeLimit, storeOffset, tr) are unchanged apart from row renumbering.

In the Scan method summary, boolean doLoadColumnFamiliesOnDemand() ("Get the logical value indicating whether on-demand CF loading should be allowed.") and Boolean getLoadColumnFamiliesOnDemandValue() ("Get the raw loadColumnFamiliesOnDemand setting; if it's not set, can be null.") are removed; they are now inherited from Query. The remaining accessors (getAllowPartialResults, getBatch, getCacheBlocks, getCaching, getFamilies, getFamilyMap, getFilter, getFingerprint, getMaxResultSize, ...) only renumber.
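Since the on-demand column-family loading switch now lives on Query, Scan and Get expose it the same way. A minimal, hedged sketch follows; the table and family names are illustrative assumptions, everything else is existing public client API.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.*;
  import org.apache.hadoop.hbase.util.Bytes;

  public class OnDemandCfScanSketch {
    public static void main(String[] args) throws Exception {
      Configuration conf = HBaseConfiguration.create();
      try (Connection conn = ConnectionFactory.createConnection(conf);
           Table table = conn.getTable(TableName.valueOf("my_table"))) {  // assumed table
        Scan scan = new Scan();
        scan.addFamily(Bytes.toBytes("essential"));                       // assumed family
        // Inherited from Query after this change: load the non-essential
        // column families only for rows that actually match.
        scan.setLoadColumnFamiliesOnDemand(true);
        try (ResultScanner rs = table.getScanner(scan)) {
          for (Result r : rs) {
            System.out.println(Bytes.toStringBinary(r.getRow()));
          }
        }
      }
    }
  }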
 

[09/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncTableImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncTableImpl.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncTableImpl.html
new file mode 100644
index 000..2695dde
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncTableImpl.html
@@ -0,0 +1,711 @@
+AsyncTableImpl (Apache HBase 2.0.0-SNAPSHOT API)
+
+org.apache.hadoop.hbase.client
+Class AsyncTableImpl
+
+java.lang.Object
+  org.apache.hadoop.hbase.client.AsyncTableImpl
+
+All Implemented Interfaces: AsyncTable
+
+@InterfaceAudience.Private
+class AsyncTableImpl
+extends Object
+implements AsyncTable
+
+The implementation of AsyncTable.
+
+Nested Class Summary:
+  private static interface AsyncTableImpl.Converter<D,I,S>
+  private static interface AsyncTableImpl.RpcCall<RESP,REQ>
+
+Field Summary:
+  private AsyncConnectionImpl conn
+  private long operationTimeoutNs
+  private long readRpcTimeoutNs
+  private TableName tableName
+  private long writeRpcTimeoutNs
+
+Constructor Summary:
+  AsyncTableImpl(AsyncConnectionImpl conn, TableName tableName)
+
+Method Summary (All Methods / Static Methods / Instance Methods / Concrete Methods):
+  private static <REQ,PREQ,PRESP,RESP> CompletableFuture<RESP> call(HBaseRpcController controller, HRegionLocation loc, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.Interface stub, REQ req, AsyncTableImpl.Converter<PREQ,byte[],REQ> reqConvert, AsyncTableImpl.RpcCall<PRESP,PREQ> rpcCall, AsyncTableImpl.Converter<RESP,HBaseRpcController,PRESP> respConverter)
+  CompletableFuture<Void> delete(Delete delete) - Deletes the specified cells/row.
+  CompletableFuture<Boolean> exists(Get get) - Test for the existence of columns in the table, as specified by the Get.
+  CompletableFuture<Result> get(Get get) - Extracts certain cells from a given row.
+  org.apache.hadoop.conf.Configuration getConfiguration() - Returns the Configuration object used by this instance.
+  TableName getName() - Gets the fully qualified table name instance of this table.
+  long getOperationTimeout(TimeUnit ...
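A brief, hedged sketch of how this asynchronous table interface is used. How an AsyncTable instance is obtained in this snapshot is not shown on this page, so the sketch takes one as a parameter; the call shapes (get, delete returning CompletableFuture) come from the method summary above.

  import java.util.concurrent.CompletableFuture;
  import org.apache.hadoop.hbase.client.AsyncTable;
  import org.apache.hadoop.hbase.client.Delete;
  import org.apache.hadoop.hbase.client.Get;

  public class AsyncTableSketch {
    // Every call returns a CompletableFuture instead of blocking the caller thread.
    static CompletableFuture<Void> readThenDelete(AsyncTable table, byte[] row) {
      return table.get(new Get(row))                 // "Extracts certain cells from a given row."
          .thenCompose(result -> {
            System.out.println("cells read: " + result.size());
            return table.delete(new Delete(row));    // "Deletes the specified cells/row."
          });
    }
  }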

[21/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
index 37c4991..35afe20 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
In the list of packages that use HBaseIOException, org.apache.hadoop.hbase.procedure2 is added ahead of procedure2.store.wal; the entries that follow (quotas, regionserver, regionserver.wal, rsgroup, security, security.visibility, snapshot, util) only renumber.

A new section is added:

Uses of HBaseIOException in org.apache.hadoop.hbase.procedure2
Subclasses of HBaseIOException in org.apache.hadoop.hbase.procedure2:
  class BadProcedureException
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
index dc52d45..44e2d29 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
In the list of packages that use HColumnDescriptor, org.apache.hadoop.hbase.mob.mapreduce is removed; the remaining entries (regionserver, replication, rsgroup, security, security.access, security.visibility, snapshot, thrift, tool, ...) only renumber.

The section "Uses of HColumnDescriptor in org.apache.hadoop.hbase.mob.mapreduce" is removed along with its contents:
  fields: private HColumnDescriptor SweepReducer.family; private HColumnDescriptor MemStoreWrapper.hcd
  methods: private void SweepJob.removeUnusedFiles(org.apache.hadoop.mapreduce.Job job, TableName tn, HColumnDescriptor hcd) - Archives unused mob files.
           int SweepJob.sweep(TableName tn, HColumnDescriptor family) - Runs MapReduce to do the sweeping on the mob files.
  constructor: MemStoreWrapper(org.apache.hadoop.mapreduce.Reducer.Context context, org.apache.hadoop.fs.FileSystem fs, BufferedMutator table, HColumnDescriptor hcd, MemStore memstore, CacheConfig cacheConfig)
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
index 9c5db45..7504456 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
New rows are added for the async region locator:
  default CompletableFuture<HRegionLocation> AsyncTableRegionLocator.getRegionLocation(byte[] row) - Finds the region on which the given row is being served.
  CompletableFuture<HRegionLocation> ...

[19/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 166303b..9d9025d 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
In the list of packages that use TableName, org.apache.hadoop.hbase.mob.mapreduce is removed; the remaining entries (namespace, procedure.flush, quotas, regionserver, regionserver.wal, replication, replication.regionserver, rest, rest.client, rsgroup, security.access, security.visibility, snapshot, thrift, tool, util, wal, and the org.apache.hbase.archetypes.exemplars.client / shaded_client packages with their descriptions) only renumber.

New private TableName fields appear for the async client: AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName, AsyncTableImpl.tableName, AsyncTableRegionLocatorImpl.tableName, and AsyncSingleRequestRpcRetryingCaller.tableName. The existing tableName fields (AsyncRequestFutureImpl, RpcRetryingCallerWithReadReplicas, TableState, ClientScanner, HTable) only renumber.

New TableName getName() rows are added for AsyncTableRegionLocator ("Gets the fully qualified table name instance of the table whose region we want to locate."), AsyncTableImpl, AsyncTable ("Gets the fully qualified table name instance of this table."), and AsyncTableRegionLocatorImpl, alongside the existing RegionLocator.getName(), BufferedMutator.getName(), and HTable.getName() rows.

Other changes on this page: a method taking (Object[] results, boolean needResults, CancellableRegionServerCallable callable, int curTimeout) has its last parameter renamed to rpcTimeout; a package-private CompletableFuture<HRegionLocation> AsyncRegionLocator.getRegionLocation(TableName tableName, byte[] row, boolean reload) row is added; and AsyncConnection gains AsyncTableRegionLocator getRegionLocator(TableName tableName) - "Retrieve a AsyncRegionLocator implementation to inspect region information on a table." The remaining rows (ConnectionImplementation.getNumberOfCachedRegionLocations, ClusterConnection/ConnectionImplementation.getRegionLocation, RpcRetryingCallerWithReadReplicas.getRegionLocations, RegionAdminServiceCallable.getRegionLocations, RegionLocator) only renumber.

[11/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html
new file mode 100644
index 000..0c78430
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html
@@ -0,0 +1,449 @@
+AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder (Apache HBase 2.0.0-SNAPSHOT API)
+
+org.apache.hadoop.hbase.client
+Class AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder<T>
+
+java.lang.Object
+  org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder<T>
+
+Enclosing class: AsyncRpcRetryingCallerFactory
+
+public class AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder<T>
+extends Object
+
+Field Summary:
+  private AsyncSingleRequestRpcRetryingCaller.Callable<T> callable
+  private long operationTimeoutNs
+  private byte[] row
+  private long rpcTimeoutNs
+  private TableName tableName
+
+Constructor Summary:
+  SingleRequestCallerBuilder()
+
+Method Summary (all instance, concrete):
+  AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder<T> action(AsyncSingleRequestRpcRetryingCaller.Callable<T> callable)
+  AsyncSingleRequestRpcRetryingCaller<T> build()
+  CompletableFuture<T> call() - Shortcut for build().call()
+  AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder<T> operationTimeout(long operationTimeout, TimeUnit unit)
+  AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder<T> row(byte[] row)
+  AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder<T> rpcTimeout(long rpcTimeout, TimeUnit unit)
+  AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder<T> table(TableName tableName)
+
+Methods inherited from class java.lang.Object: clone, equals, finalize, getClass, ...

[03/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/HTable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/HTable.html 
b/devapidocs/org/apache/hadoop/hbase/client/HTable.html
index 6e2b211..85fd23a 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/HTable.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/HTable.html
In HTable, the third parameter of batch(List<? extends Row> actions, Object[] results, int timeout) is renamed to rpcTimeout, both in the method summary and in the public void batch(...) detail. The other hunks on this page (batchCallback, doBatchWithCallback, delete(Delete), delete(List<Delete>), put(Put), put(List<Put>)) change only their generated anchors; the signatures are unchanged.

[46/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/org/apache/hadoop/hbase/client/Get.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Get.html 
b/apidocs/org/apache/hadoop/hbase/client/Get.html
index 5f03b63..72734d4 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Get.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Get.html
The generated method-index script grows from 38 entries (i0..i37) to 39 (i0..i38), and the "Fields inherited from class org.apache.hadoop.hbase.client.Query" line now includes loadColumnFamiliesOnDemand: colFamTimeRangeMap, consistency, filter, loadColumnFamiliesOnDemand, targetReplicaId.

In the Get method summary, Get setLoadColumnFamiliesOnDemand(boolean value) is added - "Set the value indicating whether loading CFs on demand should be allowed (cluster default is false)." The rows that follow (setMaxResultsPerColumnFamily, setMaxVersions(), setMaxVersions(int), setReplicaId, setRowOffsetPerColumnFamily, setTimeRange, setTimeStamp, toMap) only renumber, and the "Methods inherited from class org.apache.hadoop.hbase.client.Query" line now also lists doLoadColumnFamiliesOnDemand and getLoadColumnFamiliesOnDemandValue alongside getACL, getAuthorizations, getColumnFamilyTimeRange, getConsistency, getFilter, getIsolationLevel, and getReplicaId.
 
 
The remaining hunks on this page (isCheckExistenceOnly, setCheckExistenceOnly, the deprecated isClosestRowBefore/setClosestRowBefore pair, addFamily - "Get all columns from the specified family. Overrides previous calls to addColumn for this family." - and addColumn(byte[] family, byte[] qualifier) - "Get the column from the specific family with the ...") change only their generated anchors; no signatures or descriptions change.
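A short hedged illustration of the addColumn/addFamily interplay described above (row and family names are assumptions): a later addFamily call overrides earlier addColumn calls for that family.

  import org.apache.hadoop.hbase.client.Get;
  import org.apache.hadoop.hbase.util.Bytes;

  public class GetColumnSelectionSketch {
    public static void main(String[] args) {
      byte[] cf = Bytes.toBytes("cf");                    // assumed family
      Get get = new Get(Bytes.toBytes("row-1"));          // assumed row key
      get.addColumn(cf, Bytes.toBytes("q1"));             // ask for one qualifier first...
      get.addFamily(cf);                                  // ...then widen to the whole family,
                                                          // overriding the addColumn above
      System.out.println(get.getFamilyMap());             // null qualifier set => entire family
    }
  }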

[28/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/deprecated-list.html
--
diff --git a/devapidocs/deprecated-list.html b/devapidocs/deprecated-list.html
index 13f153e..9027794 100644
--- a/devapidocs/deprecated-list.html
+++ b/devapidocs/deprecated-list.html
@@ -600,185 +600,188 @@
In the deprecated-API list, a single entry is added after HttpServer.getPort(): org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv.getProcedureQueue(). Every following entry (CellUtil.getQualifierBufferShallowCopy, Region.getReadpoint(IsolationLevel), MetaTableAccessor.getRegion, HRegion.getRegionDir, TableName.getRowComparator, the Table/HTable/HTableWrapper/RemoteHTable.getRpcTimeout group, MetaTableAccessor.getScanForTableName, StoreFileReader.getScanner, KeyValue.KVComparator.getShortMidpointKey, ImmutableBytesWritable.getSize, Bytes.getSize, HStore.getStoreHomedir, HTableDescriptor.getTableDir, HTableInterface.getTableName, ZKDataMigrator.getTableState, KeyValue.getType, HFileScanner.getValueString, Table/HTableInterface.getWriteBufferSize, TableMapReduceUtil.initCredentialsForCluster, HRegion.initialize, HTableInterface.isAutoFlush, Get.isClosestRowBefore, KeyValue.iscreate, ClusterConnection.isDeadServer, KeyValue.isDelete, BaseLogCleanerDelegate.isLogDeletable, ClusterConnection/ConnectionImplementation.isMasterRunning, VisibilityClient.listLabels, CellUtil.matchingRow, ...) keeps its deprecation note and is unchanged apart from renumbering of the generated rows.

[32/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/book.html
--
diff --git a/book.html b/book.html
index 876f41c..2810c41 100644
--- a/book.html
+++ b/book.html
In the page head of the Apache HBase Reference Guide, the Font Awesome stylesheet link changes from version 4.4.0 to 4.2.0 (https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.2.0/css/font-awesome.min.css).
In the table of contents, a new chapter "45. Special Cases" is inserted after "44. Operational and Performance Configuration Options", and every later chapter shifts up by one number: for example "HBase, MapReduce, and the CLASSPATH" moves from 45 to 46 and the troubleshooting "Case Studies" chapter from 122 to 123. The chapter titles themselves under HBase and MapReduce, Securing Apache HBase, Architecture, Apache HBase APIs, Apache HBase External APIs, Thrift API and Filter Language, HBase and Spark, Apache HBase Coprocessors, Apache HBase Performance Tuning, and Troubleshooting and Debugging Apache HBase are unchanged.

[08/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.ExceptionListener.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.ExceptionListener.html
 
b/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.ExceptionListener.html
index 7deca6c..1acadd1 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.ExceptionListener.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.ExceptionListener.html
Only the generated anchors change for BufferedMutator.ExceptionListener ("Listens for asynchronous exceptions on a BufferedMutator.", @InterfaceAudience.Public, @InterfaceStability.Evolving) and its onException(RetriesExhaustedWithDetailsException exception, BufferedMutator mutator) throws RetriesExhaustedWithDetailsException method; no declarations change.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.html 
b/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.html
index 99260f2..8347580 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6};
+var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -230,6 +230,18 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 Sends a Mutation to the table.
 
 
+
+void
+setOperationTimeout(inttimeout)
+Set operation timeout for this mutator instance
+
+
+
+void
+setRpcTimeout(inttimeout)
+Set rpc timeout for this mutator instance
+
+
 
 
 
@@ -343,7 +355,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 
 getWriteBufferSize
 longgetWriteBufferSize()
@@ -356,6 +368,26 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
+
+
+
+
+
+setRpcTimeout
+voidsetRpcTimeout(inttimeout)
+Set rpc timeout for this mutator instance
+
+
+
+
+
+
+
+setOperationTimeout
+voidsetOperationTimeout(inttimeout)
+Set operation timeout for this mutator instance
+
+
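A hedged sketch of a BufferedMutator configured with the two new setters. getBufferedMutator, mutate, flush, and close are existing public API; the table name, timeout values, and data are assumptions, and the timeout units are not stated on this page.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.*;
  import org.apache.hadoop.hbase.util.Bytes;

  public class BufferedMutatorTimeoutSketch {
    public static void main(String[] args) throws Exception {
      Configuration conf = HBaseConfiguration.create();
      try (Connection conn = ConnectionFactory.createConnection(conf);
           BufferedMutator mutator =
               conn.getBufferedMutator(TableName.valueOf("my_table"))) {  // assumed table
        // New in this change: per-mutator timeouts (values here are arbitrary assumptions).
        mutator.setOperationTimeout(60000);
        mutator.setRpcTimeout(10000);
        mutator.mutate(new Put(Bytes.toBytes("row-1"))
            .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v")));
        mutator.flush();
      }
    }
  }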
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/BufferedMutatorImpl.QueueRowAccess.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/BufferedMutatorImpl.QueueRowAccess.html
 
b/devapidocs/org/apache/hadoop/hbase/client/BufferedMutatorImpl.QueueRowAccess.html
index 7ecf3ac..368253f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/BufferedMutatorImpl.QueueRowAccess.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/BufferedMutatorImpl.QueueRowAccess.html
For BufferedMutatorImpl.QueueRowAccess (a private class extending Object and implementing RowAccess<Row>), only the generated anchors change for the remainder field, the QueueRowAccess() constructor, and the reset(), iterator(), size(), restoreRemainder(), and isEmpty() methods; no declarations change.

[24/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/KeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/KeyValue.html
index 70e8f69..200da30 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeyValue.html
For KeyValue ("An HBase Key/Value. This is the fundamental HBase Type.", @InterfaceAudience.Private, extends Object, implements ExtendedCell), only the generated anchors change for the listed members: EMPTY_ARRAY_LIST, LOG, COLUMN_FAMILY_DELIMITER, COLUMN_FAMILY_DELIM_ARRAY, the deprecated COMPARATOR, META_COMPARATOR, and RAW_COMPARATOR (use CellComparator.COMPARATOR, CellComparator.META_COMPARATOR, and Bytes.BYTES_RAWCOMPARATOR instead), KEY_LENGTH_SIZE, TYPE_SIZE, ROW_LENGTH_SIZE, FAMILY_LENGTH_SIZE, TIMESTAMP_SIZE, TIMESTAMP_TYPE_SIZE, KEY_INFRASTRUCTURE_SIZE, ROW_OFFSET, ROW_KEY_OFFSET, and KEYVALUE_INFRASTRUCTURE_SIZE; no declarations change.

[22/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.

2016-10-17 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
index c0e8535..97d04ae 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
In the list of packages that use Cell, org.apache.hadoop.hbase.mob.mapreduce is removed; the remaining entries (regionserver, regionserver.handler, regionserver.querymatcher, regionserver.wal, replication, replication.regionserver, rest, rest.model, security.access, security.visibility, thrift, util, util.test, wal) only renumber.

Content changes on this page: KeyValueUtil.copyCellTo(Cell cell, byte[] buf, int offset) gains an int len parameter ("Write the given cell in KeyValue serialization format into the given buf and return a new KeyValue object around that."); KeyValueUtil.oswrite(Cell cell, OutputStream out, boolean withTags) now returns int instead of void; and the section "Uses of Cell in org.apache.hadoop.hbase.mob.mapreduce", containing void MemStoreWrapper.addToMemstore(Cell cell) ("Adds a Cell into the memstore."), is removed.
 
 
 
@@ -4979,81 +4956,77 @@ service.
 
 
 long
-Store.add(Cellcell)
-Adds a value to the memstore
-
-
-
-long
 AbstractMemStore.add(Cellcell)
 Write an update
 
 
-
+
 long
 MemStore.add(Cellcell)
 Write an update
 
 
-
+
 boolean
 CellSet.add(Celle)
 
-
+
 boolean
 CellFlatMap.CellFlatMapCollection.add(Cellk)
 
-
+
 long
-HStore.add(Cellcell)
+HStore.add(Cellcell)
+Adds a value to the memstore
+
 
-
+
 long
 MutableSegment.add(Cellcell,
booleanmslabUsed)
 Adds the given cell into the segment
 
 
-
+
 void
 CellSink.append(Cellcell)
 Append the given cell
 
 
-
+
 void
 StripeMultiFileWriter.BoundaryMultiWriter.append(Cellcell)
 
-
+
 void
 StripeMultiFileWriter.SizeMultiWriter.append(Cellcell)
 
-
+
 void
 StoreFileWriter.append(Cellcell)
 
-
+
 void
 DateTieredMultiFileWriter.append(Cellcell)
 
-
+
 private void
 StoreFileWriter.appendDeleteFamilyBloomFilter(Cellcell)
 
-
+
 private void
 StoreFileWriter.appendGeneralBloomfilter(Cellcell)
 
-
+
 private long
 HRegion.applyToMemstore(Storestore,
Cellcell)
 
-
+
 boolean
 NonReversedNonLazyKeyValueScanner.backwardSeek(Cellkey)
 
-
+
 boolean
 SegmentScanner.backwardSeek(Cellkey)
 Seek the scanner at or before the row of specified Cell, it 
firstly
@@ -5063,17 +5036,17 @@ service.
  previous row of specified KeyValue
 
 
-
+
 boolean
 MemStoreScanner.backwardSeek(Cellcell)
 Set the scanner at the seek key.
 
 
-
+
 boolean
 ReversedKeyValueHeap.backwardSeek(CellseekKey)
 
-
+
 boolean
 KeyValueScanner.backwardSeek(Cellkey)
 Seek the scanner at or before the row of specified Cell, it 
firstly
@@ -5083,33 +5056,33 @@ service.
  previous row of specified KeyValue
 
 
-
+
 boolean
 StoreFileScanner.backwardSeek(Cellkey)
 
-
+
 boolean
 ReversedStoreScanner.backwardSeek(Cellkey)
 
-
+
 Cell
 CellSet.ceiling(Celle)
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true;
 title="class or interface in java.util">Map.EntryCell,Cell
 CellFlatMap.ceilingEntry(Cellk)
 
-
+
 Cell
 CellFlatMap.ceilingKey(Cellk)
 
-
+
 private boolean
 StoreFileReader.checkGeneralBloomFilter(byte[]key,
CellkvKey,
BloomFilterbloomFilter)
 
-
+
 protected void
 StoreScanner.checkScanOrder(CellprevKV,
   Cellkv,
@@ -5117,116 +5090,116 @@ service.
 Check whether scan as expected order
 
 
-
+
 protected void
 ReversedStoreScanner.checkScanOrder(CellprevKV,
   Cellkv,
   CellComparatorcomparator)
 
-
+
 int
 Segment.compare(Cellleft,
Cellright)
 
-
+
 int
 KeyValueHeap.KVScannerComparator.compare(Cellleft,
Cellright)
 Compares two KeyValue
 
 
-
+
 int
 

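Two of the hunks in the class-use diff above capture small API shifts in KeyValueUtil: copyCellTo now takes an explicit length in addition to the offset, and oswrite returns an int instead of void. A rough sketch of the widened copyCellTo signature, assuming KeyValueUtil.length(Cell) for sizing the buffer (these are internal devapidocs APIs, shown here for illustration only):

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.KeyValueUtil;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CopyCellSketch {
      public static void main(String[] args) {
        Cell cell = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"),
            Bytes.toBytes("q"), Bytes.toBytes("v"));

        // Size the destination buffer to the cell's KeyValue serialization length,
        // then pass that length explicitly, matching the new fourth parameter above.
        int len = KeyValueUtil.length(cell);
        byte[] buf = new byte[len];
        KeyValue copy = KeyValueUtil.copyCellTo(cell, buf, 0, len);
        System.out.println("copied KeyValue of length " + copy.getLength());
      }
    }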
hbase git commit: Revert "HBASE-16750 hbase compilation failed on power system (Saravanan Krishnamoorthy)"

2016-10-10 Thread dimaspivak
Repository: hbase
Updated Branches:
  refs/heads/master 6b6a80187 -> 3c35a722d


Revert "HBASE-16750 hbase compilation failed on power system (Saravanan 
Krishnamoorthy)"

This reverts commit d1e40bf0bda4d82ab217e6b715e7c4dd5a6b9af2.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3c35a722
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3c35a722
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3c35a722

Branch: refs/heads/master
Commit: 3c35a722d9c1c77826d7c86ee204274bfdaae65f
Parents: 6b6a801
Author: Dima Spivak 
Authored: Mon Oct 10 10:08:41 2016 -0500
Committer: Dima Spivak 
Committed: Mon Oct 10 10:08:41 2016 -0500

--
 pom.xml | 6 ++
 1 file changed, 2 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3c35a722/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 7715278..2d341c0 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1009,7 +1009,7 @@
   
 org.asciidoctor
 asciidoctorj-pdf
-1.5.0-alpha.11
+1.5.0-alpha.6
   
 
 
@@ -1019,8 +1019,6 @@
   coderay
   
 ${project.version}
-${project.build.sourceDirectory}
-
   
 
 
@@ -1235,7 +1233,7 @@
 1.3.9-1
 6.18
 2.10.3
-1.5.3
+1.5.2.1
 
 /usr
 /etc/hbase



hbase git commit: HBASE-16753 There is a mismatch between suggested Java version in hbase-env.sh

2016-10-05 Thread dimaspivak
Repository: hbase
Updated Branches:
  refs/heads/master 3aa4dfa73 -> 1f1a13f2e


HBASE-16753 There is a mismatch between suggested Java version in hbase-env.sh

Signed-off-by: Dima Spivak 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1f1a13f2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1f1a13f2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1f1a13f2

Branch: refs/heads/master
Commit: 1f1a13f2e2a28eb818cd85b6c50e47b52aaa2c2e
Parents: 3aa4dfa
Author: Umesh Agashe 
Authored: Mon Oct 3 14:02:28 2016 -0700
Committer: Dima Spivak 
Committed: Wed Oct 5 10:16:41 2016 -0700

--
 bin/hbase-config.sh| 2 +-
 conf/hbase-env.cmd | 2 +-
 conf/hbase-env.sh  | 4 ++--
 src/main/asciidoc/_chapters/configuration.adoc | 2 +-
 4 files changed, 5 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1f1a13f2/bin/hbase-config.sh
--
diff --git a/bin/hbase-config.sh b/bin/hbase-config.sh
index d7d7e6f..bf4ee92 100644
--- a/bin/hbase-config.sh
+++ b/bin/hbase-config.sh
@@ -132,7 +132,7 @@ if [ -z "$JAVA_HOME" ]; then
 | Please download the latest Sun JDK from the Sun Java web site|
 | > http://www.oracle.com/technetwork/java/javase/downloads|
 |  |
-| HBase requires Java 1.7 or later.|
+| HBase requires Java 1.8 or later.|
 +==+
 EOF
 exit 1

http://git-wip-us.apache.org/repos/asf/hbase/blob/1f1a13f2/conf/hbase-env.cmd
--
diff --git a/conf/hbase-env.cmd b/conf/hbase-env.cmd
index d16de55..8c8597e 100644
--- a/conf/hbase-env.cmd
+++ b/conf/hbase-env.cmd
@@ -18,7 +18,7 @@
 
 @rem Set environment variables here.
 
-@rem The java implementation to use.  Java 1.7+ required.
+@rem The java implementation to use.  Java 1.8+ required.
 @rem set JAVA_HOME=c:\apps\java
 
 @rem Extra Java CLASSPATH elements.  Optional.

http://git-wip-us.apache.org/repos/asf/hbase/blob/1f1a13f2/conf/hbase-env.sh
--
diff --git a/conf/hbase-env.sh b/conf/hbase-env.sh
index 31e8441..d9879c6 100644
--- a/conf/hbase-env.sh
+++ b/conf/hbase-env.sh
@@ -24,8 +24,8 @@
 # so try to keep things idempotent unless you want to take an even deeper look
 # into the startup scripts (bin/hbase, etc.)
 
-# The java implementation to use.  Java 1.7+ required.
-# export JAVA_HOME=/usr/java/jdk1.6.0/
+# The java implementation to use.  Java 1.8+ required.
+# export JAVA_HOME=/usr/java/jdk1.8.0/
 
 # Extra Java CLASSPATH elements.  Optional.
 # export HBASE_CLASSPATH=

http://git-wip-us.apache.org/repos/asf/hbase/blob/1f1a13f2/src/main/asciidoc/_chapters/configuration.adoc
--
diff --git a/src/main/asciidoc/_chapters/configuration.adoc 
b/src/main/asciidoc/_chapters/configuration.adoc
index 4804332..048b047 100644
--- a/src/main/asciidoc/_chapters/configuration.adoc
+++ b/src/main/asciidoc/_chapters/configuration.adoc
@@ -729,7 +729,7 @@ The following lines in the _hbase-env.sh_ file show how to 
set the `JAVA_HOME` e
 
 
 # The java implementation to use.
-export JAVA_HOME=/usr/java/jdk1.7.0/
+export JAVA_HOME=/usr/java/jdk1.8.0/
 
 # The maximum amount of heap to use. Default is left to JVM default.
 export HBASE_HEAPSIZE=4G



[07/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColCell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColCell.html 
b/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColCell.html
index 695f6db..5965e94 100644
--- a/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColCell.html
+++ b/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColCell.html
@@ -132,7 +132,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-private static class CellUtil.FirstOnRowColCell
+private static class CellUtil.FirstOnRowColCell
 extends CellUtil.FirstOnRowCell
 
 
@@ -287,7 +287,7 @@ extends 
 
 fArray
-private finalbyte[] fArray
+private finalbyte[] fArray
 
 
 
@@ -296,7 +296,7 @@ extends 
 
 foffset
-private finalint foffset
+private finalint foffset
 
 
 
@@ -305,7 +305,7 @@ extends 
 
 flength
-private finalbyte flength
+private finalbyte flength
 
 
 
@@ -314,7 +314,7 @@ extends 
 
 qArray
-private finalbyte[] qArray
+private finalbyte[] qArray
 
 
 
@@ -323,7 +323,7 @@ extends 
 
 qoffset
-private finalint qoffset
+private finalint qoffset
 
 
 
@@ -332,7 +332,7 @@ extends 
 
 qlength
-private finalint qlength
+private finalint qlength
 
 
 
@@ -349,7 +349,7 @@ extends 
 
 FirstOnRowColCell
-publicFirstOnRowColCell(byte[]rArray,
+publicFirstOnRowColCell(byte[]rArray,
  introffset,
  shortrlength,
  byte[]fArray,
@@ -374,7 +374,7 @@ extends 
 
 getFamilyArray
-publicbyte[]getFamilyArray()
+publicbyte[]getFamilyArray()
 Description copied from 
interface:Cell
 Contiguous bytes composed of legal HDFS filename characters 
which may start at any index in the
  containing array. Max length is Byte.MAX_VALUE, which is 127 bytes.
@@ -394,7 +394,7 @@ extends 
 
 getFamilyOffset
-publicintgetFamilyOffset()
+publicintgetFamilyOffset()
 
 Specified by:
 getFamilyOffsetin
 interfaceCell
@@ -411,7 +411,7 @@ extends 
 
 getFamilyLength
-publicbytegetFamilyLength()
+publicbytegetFamilyLength()
 
 Specified by:
 getFamilyLengthin
 interfaceCell
@@ -428,7 +428,7 @@ extends 
 
 getQualifierArray
-publicbyte[]getQualifierArray()
+publicbyte[]getQualifierArray()
 Description copied from 
interface:Cell
 Contiguous raw bytes that may start at any index in the 
containing array.
 
@@ -447,7 +447,7 @@ extends 
 
 getQualifierOffset
-publicintgetQualifierOffset()
+publicintgetQualifierOffset()
 
 Specified by:
 getQualifierOffsetin
 interfaceCell
@@ -464,7 +464,7 @@ extends 
 
 getQualifierLength
-publicintgetQualifierLength()
+publicintgetQualifierLength()
 
 Specified by:
 getQualifierLengthin
 interfaceCell

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSByteBufferedCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSByteBufferedCell.html
 
b/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSByteBufferedCell.html
index 29dd9e4..cad2f03 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSByteBufferedCell.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSByteBufferedCell.html
@@ -138,7 +138,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-private static class CellUtil.FirstOnRowColTSByteBufferedCell
+private static class CellUtil.FirstOnRowColTSByteBufferedCell
 extends CellUtil.FirstOnRowColByteBufferedCell
 
 
@@ -256,7 +256,7 @@ extends 
 
 ts
-privatelong ts
+privatelong ts
 
 
 
@@ -273,7 +273,7 @@ extends 
 
 FirstOnRowColTSByteBufferedCell
-publicFirstOnRowColTSByteBufferedCell(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBufferrBuffer,
+publicFirstOnRowColTSByteBufferedCell(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBufferrBuffer,
introffset,
shortrlength,
http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBufferfBuffer,
@@ -299,7 +299,7 @@ extends 
 
 getTimestamp
-publiclonggetTimestamp()
+publiclonggetTimestamp()
 
 Specified by:
 getTimestampin
 interfaceCell

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSCell.html 
b/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSCell.html
index 6027eed..50a8da6 100644
--- 

[06/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/CellUtil.TagRewriteCell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/CellUtil.TagRewriteCell.html 
b/devapidocs/org/apache/hadoop/hbase/CellUtil.TagRewriteCell.html
index 93c76dc..94ef7ee 100644
--- a/devapidocs/org/apache/hadoop/hbase/CellUtil.TagRewriteCell.html
+++ b/devapidocs/org/apache/hadoop/hbase/CellUtil.TagRewriteCell.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 
 
 All Implemented Interfaces:
-Cell, HeapSize, SettableSequenceId, SettableTimestamp
+http://docs.oracle.com/javase/8/docs/api/java/lang/Cloneable.html?is-external=true;
 title="class or interface in java.lang">Cloneable, Cell, ExtendedCell, HeapSize, SettableSequenceId, SettableTimestamp
 
 
 Direct Known Subclasses:
@@ -122,9 +122,9 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-private static class CellUtil.TagRewriteCell
+private static class CellUtil.TagRewriteCell
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-implements Cell, SettableSequenceId, SettableTimestamp, HeapSize
+implements ExtendedCell
 This can be used when a Cell has to change with 
addition/removal of one or more tags. This is an
  efficient way to do so in which only the tags bytes part need to recreated 
and copied. All other
  parts, refer to the original Cell.
@@ -237,62 +237,73 @@ implements 
+int
+getSerializedSize(booleanwithTags)
+
+
 byte[]
 getTagsArray()
 
-
+
 int
 getTagsLength()
 
-
+
 int
 getTagsOffset()
 
-
+
 long
 getTimestamp()
 
-
+
 byte
 getTypeByte()
 
-
+
 byte[]
 getValueArray()
 Contiguous raw bytes that may start at any index in the 
containing array.
 
 
-
+
 int
 getValueLength()
 
-
+
 int
 getValueOffset()
 
-
+
 long
 heapSize()
 
-
+
 void
 setSequenceId(longseqId)
 Sets with the given seqId.
 
 
-
+
 void
 setTimestamp(byte[]ts,
 inttsOffset)
 Sets with the given timestamp.
 
 
-
+
 void
 setTimestamp(longts)
 Sets with the given timestamp.
 
 
+
+int
+write(http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true;
 title="class or interface in java.io">OutputStreamout,
+ booleanwithTags)
+Write this cell to an OutputStream in a KeyValue format.
+
+
 
 
 
@@ -370,7 +381,7 @@ implements 
 
 getRowArray
-publicbyte[]getRowArray()
+publicbyte[]getRowArray()
 Description copied from 
interface:Cell
 Contiguous raw bytes that may start at any index in the 
containing array. Max length is
  Short.MAX_VALUE which is 32,767 bytes.
@@ -388,7 +399,7 @@ implements 
 
 getRowOffset
-publicintgetRowOffset()
+publicintgetRowOffset()
 
 Specified by:
 getRowOffsetin
 interfaceCell
@@ -403,7 +414,7 @@ implements 
 
 getRowLength
-publicshortgetRowLength()
+publicshortgetRowLength()
 
 Specified by:
 getRowLengthin
 interfaceCell
@@ -418,7 +429,7 @@ implements 
 
 getFamilyArray
-publicbyte[]getFamilyArray()
+publicbyte[]getFamilyArray()
 Description copied from 
interface:Cell
 Contiguous bytes composed of legal HDFS filename characters 
which may start at any index in the
  containing array. Max length is Byte.MAX_VALUE, which is 127 bytes.
@@ -436,7 +447,7 @@ implements 
 
 getFamilyOffset
-publicintgetFamilyOffset()
+publicintgetFamilyOffset()
 
 Specified by:
 getFamilyOffsetin
 interfaceCell
@@ -451,7 +462,7 @@ implements 
 
 getFamilyLength
-publicbytegetFamilyLength()
+publicbytegetFamilyLength()
 
 Specified by:
 getFamilyLengthin
 interfaceCell
@@ -466,7 +477,7 @@ implements 
 
 getQualifierArray
-publicbyte[]getQualifierArray()
+publicbyte[]getQualifierArray()
 Description copied from 
interface:Cell
 Contiguous raw bytes that may start at any index in the 
containing array.
 
@@ -483,7 +494,7 @@ implements 
 
 getQualifierOffset
-publicintgetQualifierOffset()
+publicintgetQualifierOffset()
 
 Specified by:
 getQualifierOffsetin
 interfaceCell
@@ -498,7 +509,7 @@ implements 
 
 getQualifierLength
-publicintgetQualifierLength()
+publicintgetQualifierLength()
 
 Specified by:
 getQualifierLengthin
 interfaceCell
@@ -513,7 +524,7 @@ implements 
 
 getTimestamp
-publiclonggetTimestamp()
+publiclonggetTimestamp()
 
 Specified by:
 getTimestampin
 interfaceCell
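The TagRewriteCell fragment above shows the class now implementing ExtendedCell, picking up getSerializedSize(boolean) and an int-returning write(OutputStream, boolean). A minimal sketch of how those two methods fit together, assuming write throws IOException and its return value is the byte count (both are assumptions; only the signatures appear in the diff):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.hbase.ExtendedCell;

    public class ExtendedCellSketch {
      // Serialize an ExtendedCell in KeyValue format, with or without tags.
      static byte[] toKeyValueBytes(ExtendedCell cell, boolean withTags) throws IOException {
        ByteArrayOutputStream out =
            new ByteArrayOutputStream(cell.getSerializedSize(withTags));
        int written = cell.write(out, withTags);  // presumably the number of bytes written
        System.out.println("wrote " + written + " bytes");
        return out.toByteArray();
      }
    }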

[12/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/allclasses-frame.html
--
diff --git a/devapidocs/allclasses-frame.html b/devapidocs/allclasses-frame.html
index 5b7277c..b673f22 100644
--- a/devapidocs/allclasses-frame.html
+++ b/devapidocs/allclasses-frame.html
@@ -81,15 +81,9 @@
 AsyncFSWAL.Payload
 AsyncFSWALProvider
 AsyncFSWALProvider.AsyncWriter
-AsyncHBaseSaslRpcClient
-AsyncHBaseSaslRpcClientHandler
 AsyncProcess
-AsyncProcess.AsyncRequestFuture
-AsyncProcess.BatchErrors
 AsyncProcess.ListRowAccess
-AsyncProcess.ReplicaResultState
 AsyncProcess.RequestSizeChecker
-AsyncProcess.Retry
 AsyncProcess.RowChecker
 AsyncProcess.RowChecker.ReturnCode
 AsyncProcess.RowCheckerHost
@@ -97,6 +91,10 @@
 AsyncProcess.TaskCountChecker
 AsyncProtobufLogWriter
 AsyncProtobufLogWriter.BlockingCompletionHandler
+AsyncRequestFuture
+AsyncRequestFutureImpl
+AsyncRequestFutureImpl.ReplicaResultState
+AsyncRequestFutureImpl.Retry
 AtomicUtils
 Attributes
 AuthenticationKey
@@ -161,6 +159,7 @@
 Batch
 Batch.Call
 Batch.Callback
+BatchErrors
 BigDecimalColumnInterpreter
 BinaryComparator
 BinaryPrefixComparator
@@ -666,6 +665,7 @@
 ExpressionExpander
 ExpressionNode
 ExpressionParser
+ExtendedCell
 FailedLogCloseException
 FailedSanityCheckException
 FailedServerException
@@ -1089,6 +1089,8 @@
 KeepDeletedCells
 KeyLocker
 KeyOnlyFilter
+KeyOnlyFilter.KeyOnlyByteBufferedCell
+KeyOnlyFilter.KeyOnlyCell
 KeyPrefixRegionSplitPolicy
 KeyProvider
 KeyRange
@@ -1238,7 +1240,6 @@
 MemStoreFlusher.WakeupFlushThread
 MemStoreLAB
 MemStoreScanner
-MemStoreScanner.Type
 MemStoreSnapshot
 MemStoreWrapper
 Merge
@@ -1446,6 +1447,9 @@
 NamespacesResource
 NamespaceStateManager
 NamespaceTableAndRegionInfo
+NamespaceTableCfWALEntryFilter
+NettyHBaseSaslRpcClient
+NettyHBaseSaslRpcClientHandler
 NettyRpcClient
 NettyRpcClientConfigHelper
 NettyRpcConnection
@@ -1843,7 +1847,6 @@
 ReplicationTracker
 ReplicationTrackerZKImpl
 ReplicationWALReaderManager
-ReplicationZKLockCleanerChore
 ResizableBlockCache
 ResourceBase
 ResourceConfig
@@ -2001,6 +2004,7 @@
 ScannerModel.FilterModel.ByteArrayComparableModel
 ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType
 ScannerModel.FilterModel.FilterType
+ScannerResetException
 ScannerResource
 ScannerResultGenerator
 ScannerTimeoutException
@@ -2017,7 +2021,6 @@
 SecureBulkLoadEndpoint
 SecureBulkLoadManager
 SecureBulkLoadManager.SecureBulkLoadListener
-SecureBulkLoadUtil
 SecureProtobufLogReader
 SecureProtobufLogWriter
 SecureWALCellCodec
@@ -2159,6 +2162,9 @@
 SplitTransactionImpl
 SplitTransactionImpl.JournalEntryImpl
 SplitTransactionImpl.LoggingProgressable
+SslRMIClientSocketFactorySecure
+SslRMIServerSocketFactorySecure
+SslSelectChannelConnectorSecure
 SslSocketConnectorSecure
 StabilityOptions
 StateDumpServlet
@@ -2219,7 +2225,6 @@
 StoreScanner
 StoreScanner.StoreScannerCompactionRace
 StoreUtils
-Streamable
 StreamUtils
 Strings
 StringUtils
@@ -2275,7 +2280,6 @@
 TableBasedReplicationQueuesClientImpl
 TableBasedReplicationQueuesImpl
 TableCFsUpdater
-TableCfWALEntryFilter
 TableDescriptors
 TableExistsException
 TableHFileArchiveTracker
@@ -2543,7 +2547,6 @@
 ZKSplitLogManagerCoordination
 ZKSplitLogManagerCoordination.TaskFinisher
 ZKSplitLogManagerCoordination.TaskFinisher.Status
-ZKSplitLogManagerCoordination.ZkSplitLogManagerDetails
 ZkSplitLogWorkerCoordination
 ZkSplitLogWorkerCoordination.ZkSplitTaskDetails
 ZKTableArchiveClient

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/allclasses-noframe.html
--
diff --git a/devapidocs/allclasses-noframe.html 
b/devapidocs/allclasses-noframe.html
index 40b0029..af5296d 100644
--- a/devapidocs/allclasses-noframe.html
+++ b/devapidocs/allclasses-noframe.html
@@ -81,15 +81,9 @@
 AsyncFSWAL.Payload
 AsyncFSWALProvider
 AsyncFSWALProvider.AsyncWriter
-AsyncHBaseSaslRpcClient
-AsyncHBaseSaslRpcClientHandler
 AsyncProcess
-AsyncProcess.AsyncRequestFuture
-AsyncProcess.BatchErrors
 AsyncProcess.ListRowAccess
-AsyncProcess.ReplicaResultState
 AsyncProcess.RequestSizeChecker
-AsyncProcess.Retry
 AsyncProcess.RowChecker
 AsyncProcess.RowChecker.ReturnCode
 AsyncProcess.RowCheckerHost
@@ -97,6 +91,10 @@
 AsyncProcess.TaskCountChecker
 AsyncProtobufLogWriter
 AsyncProtobufLogWriter.BlockingCompletionHandler
+AsyncRequestFuture
+AsyncRequestFutureImpl
+AsyncRequestFutureImpl.ReplicaResultState
+AsyncRequestFutureImpl.Retry
 AtomicUtils
 Attributes
 AuthenticationKey
@@ -161,6 +159,7 @@
 Batch
 Batch.Call
 Batch.Callback
+BatchErrors
 BigDecimalColumnInterpreter
 BinaryComparator
 BinaryPrefixComparator
@@ -666,6 +665,7 @@
 ExpressionExpander
 ExpressionNode
 ExpressionParser
+ExtendedCell
 FailedLogCloseException
 FailedSanityCheckException
 FailedServerException
@@ -1089,6 +1089,8 @@
 KeepDeletedCells
 KeyLocker
 KeyOnlyFilter

[17/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
index 4dbb152..3d30c3b 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
@@ -32,26 +32,26 @@
 024import java.io.DataInput;
 025import java.io.DataOutput;
 026import java.io.IOException;
-027import java.math.BigDecimal;
-028import java.math.BigInteger;
-029import java.nio.ByteBuffer;
-030import java.nio.charset.Charset;
-031import java.security.SecureRandom;
-032import java.util.Arrays;
-033import java.util.Collection;
-034import java.util.Comparator;
-035import java.util.Iterator;
-036import java.util.List;
-037
-038import com.google.protobuf.ByteString;
+027import 
java.io.UnsupportedEncodingException;
+028import java.math.BigDecimal;
+029import java.math.BigInteger;
+030import java.nio.ByteBuffer;
+031import java.nio.charset.Charset;
+032import 
java.nio.charset.StandardCharsets;
+033import java.security.SecureRandom;
+034import java.util.Arrays;
+035import java.util.Collection;
+036import java.util.Comparator;
+037import java.util.Iterator;
+038import java.util.List;
 039
 040import org.apache.commons.logging.Log;
 041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-043import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-044import org.apache.hadoop.hbase.Cell;
-045import 
org.apache.hadoop.hbase.CellComparator;
-046import 
org.apache.hadoop.hbase.KeyValue;
+042import org.apache.hadoop.hbase.Cell;
+043import 
org.apache.hadoop.hbase.CellComparator;
+044import 
org.apache.hadoop.hbase.KeyValue;
+045import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+046import 
org.apache.hadoop.hbase.classification.InterfaceStability;
 047import 
org.apache.hadoop.io.RawComparator;
 048import 
org.apache.hadoop.io.WritableComparator;
 049import 
org.apache.hadoop.io.WritableUtils;
@@ -60,2598 +60,2618 @@
 052
 053import 
com.google.common.annotations.VisibleForTesting;
 054import com.google.common.collect.Lists;
-055
-056/**
-057 * Utility class that handles byte 
arrays, conversions to/from other types,
-058 * comparisons, hash code generation, 
manufacturing keys for HashMaps or
-059 * HashSets, and can be used as key in 
maps or trees.
-060 */
-061@SuppressWarnings("restriction")
-062@InterfaceAudience.Public
-063@InterfaceStability.Stable
-064@edu.umd.cs.findbugs.annotations.SuppressWarnings(
-065
value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS",
-066justification="It has been like this 
forever")
-067public class Bytes implements 
ComparableBytes {
-068  //HConstants.UTF8_ENCODING should be 
updated if this changed
-069  /** When we encode strings, we always 
specify UTF8 encoding */
-070  private static final String 
UTF8_ENCODING = "UTF-8";
-071
-072  //HConstants.UTF8_CHARSET should be 
updated if this changed
-073  /** When we encode strings, we always 
specify UTF8 encoding */
-074  private static final Charset 
UTF8_CHARSET = Charset.forName(UTF8_ENCODING);
-075
-076  //HConstants.EMPTY_BYTE_ARRAY should be 
updated if this changed
-077  private static final byte [] 
EMPTY_BYTE_ARRAY = new byte [0];
-078
-079  private static final Log LOG = 
LogFactory.getLog(Bytes.class);
+055import com.google.protobuf.ByteString;
+056
+057/**
+058 * Utility class that handles byte 
arrays, conversions to/from other types,
+059 * comparisons, hash code generation, 
manufacturing keys for HashMaps or
+060 * HashSets, and can be used as key in 
maps or trees.
+061 */
+062@SuppressWarnings("restriction")
+063@InterfaceAudience.Public
+064@InterfaceStability.Stable
+065@edu.umd.cs.findbugs.annotations.SuppressWarnings(
+066
value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS",
+067justification="It has been like this 
forever")
+068public class Bytes implements 
ComparableBytes {
+069  //HConstants.UTF8_ENCODING should be 
updated if this changed
+070  /** When we encode strings, we always 
specify UTF8 encoding */
+071  private static final String 
UTF8_ENCODING = "UTF-8";
+072
+073  //HConstants.UTF8_CHARSET should be 
updated if this changed
+074  /** When we encode strings, we always 
specify UTF8 encoding */
+075  private static final Charset 
UTF8_CHARSET = Charset.forName(UTF8_ENCODING);
+076
+077  // Using the charset canonical name for 
String/byte[] conversions is much
+078  // more efficient due to use of cached 
encoders/decoders.
+079  private static final String UTF8_CSN = 
StandardCharsets.UTF_8.name();
 080
-081  /**
-082   * Size of boolean in bytes
-083   */
-084  public static final int SIZEOF_BOOLEAN 
= Byte.SIZE / Byte.SIZE;
+081  //HConstants.EMPTY_BYTE_ARRAY should be 
updated if this 
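The Bytes class javadoc in the hunk above describes it as the utility for byte-array conversion and comparison. A small usage sketch of the long-standing public API (independent of the charset-handling change this diff introduces):

    import org.apache.hadoop.hbase.util.Bytes;

    public class BytesSketch {
      public static void main(String[] args) {
        byte[] rowKey = Bytes.toBytes("user-42");   // String -> UTF-8 bytes
        byte[] amount = Bytes.toBytes(1234L);       // long -> 8 bytes

        System.out.println(Bytes.toString(rowKey)); // "user-42"
        System.out.println(Bytes.toLong(amount));   // 1234

        // Lexicographic comparison, the ordering HBase applies to row keys.
        int cmp = Bytes.compareTo(Bytes.toBytes("a"), Bytes.toBytes("b"));
        System.out.println(cmp < 0);                // true
      }
    }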

[27/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html
index d607296..e22025b 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html
@@ -57,616 +57,615 @@
 049import 
org.apache.hadoop.hbase.TableName;
 050import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 051import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-052import 
org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFuture;
-053import 
org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-054import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-055
-056/**
-057 * HTableMultiplexer provides a 
thread-safe non blocking PUT API across all the tables.
-058 * Each put will be sharded into 
different buffer queues based on its destination region server.
-059 * So each region server buffer queue 
will only have the puts which share the same destination.
-060 * And each queue will have a flush 
worker thread to flush the puts request to the region server.
-061 * If any queue is full, the 
HTableMultiplexer starts to drop the Put requests for that
-062 * particular queue.
-063 *
-064 * Also all the puts will be retried as a 
configuration number before dropping.
-065 * And the HTableMultiplexer can report 
the number of buffered requests and the number of the
-066 * failed (dropped) requests in total or 
on per region server basis.
-067 *
-068 * This class is thread safe.
-069 */
-070@InterfaceAudience.Public
-071@InterfaceStability.Evolving
-072public class HTableMultiplexer {
-073  private static final Log LOG = 
LogFactory.getLog(HTableMultiplexer.class.getName());
-074
-075  public static final String 
TABLE_MULTIPLEXER_FLUSH_PERIOD_MS =
-076  
"hbase.tablemultiplexer.flush.period.ms";
-077  public static final String 
TABLE_MULTIPLEXER_INIT_THREADS = "hbase.tablemultiplexer.init.threads";
-078  public static final String 
TABLE_MULTIPLEXER_MAX_RETRIES_IN_QUEUE =
-079  
"hbase.client.max.retries.in.queue";
-080
-081  /** The map between each region server 
to its flush worker */
-082  private final MapHRegionLocation, 
FlushWorker serverToFlushWorkerMap =
-083  new ConcurrentHashMap();
-084
-085  private final Configuration 
workerConf;
-086  private final ClusterConnection conn;
-087  private final ExecutorService pool;
-088  private final int maxAttempts;
-089  private final int 
perRegionServerBufferQueueSize;
-090  private final int maxKeyValueSize;
-091  private final ScheduledExecutorService 
executor;
-092  private final long flushPeriod;
-093
-094  /**
-095   * @param conf The HBaseConfiguration
-096   * @param 
perRegionServerBufferQueueSize determines the max number of the buffered Put 
ops for
-097   *  each region server before 
dropping the request.
-098   */
-099  public HTableMultiplexer(Configuration 
conf, int perRegionServerBufferQueueSize)
-100  throws IOException {
-101
this(ConnectionFactory.createConnection(conf), conf, 
perRegionServerBufferQueueSize);
-102  }
-103
-104  /**
-105   * @param conn The HBase connection.
-106   * @param conf The HBase 
configuration
-107   * @param 
perRegionServerBufferQueueSize determines the max number of the buffered Put 
ops for
-108   *  each region server before 
dropping the request.
-109   */
-110  public HTableMultiplexer(Connection 
conn, Configuration conf,
-111  int perRegionServerBufferQueueSize) 
{
-112this.conn = (ClusterConnection) 
conn;
-113this.pool = 
HTable.getDefaultExecutor(conf);
-114// how many times we could try in 
total, one more than retry number
-115this.maxAttempts = 
conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,
-116
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER) + 1;
-117this.perRegionServerBufferQueueSize = 
perRegionServerBufferQueueSize;
-118this.maxKeyValueSize = 
HTable.getMaxKeyValueSize(conf);
-119this.flushPeriod = 
conf.getLong(TABLE_MULTIPLEXER_FLUSH_PERIOD_MS, 100);
-120int initThreads = 
conf.getInt(TABLE_MULTIPLEXER_INIT_THREADS, 10);
-121this.executor =
-122
Executors.newScheduledThreadPool(initThreads,
-123  new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("HTableFlushWorker-%d").build());
-124
-125this.workerConf = 
HBaseConfiguration.create(conf);
-126// We do not do the retry because we 
need to reassign puts to different queues if regions are
-127// moved.
-128
this.workerConf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 0);
-129  }
-130
-131  /**
-132   * Closes the internal {@link 
Connection}. Does nothing if the {@link Connection} has already
-133   * been closed.
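The HTableMultiplexer javadoc quoted above describes a thread-safe, non-blocking PUT path that buffers puts per destination region server and drops them once a queue fills. A rough usage sketch based on that javadoc and the constructor shown in the diff (the table name and queue size are placeholders, not values from the commit):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.HTableMultiplexer;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class MultiplexerSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Second argument: max buffered puts per region server before drops begin.
        HTableMultiplexer multiplexer = new HTableMultiplexer(conf, 10000);

        Put put = new Put(Bytes.toBytes("row-1"));
        put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));

        // Non-blocking: false means the destination queue was full and the put was dropped.
        boolean queued = multiplexer.put(TableName.valueOf("t1"), put);
        System.out.println("queued: " + queued);
      }
    }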

[51/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/044b3379
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/044b3379
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/044b3379

Branch: refs/heads/asf-site
Commit: 044b3379513dec5a6714ace497811c6c4b580536
Parents: e3ab1d1
Author: jenkins 
Authored: Thu Sep 29 15:19:10 2016 +
Committer: Dima Spivak 
Committed: Thu Sep 29 16:07:56 2016 -0400

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 32855 -
 apache_hbase_reference_guide.pdfmarks   | 4 +-
 apidocs/allclasses-frame.html   | 1 +
 apidocs/allclasses-noframe.html | 1 +
 apidocs/constant-values.html|   571 +-
 apidocs/deprecated-list.html| 7 +-
 apidocs/index-all.html  |   104 +-
 apidocs/org/apache/hadoop/hbase/CellUtil.html   |   418 +-
 .../hadoop/hbase/DoNotRetryIOException.html | 2 +-
 apidocs/org/apache/hadoop/hbase/HConstants.html |   704 +-
 .../apache/hadoop/hbase/KeepDeletedCells.html   | 4 +-
 .../hadoop/hbase/UnknownScannerException.html   |16 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |   264 +-
 .../hbase/class-use/DoNotRetryIOException.html  | 7 +
 .../hbase/class-use/HBaseIOException.html   | 7 +
 .../hadoop/hbase/class-use/ServerName.html  | 4 +-
 .../hadoop/hbase/class-use/TableName.html   |19 +-
 .../hbase/class-use/TableNotFoundException.html |19 +-
 .../hadoop/hbase/client/CompactionState.html| 4 +-
 .../apache/hadoop/hbase/client/Consistency.html | 4 +-
 .../apache/hadoop/hbase/client/Durability.html  | 4 +-
 ...ableMultiplexer.HTableMultiplexerStatus.html |20 +-
 .../hadoop/hbase/client/HTableMultiplexer.html  |28 +-
 .../hadoop/hbase/client/IsolationLevel.html | 4 +-
 .../org/apache/hadoop/hbase/client/Result.html  |68 +-
 .../hadoop/hbase/client/SnapshotType.html   | 4 +-
 .../hadoop/hbase/client/class-use/Admin.html|19 +-
 .../hbase/client/class-use/Connection.html  |20 +-
 .../hbase/client/class-use/Consistency.html |14 +-
 .../hbase/client/class-use/Durability.html  | 8 +-
 .../hbase/client/class-use/IsolationLevel.html  |14 +-
 .../hadoop/hbase/client/class-use/Mutation.html | 8 +-
 .../hbase/client/class-use/RegionLocator.html   |19 +-
 .../hadoop/hbase/client/class-use/Result.html   |26 +-
 .../hadoop/hbase/client/class-use/Row.html  | 6 +-
 .../hadoop/hbase/client/class-use/Scan.html | 6 +-
 .../hadoop/hbase/client/class-use/Table.html|61 +-
 .../hadoop/hbase/client/package-tree.html   | 8 +-
 .../client/replication/ReplicationAdmin.html|40 +-
 .../exceptions/RegionInRecoveryException.html   | 4 +-
 .../hbase/exceptions/ScannerResetException.html |   318 +
 .../exceptions/UnknownProtocolException.html| 4 +-
 .../class-use/ScannerResetException.html|   125 +
 .../hadoop/hbase/exceptions/package-frame.html  | 1 +
 .../hbase/exceptions/package-summary.html   | 7 +
 .../hadoop/hbase/exceptions/package-tree.html   | 1 +
 .../hbase/filter/CompareFilter.CompareOp.html   | 4 +-
 .../hadoop/hbase/filter/KeyOnlyFilter.html  |18 +-
 .../hbase/filter/MultiRowRangeFilter.html   |14 +-
 .../filter/class-use/ByteArrayComparable.html   | 8 +-
 .../class-use/CompareFilter.CompareOp.html  | 8 +-
 .../filter/class-use/Filter.ReturnCode.html |64 +-
 .../hadoop/hbase/filter/class-use/Filter.html   |62 +-
 .../hadoop/hbase/filter/package-tree.html   | 4 +-
 .../io/class-use/ImmutableBytesWritable.html|26 +-
 .../hadoop/hbase/io/class-use/TimeRange.html|12 +-
 .../hbase/io/crypto/class-use/Cipher.html   |16 +-
 .../hbase/io/encoding/DataBlockEncoding.html| 4 +-
 .../hadoop/hbase/ipc/package-summary.html   | 4 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   | 4 +-
 .../hadoop/hbase/jetty/package-frame.html   |14 +
 .../hadoop/hbase/jetty/package-summary.html |   124 +
 .../apache/hadoop/hbase/jetty/package-tree.html |   128 +
 .../apache/hadoop/hbase/jetty/package-use.html  |   125 +
 .../hadoop/hbase/mapred/package-summary.html| 4 +-
 .../hadoop/hbase/mapred/package-tree.html   | 4 +-
 .../hbase/mapreduce/LoadIncrementalHFiles.html  |   247 +-
 .../apache/hadoop/hbase/quotas/QuotaType.html   | 4 +-
 .../hadoop/hbase/quotas/package-tree.html   | 2 +-
 .../hadoop/hbase/regionserver/BloomType.html| 4 +-
 .../replication/ReplicationPeerConfig.html  |62 +-
 

[21/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
index b0dae74..d009a5c 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
@@ -30,87 +30,100 @@
 022import java.util.HashMap;
 023import java.util.List;
 024import java.util.Map;
-025import java.util.TreeMap;
-026
-027import 
org.apache.hadoop.hbase.TableName;
-028import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-029import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-030import 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
-031import 
org.apache.hadoop.hbase.util.Bytes;
-032
-033/**
-034 * A configuration for the replication 
peer cluster.
-035 */
-036@InterfaceAudience.Public
-037@InterfaceStability.Evolving
-038public class ReplicationPeerConfig {
-039
-040  private String clusterKey;
-041  private String 
replicationEndpointImpl;
-042  private final Mapbyte[], byte[] 
peerData;
-043  private final MapString, String 
configuration;
-044  private MapTableName, ? extends 
CollectionString tableCFsMap = null;
-045
-046
-047  public ReplicationPeerConfig() {
-048this.peerData = new 
TreeMapbyte[], byte[](Bytes.BYTES_COMPARATOR);
-049this.configuration = new 
HashMapString, String(0);
-050  }
-051
-052  /**
-053   * Set the clusterKey which is the 
concatenation of the slave cluster's:
-054   *  
hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
-055   */
-056  public ReplicationPeerConfig 
setClusterKey(String clusterKey) {
-057this.clusterKey = clusterKey;
-058return this;
-059  }
-060
-061  /**
-062   * Sets the ReplicationEndpoint plugin 
class for this peer.
-063   * @param replicationEndpointImpl a 
class implementing ReplicationEndpoint
-064   */
-065  public ReplicationPeerConfig 
setReplicationEndpointImpl(String replicationEndpointImpl) {
-066this.replicationEndpointImpl = 
replicationEndpointImpl;
-067return this;
-068  }
-069
-070  public String getClusterKey() {
-071return clusterKey;
-072  }
-073
-074  public String 
getReplicationEndpointImpl() {
-075return replicationEndpointImpl;
-076  }
-077
-078  public Mapbyte[], byte[] 
getPeerData() {
-079return peerData;
-080  }
-081
-082  public MapString, String 
getConfiguration() {
-083return configuration;
-084  }
-085
-086  public MapTableName, 
ListString getTableCFsMap() {
-087return (MapTableName, 
ListString) tableCFsMap;
-088  }
-089
-090  public ReplicationPeerConfig 
setTableCFsMap(MapTableName,
-091  
? extends CollectionString tableCFsMap) {
-092this.tableCFsMap = tableCFsMap;
-093return this;
-094  }
-095
-096  @Override
-097  public String toString() {
-098StringBuilder builder = new 
StringBuilder("clusterKey=").append(clusterKey).append(",");
-099
builder.append("replicationEndpointImpl=").append(replicationEndpointImpl).append(",");
-100if (tableCFsMap != null) {
-101  
builder.append("tableCFs=").append(tableCFsMap.toString());
-102}
-103return builder.toString();
+025import java.util.Set;
+026import java.util.TreeMap;
+027
+028import 
org.apache.hadoop.hbase.TableName;
+029import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+030import 
org.apache.hadoop.hbase.classification.InterfaceStability;
+031import 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
+032import 
org.apache.hadoop.hbase.util.Bytes;
+033
+034/**
+035 * A configuration for the replication 
peer cluster.
+036 */
+037@InterfaceAudience.Public
+038@InterfaceStability.Evolving
+039public class ReplicationPeerConfig {
+040
+041  private String clusterKey;
+042  private String 
replicationEndpointImpl;
+043  private final Mapbyte[], byte[] 
peerData;
+044  private final MapString, String 
configuration;
+045  private MapTableName, ? extends 
CollectionString tableCFsMap = null;
+046  private SetString namespaces = 
null;
+047
+048  public ReplicationPeerConfig() {
+049this.peerData = new 
TreeMapbyte[], byte[](Bytes.BYTES_COMPARATOR);
+050this.configuration = new 
HashMapString, String(0);
+051  }
+052
+053  /**
+054   * Set the clusterKey which is the 
concatenation of the slave cluster's:
+055   *  
hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
+056   */
+057  public ReplicationPeerConfig 
setClusterKey(String clusterKey) {
+058this.clusterKey = clusterKey;
+059return this;
+060  }
+061
+062  /**
+063   * Sets the ReplicationEndpoint plugin 
class for this peer.
+064   
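The ReplicationPeerConfig source above is a plain builder-style bean: cluster key, optional endpoint class, per-table column families and, new in this diff, a namespace set. A short sketch of assembling one with the setters visible above (cluster key and table names are placeholders):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

    public class PeerConfigSketch {
      public static void main(String[] args) {
        // Replicate only cf1/cf2 of table t1 to the peer.
        Map<TableName, List<String>> tableCfs = new HashMap<>();
        tableCfs.put(TableName.valueOf("t1"), Arrays.asList("cf1", "cf2"));

        ReplicationPeerConfig peerConfig = new ReplicationPeerConfig()
            .setClusterKey("zk1,zk2,zk3:2181:/hbase")  // quorum:clientPort:znode.parent
            .setTableCFsMap(tableCfs);

        System.out.println(peerConfig);  // toString() prints clusterKey and tableCFs
      }
    }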

[24/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/exceptions/ScannerResetException.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/exceptions/ScannerResetException.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/exceptions/ScannerResetException.html
new file mode 100644
index 000..04cc6bc
--- /dev/null
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/exceptions/ScannerResetException.html
@@ -0,0 +1,122 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+Source code
+
+
+
+
+001/**
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License.  You may obtain a 
copy of the License at
+009 *
+010 * 
http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or 
agreed to in writing, software
+013 * distributed under the License is 
distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+015 * See the License for the specific 
language governing permissions and
+016 * limitations under the License.
+017 */
+018
+019package 
org.apache.hadoop.hbase.exceptions;
+020
+021import 
org.apache.hadoop.hbase.DoNotRetryIOException;
+022import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+023import 
org.apache.hadoop.hbase.classification.InterfaceStability;
+024
+025/**
+026 * Thrown when the server side has 
received an Exception, and asks the Client to reset the scanner
+027 * state by closing the current region 
scanner, and reopening from the start of last seen row.
+028 */
+029@InterfaceAudience.Public
+030@InterfaceStability.Stable
+031public class ScannerResetException 
extends DoNotRetryIOException {
+032  private static final long 
serialVersionUID = -5649728171144849619L;
+033
+034  /** constructor */
+035  public ScannerResetException() {
+036super();
+037  }
+038
+039  /**
+040   * Constructor
+041   * @param s message
+042   */
+043  public ScannerResetException(String s) 
{
+044super(s);
+045  }
+046
+047  public ScannerResetException(String s, 
Exception e) {
+048super(s, e);
+049  }
+050}
+
+
+
+
+
+
+
+

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
index 5d13aba..d416cfa 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
@@ -33,7 +33,7 @@
 025
 026import org.apache.hadoop.hbase.Cell;
 027import 
org.apache.hadoop.hbase.CellComparator;
-028import 
org.apache.hadoop.hbase.KeyValueUtil;
+028import 
org.apache.hadoop.hbase.CellUtil;
 029import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 030import 
org.apache.hadoop.hbase.classification.InterfaceStability;
 031import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -183,7 +183,7 @@
 175  return null;
 176}
 177byte[] nextRowKey = 
tracker.nextRow();
-178return 
KeyValueUtil.createFirstOnRow(nextRowKey);
+178return 
CellUtil.createFirstOnRow(nextRowKey, 0, (short) nextRowKey.length);
 179  }
 180
 181  /**

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
index 5dfe4d3..24a0d22 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
@@ -28,117 +28,381 @@
 020
 021
 022import java.io.IOException;
-023import java.util.ArrayList;
-024
-025import org.apache.hadoop.hbase.Cell;
-026import 
org.apache.hadoop.hbase.KeyValue;
-027import 
org.apache.hadoop.hbase.KeyValueUtil;
-028import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-029import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-030import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-031import 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
-032import 
org.apache.hadoop.hbase.util.Bytes;
-033

[25/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
index a0f0e21..b4f5eb1 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
@@ -197,445 +197,481 @@
 189   * @param peerConfig configuration for 
the replication slave cluster
 190   */
 191  public void addPeer(String id, 
ReplicationPeerConfig peerConfig) throws ReplicationException {
-192
this.replicationPeers.registerPeer(id, peerConfig);
-193  }
-194
-195  /**
-196   *  @deprecated as release of 2.0.0, 
and it will be removed in 3.0.0
-197   * */
-198  @Deprecated
-199  public static MapTableName, 
ListString parseTableCFsFromConfig(String tableCFsConfig) {
-200return 
ReplicationSerDeHelper.parseTableCFsFromConfig(tableCFsConfig);
-201  }
-202
-203  public void updatePeerConfig(String id, 
ReplicationPeerConfig peerConfig)
-204  throws ReplicationException {
-205
this.replicationPeers.updatePeerConfig(id, peerConfig);
-206  }
-207  /**
-208   * Removes a peer cluster and stops the 
replication to it.
-209   * @param id a short name that 
identifies the cluster
-210   */
-211  public void removePeer(String id) 
throws ReplicationException {
-212
this.replicationPeers.unregisterPeer(id);
-213  }
-214
-215  /**
-216   * Restart the replication stream to 
the specified peer.
-217   * @param id a short name that 
identifies the cluster
-218   */
-219  public void enablePeer(String id) 
throws ReplicationException {
-220
this.replicationPeers.enablePeer(id);
-221  }
-222
-223  /**
-224   * Stop the replication stream to the 
specified peer.
-225   * @param id a short name that 
identifies the cluster
-226   */
-227  public void disablePeer(String id) 
throws ReplicationException {
-228
this.replicationPeers.disablePeer(id);
-229  }
-230
-231  /**
-232   * Get the number of slave clusters the 
local cluster has.
-233   * @return number of slave clusters
-234   */
-235  public int getPeersCount() {
-236return 
this.replicationPeers.getAllPeerIds().size();
-237  }
-238
-239  public MapString, 
ReplicationPeerConfig listPeerConfigs() {
-240return 
this.replicationPeers.getAllPeerConfigs();
-241  }
-242
-243  public ReplicationPeerConfig 
getPeerConfig(String id) throws ReplicationException {
-244return 
this.replicationPeers.getReplicationPeerConfig(id);
-245  }
-246
-247  /**
-248   * Get the replicable table-cf config 
of the specified peer.
-249   * @param id a short name that 
identifies the cluster
-250   * @deprecated as release of 2.0.0, and 
it will be removed in 3.0.0,
-251   * use {@link #getPeerConfig(String)} 
instead.
-252   * */
-253  @Deprecated
-254  public String getPeerTableCFs(String 
id) throws ReplicationException {
-255return 
ReplicationSerDeHelper.convertToString(this.replicationPeers.getPeerTableCFsConfig(id));
-256  }
-257
-258  /**
-259   * Append the replicable table-cf 
config of the specified peer
-260   * @param id a short that identifies 
the cluster
-261   * @param tableCfs table-cfs config 
str
-262   * @throws ReplicationException
-263   * @deprecated as release of 2.0.0, and 
it will be removed in 3.0.0,
-264   * use {@link 
#appendPeerTableCFs(String, Map)} instead.
-265   */
-266  @Deprecated
-267  public void appendPeerTableCFs(String 
id, String tableCfs) throws ReplicationException {
-268appendPeerTableCFs(id, 
ReplicationSerDeHelper.parseTableCFsFromConfig(tableCfs));
-269  }
-270
-271  /**
-272   * Append the replicable table-cf 
config of the specified peer
-273   * @param id a short that identifies 
the cluster
-274   * @param tableCfs A map from tableName 
to column family names
-275   * @throws ReplicationException
-276   */
-277  public void appendPeerTableCFs(String 
id, MapTableName, ? extends CollectionString tableCfs)
-278  throws ReplicationException {
-279if (tableCfs == null) {
-280  throw new 
ReplicationException("tableCfs is null");
-281}
-282MapTableName, 
ListString preTableCfs = 
this.replicationPeers.getPeerTableCFsConfig(id);
-283if (preTableCfs == null) {
-284  setPeerTableCFs(id, tableCfs);
-285  return;
+192
checkNamespacesAndTableCfsConfigConflict(peerConfig.getNamespaces(),
+193  peerConfig.getTableCFsMap());
+194
this.replicationPeers.registerPeer(id, peerConfig);
+195  }
+196
+197  /**
+198   *  @deprecated as release of 2.0.0, 
and it will be removed in 3.0.0
+199   * */
+200  @Deprecated
+201  public static MapTableName, 
ListString parseTableCFsFromConfig(String tableCFsConfig) {
+202return 
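The ReplicationAdmin hunk above adds a check so a peer cannot be registered with overlapping namespaces and table-cfs. A hedged sketch of registering a peer and appending table-cfs with the methods shown in this diff (peer id and cluster key are placeholders; error handling omitted):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
    import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

    public class AddPeerSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        ReplicationAdmin admin = new ReplicationAdmin(conf);

        ReplicationPeerConfig peerConfig = new ReplicationPeerConfig()
            .setClusterKey("peer-zk:2181:/hbase");
        // addPeer now runs checkNamespacesAndTableCfsConfigConflict before registering.
        admin.addPeer("1", peerConfig);

        // Later, widen the peer to another table's column family.
        Map<TableName, List<String>> moreCfs = new HashMap<>();
        moreCfs.put(TableName.valueOf("t2"), Arrays.asList("cf1"));
        admin.appendPeerTableCFs("1", moreCfs);

        admin.close();
      }
    }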

[32/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
index d4666b5..e626707 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
@@ -124,80 +124,88 @@
 
 
 
-byte[]
-OrderedBlob.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/8/docs/api/java/lang/Short.html?is-external=true;
 title="class or interface in java.lang">Short
+RawShort.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
-OrderedInt32.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object[]
+Struct.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-RawString.decode(PositionedByteRangesrc)
+T
+TerminatedWrapper.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
-OrderedInt64.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/8/docs/api/java/lang/Byte.html?is-external=true;
 title="class or interface in java.lang">Byte
+OrderedInt8.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Double.html?is-external=true;
 title="class or interface in java.lang">Double
-RawDouble.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+OrderedString.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
-RawInteger.decode(PositionedByteRangesrc)
-
-
 http://docs.oracle.com/javase/8/docs/api/java/lang/Double.html?is-external=true;
 title="class or interface in java.lang">Double
 OrderedFloat64.decode(PositionedByteRangesrc)
 
-
-http://docs.oracle.com/javase/8/docs/api/java/lang/Float.html?is-external=true;
 title="class or interface in java.lang">Float
-RawFloat.decode(PositionedByteRangesrc)
-
 
-T
-FixedLengthWrapper.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
+OrderedInt32.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-OrderedString.decode(PositionedByteRangesrc)
-
-
 http://docs.oracle.com/javase/8/docs/api/java/lang/Number.html?is-external=true;
 title="class or interface in java.lang">Number
 OrderedNumeric.decode(PositionedByteRangesrc)
 
+
+http://docs.oracle.com/javase/8/docs/api/java/lang/Byte.html?is-external=true;
 title="class or interface in java.lang">Byte
+RawByte.decode(PositionedByteRangesrc)
+
 
-T
-TerminatedWrapper.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/8/docs/api/java/lang/Float.html?is-external=true;
 title="class or interface in java.lang">Float
+RawFloat.decode(PositionedByteRangesrc)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/lang/Float.html?is-external=true;
 title="class or interface in java.lang">Float
 OrderedFloat32.decode(PositionedByteRangesrc)
 
 
-byte[]
-RawBytes.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
+RawInteger.decode(PositionedByteRangesrc)
 
 
+byte[]
+OrderedBlob.decode(PositionedByteRangesrc)
+
+
 http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
-RawLong.decode(PositionedByteRangesrc)
+OrderedInt64.decode(PositionedByteRangesrc)
+
+
+T
+FixedLengthWrapper.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Short.html?is-external=true;
 title="class or interface in java.lang">Short
-RawShort.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+RawString.decode(PositionedByteRangesrc)
 
 
 byte[]
+RawBytes.decode(PositionedByteRangesrc)
+
+
+byte[]
 OrderedBlobVar.decode(PositionedByteRangesrc)
 
+
+http://docs.oracle.com/javase/8/docs/api/java/lang/Short.html?is-external=true;
 title="class or interface in java.lang">Short
+OrderedInt16.decode(PositionedByteRangesrc)
+
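
The class-use rows above all share the DataType decode(PositionedByteRange) shape. A minimal round-trip sketch, assuming the OrderedInt32.ASCENDING singleton and SimplePositionedMutableByteRange from org.apache.hadoop.hbase.util (neither appears in this excerpt):

import org.apache.hadoop.hbase.types.OrderedInt32;
import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;

public class OrderedEncodingSketch {
  public static void main(String[] args) {
    // OrderedInt32 needs 5 bytes: a 1-byte marker plus the 4-byte value.
    PositionedByteRange buf = new SimplePositionedMutableByteRange(5);
    OrderedInt32.ASCENDING.encode(buf, 42);   // order-preserving encoding
    buf.setPosition(0);                       // rewind before reading back
    Integer decoded = OrderedInt32.ASCENDING.decode(buf);
    System.out.println(decoded);              // prints 42
  }
}
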
 

[04/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/DoNotRetryIOException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/DoNotRetryIOException.html 
b/devapidocs/org/apache/hadoop/hbase/DoNotRetryIOException.html
index de594cf..2bce902 100644
--- a/devapidocs/org/apache/hadoop/hbase/DoNotRetryIOException.html
+++ b/devapidocs/org/apache/hadoop/hbase/DoNotRetryIOException.html
@@ -127,7 +127,7 @@
 
 
 Direct Known Subclasses:
-AccessDeniedException, ConstraintException, CoprocessorException, 
CorruptHFileException, DoNotRetryRegionException, 
FailedSanityCheckException, FatalConnectionException, HBaseSnapshotException, 
InvalidFamilyOperationException, 
InvalidLabelException, InvalidQuotaSettingsException, LabelAlreadyExistsException, LeaseException, LockTimeoutException, 
NamespaceExistException, NamespaceNotFoundException, NoSuchColumnFamilyException, NotAllMetaRegionsOnlineException, 
OperationConflictException, OutOfOrderScannerNextException, QuotaExceededException, ScannerTimeoutException, ServerTooBusyException, TableExistsException, TableNotDisabledException, TableNotEnabledException, TableNotFoundException, UnknownProtocolException, UnknownScannerException
+AccessDeniedException, ConstraintException, CoprocessorException, 
CorruptHFileException, DoNotRetryRegionException, 
FailedSanityCheckException, FatalConnectionException, HBaseSnapshotException, 
InvalidFamilyOperationException, 
InvalidLabelException, InvalidQuotaSettingsException, LabelAlreadyExistsException, LeaseException, LockTimeoutException, 
NamespaceExistException, NamespaceNotFoundException, NoSuchColumnFamilyException, NotAllMetaRegionsOnlineException, 
OperationConflictException, OutOfOrderScannerNextException, QuotaExceededException, ScannerResetException, 
ScannerTimeoutException, ServerTooBusyException, TableExistsException, TableNotDisabledException, TableNotEnabledException, TableNotFoundException, UnknownProtocolException, UnknownScannerException
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/DroppedSnapshotException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/DroppedSnapshotException.html 
b/devapidocs/org/apache/hadoop/hbase/DroppedSnapshotException.html
index 044f9e3..94d18c6 100644
--- a/devapidocs/org/apache/hadoop/hbase/DroppedSnapshotException.html
+++ b/devapidocs/org/apache/hadoop/hbase/DroppedSnapshotException.html
@@ -44,7 +44,7 @@
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames
@@ -286,7 +286,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/IOException.ht
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/ExtendedCell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ExtendedCell.html 
b/devapidocs/org/apache/hadoop/hbase/ExtendedCell.html
new file mode 100644
index 000..73e06fd
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/ExtendedCell.html
@@ -0,0 +1,311 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+ExtendedCell (Apache HBase 2.0.0-SNAPSHOT API)
+
+
+
+
+
+var methods = {"i0":6,"i1":6};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+PrevClass
+NextClass
+
+
+Frames
+NoFrames
+
+
+AllClasses
+
+
+
+
+
+
+
+Summary:
+Nested|
+Field|
+Constr|
+Method
+
+
+Detail:
+Field|
+Constr|
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase
+Interface ExtendedCell
+
+
+
+
+
+
+All Superinterfaces:
+Cell, http://docs.oracle.com/javase/8/docs/api/java/lang/Cloneable.html?is-external=true;
 title="class or interface in java.lang">Cloneable, HeapSize, SettableSequenceId, SettableTimestamp
+
+
+All Known Implementing Classes:
+BufferedDataBlockEncoder.OffheapDecodedCell,
 BufferedDataBlockEncoder.OnheapDecodedCell,
 CellUtil.ShareableMemoryTagRewriteCell, CellUtil.TagRewriteCell, KeyValue, KeyValue.KeyOnlyKeyValue, KeyValueCodec.ByteBufferedKeyValueDecoder.ShareableMemoryKeyValue,
 

[14/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 63b1466..b0c63ff 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -280,10 +280,10 @@
 Warnings
 Errors
 
-1845
+1850
 0
 0
-11576
+11585
 
 Files
 
@@ -331,7 +331,7 @@
 org/apache/hadoop/hbase/CellUtil.java
 0
 0
-97
+96
 
 org/apache/hadoop/hbase/ChoreService.java
 0
@@ -383,147 +383,147 @@
 0
 1
 
+org/apache/hadoop/hbase/ExtendedCell.java
+0
+0
+1
+
 org/apache/hadoop/hbase/HBaseConfiguration.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/HColumnDescriptor.java
 0
 0
 27
-
+
 org/apache/hadoop/hbase/HConstants.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/HRegionInfo.java
 0
 0
 58
-
+
 org/apache/hadoop/hbase/HRegionLocation.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/HTableDescriptor.java
 0
 0
 46
-
+
 org/apache/hadoop/hbase/HealthChecker.java
 0
 0
 17
-
+
 org/apache/hadoop/hbase/JMXListener.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/KeyValue.java
 0
 0
 135
-
+
 org/apache/hadoop/hbase/KeyValueTestUtil.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/KeyValueUtil.java
 0
 0
 30
-
+
 org/apache/hadoop/hbase/LocalHBaseCluster.java
 0
 0
 23
-
+
 org/apache/hadoop/hbase/MetaMutationAnnotation.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/MetaTableAccessor.java
 0
 0
 116
-
+
 org/apache/hadoop/hbase/NamespaceDescriptor.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ProcedureUtil.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/RegionLoad.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/RegionLocations.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/RegionStateListener.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/ScheduledChore.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/ServerLoad.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ServerName.java
 0
 0
 34
-
+
 org/apache/hadoop/hbase/SettableSequenceId.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/SettableTimestamp.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/SplitLogCounters.java
 0
 0
 1
-
-org/apache/hadoop/hbase/SplitLogTask.java
-0
-0
-2
 
-org/apache/hadoop/hbase/Streamable.java
+org/apache/hadoop/hbase/SplitLogTask.java
 0
 0
 2
@@ -626,3047 +626,3057 @@
 org/apache/hadoop/hbase/client/AsyncProcess.java
 0
 0
-29
+10
 
-org/apache/hadoop/hbase/client/BufferedMutator.java
+org/apache/hadoop/hbase/client/AsyncRequestFuture.java
 0
 0
 1
 
+org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
+0
+0
+24
+
+org/apache/hadoop/hbase/client/BatchErrors.java
+0
+0
+1
+
+org/apache/hadoop/hbase/client/BufferedMutator.java
+0
+0
+1
+
 org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/CancellableRegionServerCallable.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ClientIdGenerator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ClientScanner.java
 0
 0
-206
-
+205
+
 org/apache/hadoop/hbase/client/ClientSimpleScanner.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ClientSmallReversedScanner.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/ClientSmallScanner.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/ClusterStatusListener.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/CompactType.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ConnectionConfiguration.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ConnectionImplementation.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/CoprocessorHConnection.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/DelayingRunner.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/Delete.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/Get.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/HBaseAdmin.java
 0
 0
 68
-
+
 org/apache/hadoop/hbase/client/HRegionLocator.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/HTable.java
 0
 0
 25
-
+
 org/apache/hadoop/hbase/client/HTableInterface.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/HTableMultiplexer.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/HTableWrapper.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/client/Increment.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/MasterCallable.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/MetaCache.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/client/MetricsConnection.java
 0
 0
 44
-
+
 org/apache/hadoop/hbase/client/MultiAction.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/MultiResponse.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/MultiServerCallable.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/client/Mutation.java
 0
 0
 14
-
+
 org/apache/hadoop/hbase/client/Operation.java
 0
 0
 1
-
+
 

[09/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index a067f67..bba004e 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -689,6 +689,8 @@
 
 The user should override this method, and try to take a 
lock if necessary.
 
+acquireLock(Procedure)
 - Method in class org.apache.hadoop.hbase.procedure2.ProcedureExecutor
+
 acquireLock(K)
 - Method in class org.apache.hadoop.hbase.util.KeyLocker
 
 Return a lock for the given key.
@@ -728,7 +730,7 @@
 
 ACTION_BY_CODE
 - Static variable in class org.apache.hadoop.hbase.security.access.Permission
 
-actions
 - Variable in class org.apache.hadoop.hbase.client.AsyncProcess.BatchErrors
+actions
 - Variable in class org.apache.hadoop.hbase.client.BatchErrors
 
 actions
 - Variable in class org.apache.hadoop.hbase.client.DelayingRunner
 
@@ -738,7 +740,7 @@
 
 actions
 - Variable in class org.apache.hadoop.hbase.security.access.Permission
 
-actionsInProgress
 - Variable in class org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFutureImpl
+actionsInProgress
 - Variable in class org.apache.hadoop.hbase.client.AsyncRequestFutureImpl
 
 activateOptions()
 - Method in class org.apache.hadoop.hbase.AsyncConsoleAppender
 
@@ -792,7 +794,7 @@
 
 Add column and value to this Append operation.
 
-add(Throwable,
 Row, ServerName) - Method in class 
org.apache.hadoop.hbase.client.AsyncProcess.BatchErrors
+add(Throwable,
 Row, ServerName) - Method in class 
org.apache.hadoop.hbase.client.BatchErrors
 
 add(BigDecimal,
 BigDecimal) - Method in class 
org.apache.hadoop.hbase.client.coprocessor.BigDecimalColumnInterpreter
 
@@ -1011,9 +1013,13 @@
 add(T)
 - Method in class org.apache.hadoop.hbase.util.ConcatenatedLists
 
 add(long) 
- Method in class org.apache.hadoop.hbase.util.Counter
-
+
+Deprecated.
+
 add(long)
 - Method in class org.apache.hadoop.hbase.util.Counter.Cell
-
+
+Deprecated.
+
 add(long,
 long) - Method in class org.apache.hadoop.hbase.util.FastLongHistogram
 
 Adds a value to the histogram.
@@ -1934,11 +1940,11 @@
 
 Complete taking the snapshot on the region.
 
-addReplicaActions(int,
 MapServerName, MultiActionRow, 
ListActionRow) - Method in class 
org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFutureImpl.ReplicaCallIssuingRunnable
+addReplicaActions(int,
 MapServerName, MultiActionRow, 
ListActionRow) - Method in class 
org.apache.hadoop.hbase.client.AsyncRequestFutureImpl.ReplicaCallIssuingRunnable
 
 Add replica actions to action map by server.
 
-addReplicaActionsAgain(ActionRow,
 MapServerName, MultiActionRow) - Method in class 
org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFutureImpl.ReplicaCallIssuingRunnable
+addReplicaActionsAgain(ActionRow,
 MapServerName, MultiActionRow) - Method in class 
org.apache.hadoop.hbase.client.AsyncRequestFutureImpl.ReplicaCallIssuingRunnable
 
 addReplicas(MasterProcedureEnv,
 HTableDescriptor, ListHRegionInfo) - Static method in class 
org.apache.hadoop.hbase.master.procedure.CreateTableProcedure
 
@@ -1955,7 +1961,7 @@
 
 ADDRESS_BITS_PER_WORD
 - Static variable in class org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker.BitSetNode
 
-addresses
 - Variable in class org.apache.hadoop.hbase.client.AsyncProcess.BatchErrors
+addresses
 - Variable in class org.apache.hadoop.hbase.client.BatchErrors
 
 Addressing 
- Class in org.apache.hadoop.hbase.util
 
@@ -2051,7 +2057,7 @@
 
 addShutdownHook(Thread,
 int) - Method in class org.apache.hadoop.hbase.util.ShutdownHookManager.ShutdownHookManagerV2
 
-addSingleServerRequestHeapSize(ServerName,
 
AsyncProcess.AsyncRequestFutureImplCResult.SingleServerRequestRunnable)
 - Method in class org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFutureImpl
+addSingleServerRequestHeapSize(ServerName,
 AsyncRequestFutureImplCResult.SingleServerRequestRunnable) 
- Method in class org.apache.hadoop.hbase.client.AsyncRequestFutureImpl
 
 addSize(RpcCallContext,
 Result, Object) - Method in class 
org.apache.hadoop.hbase.regionserver.RSRpcServices
 
@@ -2343,6 +2349,11 @@
 
 afterLast
 - Variable in class org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArrayScanner
 
+afterReplay(TEnvironment)
 - Method in class org.apache.hadoop.hbase.procedure2.Procedure
+
+Called when the procedure is ready to be added to the queue 
after
+ the loading/replay operation.
+
 ageAtEviction
 - Variable in class org.apache.hadoop.hbase.io.hfile.CacheStats
 
 Keep running age at eviction time
@@ -2530,6 +2541,8 @@
 
 alreadyRunning
 - Variable in class org.apache.hadoop.hbase.master.CatalogJanitor
 
+ALWAYS_COPY_FILES
 - Static variable in class org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles
+
 amd64 - Static 
variable in class org.apache.hadoop.hbase.util.JVM
 
 ampBytes
 - Static variable in class org.apache.hadoop.hbase.http.HtmlQuoting
@@ -3190,10 

[10/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/deprecated-list.html
--
diff --git a/devapidocs/deprecated-list.html b/devapidocs/deprecated-list.html
index 539b40f..afdfb4b 100644
--- a/devapidocs/deprecated-list.html
+++ b/devapidocs/deprecated-list.html
@@ -97,6 +97,16 @@
 use Table instead
 
 
+
+org.apache.hadoop.hbase.SettableSequenceId
+as of 2.0 and will be 
removed in 3.0. Use ExtendedCell 
instead
+
+
+
+org.apache.hadoop.hbase.SettableTimestamp
+as of 2.0 and will be 
removed in 3.0. Use ExtendedCell 
instead
+
+
 
 
 
@@ -113,11 +123,16 @@
 
 
 
+org.apache.hadoop.hbase.util.Counter
+use http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">LongAdder instead.
+
+
+
 org.apache.hadoop.hbase.filter.FirstKeyValueMatchingQualifiersFilter
 Deprecated in 2.0. See 
HBASE-13347
 
 
-
+
 org.apache.hadoop.hbase.security.access.HbaseObjectWritableFor96Migration
 This class is needed 
migrating TablePermissions written with
  Writables.  It is needed to read old permissions written pre-0.96.  This
@@ -125,53 +140,53 @@
  will have been migrated and written with protobufs.
 
 
-
+
 org.apache.hadoop.hbase.mapreduce.HLogInputFormat
 use WALInputFormat.  Remove in 
hadoop 3.0
 
 
-
+
 org.apache.hadoop.hbase.regionserver.wal.HLogKey
 use WALKey. Deprecated as 
of 1.0 (HBASE-12522). Remove in 2.0
 
 
-
+
 org.apache.hadoop.hbase.regionserver.wal.HLogPrettyPrinter
 use the "hbase wal" 
command
 
 
-
+
 org.apache.hadoop.hbase.KeyValue.KVComparator
 : Use CellComparator.
 
 
-
+
 org.apache.hadoop.hbase.KeyValue.MetaComparator
 : CellComparator.META_COMPARATOR
 to be used
 
 
-
+
 org.apache.hadoop.hbase.KeyValue.RawBytesComparator
 Not to be used for any 
comparsions
 
 
-
+
 org.apache.hadoop.hbase.regionserver.wal.KeyValueCompression
 
-
+
 org.apache.hadoop.hbase.regionserver.querymatcher.LegacyScanQueryMatcher
 
-
+
 org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint
 As of release 2.0.0, this 
will be removed in HBase 3.0.0
 
 
-
+
 org.apache.hadoop.hbase.zookeeper.ZKLeaderManager
 Not used
 
 
-
+
 org.apache.hadoop.hbase.zookeeper.ZKUtil.NodeAndData
 Unused
 
@@ -413,13 +428,13 @@
 org.apache.hadoop.hbase.mapreduce.CellCreator.create(byte[],
 int, int, byte[], int, int, byte[], int, int, long, byte[], int, int, 
String)
 
 
-org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory.create(Configuration,
 PriorityFunction)
+org.apache.hadoop.hbase.regionserver.FifoRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
 org.apache.hadoop.hbase.regionserver.SimpleRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
-org.apache.hadoop.hbase.regionserver.FifoRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
+org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
 org.apache.hadoop.hbase.coprocessor.ObserverContext.createAndPrepare(T,
 ObserverContextT)
@@ -447,13 +462,13 @@
 org.apache.hadoop.hbase.KeyValueUtil.ensureKeyValues(ListCell)
 
 
-org.apache.hadoop.hbase.regionserver.SplitTransaction.execute(Server,
 RegionServerServices)
-use #execute(Server, 
RegionServerServices, User);  as of 1.0.2, remove in 3.0
+org.apache.hadoop.hbase.regionserver.RegionMergeTransaction.execute(Server,
 RegionServerServices)
+use #execute(Server, 
RegionServerServices, User)
 
 
 
-org.apache.hadoop.hbase.regionserver.RegionMergeTransaction.execute(Server,
 RegionServerServices)
-use #execute(Server, 
RegionServerServices, User)
+org.apache.hadoop.hbase.regionserver.SplitTransaction.execute(Server,
 RegionServerServices)
+use #execute(Server, 
RegionServerServices, User);  as of 1.0.2, remove in 3.0
 
 
 
@@ -465,15 +480,15 @@
 org.apache.hadoop.hbase.rest.client.RemoteHTable.exists(ListGet)
 
 
-org.apache.hadoop.hbase.filter.Filter.filterRowKey(byte[],
 int, int)
+org.apache.hadoop.hbase.filter.FilterBase.filterRowKey(byte[],
 int, int)
 As of release 2.0.0, this 
will be removed in HBase 3.0.0.
- Instead use Filter.filterRowKey(Cell)
+ Instead use FilterBase.filterRowKey(Cell)
 
 
 
-org.apache.hadoop.hbase.filter.FilterBase.filterRowKey(byte[],
 int, int)
+org.apache.hadoop.hbase.filter.Filter.filterRowKey(byte[],
 int, int)
 As of release 2.0.0, this 
will be removed in HBase 3.0.0.
- Instead use FilterBase.filterRowKey(Cell)
+ Instead use Filter.filterRowKey(Cell)
 
 
 
@@ -570,10 +585,10 @@
 
 
 
-org.apache.hadoop.hbase.http.HttpServer.getPort()
+org.apache.hadoop.hbase.http.InfoServer.getPort()
 
 
-org.apache.hadoop.hbase.http.InfoServer.getPort()
+org.apache.hadoop.hbase.http.HttpServer.getPort()
 
 
 org.apache.hadoop.hbase.CellUtil.getQualifierBufferShallowCopy(Cell)
@@ -780,15 +795,15 @@
 
 
 

[37/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 22047b3..145917a 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -107,45 +107,43 @@
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterColumn(Cellcell)
+MultipleColumnPrefixFilter.filterColumn(Cellcell)
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterColumn(Cellcell)
+ColumnPrefixFilter.filterColumn(Cellcell)
 
 
-abstract Filter.ReturnCode
-Filter.filterKeyValue(Cellv)
-A way to filter based on the column family, column 
qualifier and/or the column value.
-
+Filter.ReturnCode
+InclusiveStopFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cellcell)
+RandomRowFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterKeyValue(Cellv)
+DependentColumnFilter.filterKeyValue(Cellc)
 
 
 Filter.ReturnCode
-PrefixFilter.filterKeyValue(Cellv)
+FirstKeyOnlyFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cellv)
+KeyOnlyFilter.filterKeyValue(Cellignored)
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterKeyValue(Cellv)
+SingleColumnValueFilter.filterKeyValue(Cellc)
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterKeyValue(Cellv)
+ColumnPaginationFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterKeyValue(Cellkv)
+PageFilter.filterKeyValue(Cellignored)
 
 
 Filter.ReturnCode
@@ -153,69 +151,71 @@
 
 
 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cellv)
+QualifierFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterKeyValue(Cellv)
+FamilyFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterKeyValue(Cellignored)
+WhileMatchFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterKeyValue(Cellc)
+ValueFilter.filterKeyValue(Cellv)
 
 
-Filter.ReturnCode
-FuzzyRowFilter.filterKeyValue(Cellc)
+abstract Filter.ReturnCode
+Filter.filterKeyValue(Cellv)
+A way to filter based on the column family, column 
qualifier and/or the column value.
+
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cellv)
+MultipleColumnPrefixFilter.filterKeyValue(Cellkv)
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cellkv)
+TimestampsFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-TimestampsFilter.filterKeyValue(Cellv)
+FuzzyRowFilter.filterKeyValue(Cellc)
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterKeyValue(Cellc)
+MultiRowRangeFilter.filterKeyValue(Cellignored)
 
 
 Filter.ReturnCode
-RowFilter.filterKeyValue(Cellv)
+SkipFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-RandomRowFilter.filterKeyValue(Cellv)
+ColumnPrefixFilter.filterKeyValue(Cellcell)
 
 
 Filter.ReturnCode
-QualifierFilter.filterKeyValue(Cellv)
+RowFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cellv)
+PrefixFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterKeyValue(Cellignored)
-
-
-Filter.ReturnCode
 FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cellv)
 Deprecated.
 
 
+
+Filter.ReturnCode
+ColumnRangeFilter.filterKeyValue(Cellkv)
+
 
 Filter.ReturnCode
-PageFilter.filterKeyValue(Cellignored)
+ColumnCountGetFilter.filterKeyValue(Cellv)
 
 
 static Filter.ReturnCode

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index 9d8a596..a41faf9 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -140,11 +140,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 Filter
-Scan.getFilter()
+Query.getFilter()
 
 
 Filter
-Query.getFilter()
+Scan.getFilter()
 
 
 
@@ -156,19 +156,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
+Query
+Query.setFilter(Filterfilter)
+Apply the specified server-side filter when performing the 
Query.
+
+
+
 Get
 Get.setFilter(Filterfilter)
 
-
+
 Scan
 Scan.setFilter(Filterfilter)
 
-
-Query
-Query.setFilter(Filterfilter)
-Apply the specified server-side filter when performing the 
Query.
-
-
 
 
 
@@ -390,59 +390,59 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Filter
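
For context on the listings above: Scan.setFilter and Query.setFilter attach a server-side Filter, and each filter's filterKeyValue implementation is evaluated per Cell on the region server. A minimal sketch (table name and row prefix are placeholders, not taken from this commit):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class FilteredScanSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("my_table"))) {
      Scan scan = new Scan();
      // The filter runs on the region server; only matching rows come back.
      scan.setFilter(new PrefixFilter(Bytes.toBytes("user-")));
      try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result result : scanner) {
          System.out.println(Bytes.toString(result.getRow()));
        }
      }
    }
  }
}
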

[22/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
index 908b8b7..9e44e11 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
@@ -123,1019 +123,1128 @@
 115  private static final String 
ASSIGN_SEQ_IDS = "hbase.mapreduce.bulkload.assign.sequenceNumbers";
 116  public final static String 
CREATE_TABLE_CONF_KEY = "create.table";
 117  public final static String 
SILENCE_CONF_KEY = "ignore.unmatched.families";
-118
-119  // We use a '.' prefix which is ignored 
when walking directory trees
-120  // above. It is invalid family name.
-121  final static String TMP_DIR = ".tmp";
-122
-123  private int 
maxFilesPerRegionPerFamily;
-124  private boolean assignSeqIds;
-125  private SetString 
unmatchedFamilies = new HashSetString();
-126
-127  // Source filesystem
-128  private FileSystem fs;
-129  // Source delegation token
-130  private FsDelegationToken 
fsDelegationToken;
-131  private String bulkToken;
-132  private UserProvider userProvider;
-133  private int nrThreads;
-134  private RpcControllerFactory 
rpcControllerFactory;
-135
-136  private LoadIncrementalHFiles() {}
-137
-138  public 
LoadIncrementalHFiles(Configuration conf) throws Exception {
-139super(conf);
-140this.rpcControllerFactory = new 
RpcControllerFactory(conf);
-141initialize();
-142  }
-143
-144  private void initialize() throws 
Exception {
-145if (initalized) {
-146  return;
-147}
-148// make a copy, just to be sure we're 
not overriding someone else's config
-149
setConf(HBaseConfiguration.create(getConf()));
-150Configuration conf = getConf();
-151// disable blockcache for tool 
invocation, see HBASE-10500
-152
conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0);
-153this.userProvider = 
UserProvider.instantiate(conf);
-154this.fsDelegationToken = new 
FsDelegationToken(userProvider, "renewer");
-155assignSeqIds = 
conf.getBoolean(ASSIGN_SEQ_IDS, true);
-156maxFilesPerRegionPerFamily = 
conf.getInt(MAX_FILES_PER_REGION_PER_FAMILY, 32);
-157nrThreads = 
conf.getInt("hbase.loadincremental.threads.max",
-158  
Runtime.getRuntime().availableProcessors());
-159initalized = true;
-160  }
-161
-162  private void usage() {
-163System.err.println("usage: " + NAME + 
" /path/to/hfileoutputformat-output tablename" + "\n -D"
-164+ CREATE_TABLE_CONF_KEY + "=no - 
can be used to avoid creation of table by this tool\n"
-165+ "  Note: if you set this to 
'no', then the target table must already exist in HBase\n -D"
-166+ SILENCE_CONF_KEY + "=yes - can 
be used to ignore unmatched column families\n"
-167+ "\n");
-168  }
-169
-170  private interface 
BulkHFileVisitorTFamily {
-171TFamily bulkFamily(final byte[] 
familyName)
-172  throws IOException;
-173void bulkHFile(final TFamily family, 
final FileStatus hfileStatus)
-174  throws IOException;
-175  }
-176
-177  /**
-178   * Iterate over the bulkDir hfiles.
-179   * Skip reference, HFileLink, files 
starting with "_" and non-valid hfiles.
-180   */
-181  private static TFamily void 
visitBulkHFiles(final FileSystem fs, final Path bulkDir,
-182final BulkHFileVisitorTFamily 
visitor) throws IOException {
-183visitBulkHFiles(fs, bulkDir, visitor, 
true);
-184  }
-185
-186  /**
-187   * Iterate over the bulkDir hfiles.
-188   * Skip reference, HFileLink, files 
starting with "_".
-189   * Check and skip non-valid hfiles by 
default, or skip this validation by setting
-190   * 
'hbase.loadincremental.validate.hfile' to false.
-191   */
-192  private static TFamily void 
visitBulkHFiles(final FileSystem fs, final Path bulkDir,
-193final BulkHFileVisitorTFamily 
visitor, final boolean validateHFile) throws IOException {
-194if (!fs.exists(bulkDir)) {
-195  throw new 
FileNotFoundException("Bulkload dir " + bulkDir + " not found");
-196}
-197
-198FileStatus[] familyDirStatuses = 
fs.listStatus(bulkDir);
-199if (familyDirStatuses == null) {
-200  throw new FileNotFoundException("No 
families found in " + bulkDir);
-201}
-202
-203for (FileStatus familyStat : 
familyDirStatuses) {
-204  if (!familyStat.isDirectory()) {
-205LOG.warn("Skipping non-directory 
" + familyStat.getPath());
-206continue;
-207  }
-208  Path familyDir = 
familyStat.getPath();
-209  byte[] familyName = 
familyDir.getName().getBytes();
-210  // Skip invalid family
-211  try {
-212
HColumnDescriptor.isLegalFamilyName(familyName);
-213  }
-214  catch 
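
The usage() text above shows this class being driven from the command line. A minimal sketch of invoking it programmatically, assuming LoadIncrementalHFiles can be run through Hadoop's ToolRunner (the output path and table name are the placeholders from the usage string, and "create.table" is the CREATE_TABLE_CONF_KEY shown above):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
import org.apache.hadoop.util.ToolRunner;

public class BulkLoadDriverSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Do not create the table from the tool; it must already exist.
    conf.set("create.table", "no");
    int exit = ToolRunner.run(conf, new LoadIncrementalHFiles(conf),
        new String[] { "/path/to/hfileoutputformat-output", "my_table" });
    System.exit(exit);
  }
}
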

[48/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apache_hbase_reference_guide.pdfmarks
--
diff --git a/apache_hbase_reference_guide.pdfmarks 
b/apache_hbase_reference_guide.pdfmarks
index 583aacc..5ef666b 100644
--- a/apache_hbase_reference_guide.pdfmarks
+++ b/apache_hbase_reference_guide.pdfmarks
@@ -2,8 +2,8 @@
   /Author (Apache HBase Team)
   /Subject ()
   /Keywords ()
-  /ModDate (D:20160916144607)
-  /CreationDate (D:20160916144607)
+  /ModDate (D:20160929151030)
+  /CreationDate (D:20160929151030)
   /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
   /Producer ()
   /DOCINFO pdfmark

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/allclasses-frame.html
--
diff --git a/apidocs/allclasses-frame.html b/apidocs/allclasses-frame.html
index 14631ff..8c9dd06 100644
--- a/apidocs/allclasses-frame.html
+++ b/apidocs/allclasses-frame.html
@@ -278,6 +278,7 @@
 RpcRetryingCaller
 RSGroupInfo
 Scan
+ScannerResetException
 ScannerTimeoutException
 ScheduledChore
 SecurityCapability

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/allclasses-noframe.html
--
diff --git a/apidocs/allclasses-noframe.html b/apidocs/allclasses-noframe.html
index 0597dcd..b40ba1f 100644
--- a/apidocs/allclasses-noframe.html
+++ b/apidocs/allclasses-noframe.html
@@ -278,6 +278,7 @@
 RpcRetryingCaller
 RSGroupInfo
 Scan
+ScannerResetException
 ScannerTimeoutException
 ScheduledChore
 SecurityCapability



[28/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
index d607296..e22025b 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
@@ -57,616 +57,615 @@
 049import 
org.apache.hadoop.hbase.TableName;
 050import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 051import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-052import 
org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFuture;
-053import 
org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-054import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-055
-056/**
-057 * HTableMultiplexer provides a 
thread-safe non blocking PUT API across all the tables.
-058 * Each put will be sharded into 
different buffer queues based on its destination region server.
-059 * So each region server buffer queue 
will only have the puts which share the same destination.
-060 * And each queue will have a flush 
worker thread to flush the puts request to the region server.
-061 * If any queue is full, the 
HTableMultiplexer starts to drop the Put requests for that
-062 * particular queue.
-063 *
-064 * Also all the puts will be retried as a 
configuration number before dropping.
-065 * And the HTableMultiplexer can report 
the number of buffered requests and the number of the
-066 * failed (dropped) requests in total or 
on per region server basis.
-067 *
-068 * This class is thread safe.
-069 */
-070@InterfaceAudience.Public
-071@InterfaceStability.Evolving
-072public class HTableMultiplexer {
-073  private static final Log LOG = 
LogFactory.getLog(HTableMultiplexer.class.getName());
-074
-075  public static final String 
TABLE_MULTIPLEXER_FLUSH_PERIOD_MS =
-076  
"hbase.tablemultiplexer.flush.period.ms";
-077  public static final String 
TABLE_MULTIPLEXER_INIT_THREADS = "hbase.tablemultiplexer.init.threads";
-078  public static final String 
TABLE_MULTIPLEXER_MAX_RETRIES_IN_QUEUE =
-079  
"hbase.client.max.retries.in.queue";
-080
-081  /** The map between each region server 
to its flush worker */
-082  private final MapHRegionLocation, 
FlushWorker serverToFlushWorkerMap =
-083  new ConcurrentHashMap();
-084
-085  private final Configuration 
workerConf;
-086  private final ClusterConnection conn;
-087  private final ExecutorService pool;
-088  private final int maxAttempts;
-089  private final int 
perRegionServerBufferQueueSize;
-090  private final int maxKeyValueSize;
-091  private final ScheduledExecutorService 
executor;
-092  private final long flushPeriod;
-093
-094  /**
-095   * @param conf The HBaseConfiguration
-096   * @param 
perRegionServerBufferQueueSize determines the max number of the buffered Put 
ops for
-097   *  each region server before 
dropping the request.
-098   */
-099  public HTableMultiplexer(Configuration 
conf, int perRegionServerBufferQueueSize)
-100  throws IOException {
-101
this(ConnectionFactory.createConnection(conf), conf, 
perRegionServerBufferQueueSize);
-102  }
-103
-104  /**
-105   * @param conn The HBase connection.
-106   * @param conf The HBase 
configuration
-107   * @param 
perRegionServerBufferQueueSize determines the max number of the buffered Put 
ops for
-108   *  each region server before 
dropping the request.
-109   */
-110  public HTableMultiplexer(Connection 
conn, Configuration conf,
-111  int perRegionServerBufferQueueSize) 
{
-112this.conn = (ClusterConnection) 
conn;
-113this.pool = 
HTable.getDefaultExecutor(conf);
-114// how many times we could try in 
total, one more than retry number
-115this.maxAttempts = 
conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,
-116
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER) + 1;
-117this.perRegionServerBufferQueueSize = 
perRegionServerBufferQueueSize;
-118this.maxKeyValueSize = 
HTable.getMaxKeyValueSize(conf);
-119this.flushPeriod = 
conf.getLong(TABLE_MULTIPLEXER_FLUSH_PERIOD_MS, 100);
-120int initThreads = 
conf.getInt(TABLE_MULTIPLEXER_INIT_THREADS, 10);
-121this.executor =
-122
Executors.newScheduledThreadPool(initThreads,
-123  new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("HTableFlushWorker-%d").build());
-124
-125this.workerConf = 
HBaseConfiguration.create(conf);
-126// We do not do the retry because we 
need to reassign puts to different queues if regions are
-127// moved.
-128
this.workerConf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 0);
-129  }
-130
-131  /**
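
The class comment above describes the multiplexer's per-region-server buffer queues and non-blocking puts. A minimal sketch of that API, assuming the put(TableName, Put) overload and its boolean return (neither is shown in this excerpt):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTableMultiplexer;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class MultiplexerSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Buffer up to 10000 Puts per region server before dropping, per the
    // perRegionServerBufferQueueSize constructor parameter documented above.
    HTableMultiplexer multiplexer = new HTableMultiplexer(conf, 10000);
    Put put = new Put(Bytes.toBytes("row1"));
    put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
    // put() does not block; false means the destination queue was full and
    // the Put was dropped rather than queued.
    boolean queued = multiplexer.put(TableName.valueOf("my_table"), put);
    System.out.println("queued=" + queued);
  }
}
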

[45/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/CellUtil.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/CellUtil.html 
b/apidocs/org/apache/hadoop/hbase/CellUtil.html
index 435e068..e2a2306 100644
--- a/apidocs/org/apache/hadoop/hbase/CellUtil.html
+++ b/apidocs/org/apache/hadoop/hbase/CellUtil.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9,"i26":9,"i27":9,"i28":9,"i29":9,"i30":9,"i31":9,"i32":9,"i33":9,"i34":9,"i35":9,"i36":9,"i37":9,"i38":9,"i39":9,"i40":9,"i41":9,"i42":9,"i43":9,"i44":9,"i45":9,"i46":41,"i47":9,"i48":9,"i49":9,"i50":9,"i51":9,"i52":9,"i53":9,"i54":9,"i55":9,"i56":9,"i57":9,"i58":9,"i59":9,"i60":9,"i61":9,"i62":9,"i63":9,"i64":9,"i65":9,"i66":9,"i67":9,"i68":9,"i69":9,"i70":9,"i71":9,"i72":9,"i73":9,"i74":41,"i75":9,"i76":9,"i77":9,"i78":9,"i79":9,"i80":9,"i81":9,"i82":9,"i83":9,"i84":9,"i85":41,"i86":9,"i87":9,"i88":9,"i89":9,"i90":9,"i91":9,"i92":9,"i93":9,"i94":9,"i95":9,"i96":9,"i97":9,"i98":9};
+var methods = 
{"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9,"i26":9,"i27":9,"i28":9,"i29":9,"i30":9,"i31":9,"i32":9,"i33":9,"i34":9,"i35":9,"i36":9,"i37":9,"i38":9,"i39":9,"i40":9,"i41":9,"i42":9,"i43":9,"i44":9,"i45":9,"i46":9,"i47":9,"i48":9,"i49":9,"i50":41,"i51":9,"i52":9,"i53":9,"i54":9,"i55":9,"i56":9,"i57":9,"i58":9,"i59":9,"i60":9,"i61":9,"i62":9,"i63":9,"i64":9,"i65":9,"i66":9,"i67":9,"i68":9,"i69":9,"i70":9,"i71":9,"i72":9,"i73":9,"i74":9,"i75":9,"i76":9,"i77":9,"i78":41,"i79":9,"i80":9,"i81":9,"i82":9,"i83":9,"i84":9,"i85":9,"i86":9,"i87":9,"i88":9,"i89":41,"i90":9,"i91":9,"i92":9,"i93":9,"i94":9,"i95":9,"i96":9,"i97":9,"i98":9,"i99":9,"i100":9,"i101":9,"i102":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -111,7 +111,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
  @InterfaceStability.Evolving
-public final class CellUtil
+public final class CellUtil
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 Utility methods helpful slinging Cell instances.
  Some methods below are for internal use only and are marked 
InterfaceAudience.Private at the
@@ -295,23 +295,45 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 static Cell
+createFirstOnRow(byte[]row)
+
+
+static Cell
+createFirstOnRow(byte[]row,
+byte[]family,
+byte[]col)
+
+
+static Cell
 createFirstOnRow(byte[]row,
 introffset,
 shortrlength)
 
-
+
+static Cell
+createFirstOnRow(byte[]row,
+introffset,
+shortrlength,
+byte[]family,
+intfoffset,
+byteflength,
+byte[]col,
+intcoffset,
+intclength)
+
+
 static Cell
 createFirstOnRow(Cellcell)
 Create a Cell that is smaller than all other possible Cells 
for the given Cell's row.
 
 
-
+
 static Cell
 createFirstOnRowCol(Cellcell)
 Create a Cell that is smaller than all other possible Cells 
for the given Cell's row.
 
 
-
+
 static Cell
 createFirstOnRowCol(Cellcell,
byte[]qArray,
@@ -321,85 +343,89 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
  passed qualifier.
 
 
-
+
 static Cell
 createFirstOnRowColTS(Cellcell,
  longts)
 Creates the first cell with the row/family/qualifier of 
this cell and the given timestamp.
 
 
-
+
+static Cell
+createLastOnRow(byte[]row)
+
+
 static Cell
 createLastOnRow(Cellcell)
 Create a Cell that is larger than all other possible Cells 
for the given Cell's row.
 
 
-
+
 static Cell
 createLastOnRowCol(Cellcell)
 Create a Cell that is larger than all other possible Cells 
for the given Cell's rk:cf:q.
 
 
-
+
 static boolean
 equals(Cella,
   Cellb)
 equals
 
 
-
+
 static boolean
 equalsIgnoreMvccVersion(Cella,
Cellb)
 special case for Cell.equals
 
 
-
+
 static long
 estimatedHeapSizeOf(Cellcell)
 This is an estimate of the heap space occupied by a 
cell.
 
 
-
+
 static int
 estimatedSerializedSizeOf(Cellcell)
 Estimate based on keyvalue's serialization format.
 
 
-
+
 static int
 estimatedSerializedSizeOfKey(Cellcell)
 Calculates the serialized key size.
 
 
-
+
 static ByteRange
 fillFamilyRange(Cellcell,
ByteRangerange)
 
-
+
 static ByteRange
 

[16/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/util/Counter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Counter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Counter.html
index 01ae315..3cb1f0b 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/util/Counter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Counter.html
@@ -34,188 +34,190 @@
 026
 027/**
 028 * High scalable counter. Thread safe.
-029 */
-030@InterfaceAudience.Public
-031@InterfaceStability.Evolving
-032public class Counter {
-033  private static final int 
MAX_CELLS_LENGTH = 1  20;
-034
-035  private static class Cell {
-036// Pads are added around the value to 
avoid cache-line contention with
-037// another cell's value. The 
cache-line size is expected to be equal to or
-038// less than about 128 Bytes (= 64 
Bits * 16).
-039
-040@SuppressWarnings("unused")
-041volatile long p0, p1, p2, p3, p4, p5, 
p6;
-042volatile long value;
-043@SuppressWarnings("unused")
-044volatile long q0, q1, q2, q3, q4, q5, 
q6;
-045
-046static final 
AtomicLongFieldUpdaterCell valueUpdater =
-047
AtomicLongFieldUpdater.newUpdater(Cell.class, "value");
-048
-049Cell() {}
+029 * @deprecated use {@link 
java.util.concurrent.atomic.LongAdder} instead.
+030 */
+031@InterfaceAudience.Public
+032@InterfaceStability.Evolving
+033@Deprecated
+034public class Counter {
+035  private static final int 
MAX_CELLS_LENGTH = 1  20;
+036
+037  private static class Cell {
+038// Pads are added around the value to 
avoid cache-line contention with
+039// another cell's value. The 
cache-line size is expected to be equal to or
+040// less than about 128 Bytes (= 64 
Bits * 16).
+041
+042@SuppressWarnings("unused")
+043volatile long p0, p1, p2, p3, p4, p5, 
p6;
+044volatile long value;
+045@SuppressWarnings("unused")
+046volatile long q0, q1, q2, q3, q4, q5, 
q6;
+047
+048static final 
AtomicLongFieldUpdaterCell valueUpdater =
+049
AtomicLongFieldUpdater.newUpdater(Cell.class, "value");
 050
-051Cell(long initValue) {
-052  value = initValue;
-053}
-054
-055long get() {
-056  return value;
-057}
-058
-059boolean add(long delta) {
-060  long current = value;
-061  return 
valueUpdater.compareAndSet(this, current, current + delta);
-062}
-063  }
-064
-065  private static class Container {
-066/** The length should be a power of 
2. */
-067final Cell[] cells;
-068
-069/** True if a new extended container 
is going to replace this. */
-070final AtomicBoolean demoted = new 
AtomicBoolean();
-071
-072Container(Cell cell) {
-073  this(new Cell[] { cell });
-074}
-075
-076/**
-077 * @param cells the length should be 
a power of 2
-078 */
-079Container(Cell[] cells) {
-080  this.cells = cells;
-081}
-082  }
-083
-084  private final 
AtomicReferenceContainer containerRef;
+051Cell() {}
+052
+053Cell(long initValue) {
+054  value = initValue;
+055}
+056
+057long get() {
+058  return value;
+059}
+060
+061boolean add(long delta) {
+062  long current = value;
+063  return 
valueUpdater.compareAndSet(this, current, current + delta);
+064}
+065  }
+066
+067  private static class Container {
+068/** The length should be a power of 
2. */
+069final Cell[] cells;
+070
+071/** True if a new extended container 
is going to replace this. */
+072final AtomicBoolean demoted = new 
AtomicBoolean();
+073
+074Container(Cell cell) {
+075  this(new Cell[] { cell });
+076}
+077
+078/**
+079 * @param cells the length should be 
a power of 2
+080 */
+081Container(Cell[] cells) {
+082  this.cells = cells;
+083}
+084  }
 085
-086  public Counter() {
-087this(new Cell());
-088  }
-089
-090  public Counter(long initValue) {
-091this(new Cell(initValue));
-092  }
-093
-094  private Counter(Cell initCell) {
-095containerRef = new 
AtomicReferenceContainer(new Container(initCell));
-096  }
-097
-098  private static int hash() {
-099// The logic is borrowed from 
high-scale-lib's ConcurrentAutoTable.
-100
-101int h = 
System.identityHashCode(Thread.currentThread());
-102// You would think that 
System.identityHashCode on the current thread
-103// would be a good hash fcn, but 
actually on SunOS 5.8 it is pretty lousy
-104// in the low bits.
-105
-106h ^= (h  20) ^ (h 
 12); // Bit spreader, borrowed from Doug Lea
-107h ^= (h   7) ^ (h 
  4);
-108return h;
-109  }
-110
-111  private static class IndexHolder {
-112int index = hash();
-113  }
-114
-115  private final 
ThreadLocalIndexHolder indexHolderThreadLocal =
-116  new 
ThreadLocalIndexHolder() {
-117@Override
-118protected IndexHolder initialValue() 
{
-119  
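
The @deprecated tag added above points at java.util.concurrent.atomic.LongAdder, which provides the same striped, low-contention counting that Counter implemented by hand. A minimal sketch of the suggested replacement (the method mappings in the comments are approximate):

import java.util.concurrent.atomic.LongAdder;

public class RequestCounterSketch {
  private final LongAdder requests = new LongAdder();

  public void onRequest() {
    requests.increment();   // roughly Counter.increment()
  }

  public void onBatch(int n) {
    requests.add(n);        // roughly Counter.add(long)
  }

  public long snapshot() {
    return requests.sum();  // roughly Counter.get()
  }
}
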

[36/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/jetty/package-summary.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/jetty/package-summary.html 
b/apidocs/org/apache/hadoop/hbase/jetty/package-summary.html
new file mode 100644
index 000..0fbedc5
--- /dev/null
+++ b/apidocs/org/apache/hadoop/hbase/jetty/package-summary.html
@@ -0,0 +1,124 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+org.apache.hadoop.hbase.jetty (Apache HBase 2.0.0-SNAPSHOT API)
+
+
+
+
+
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+PrevPackage
+NextPackage
+
+
+Frames
+NoFrames
+
+
+AllClasses
+
+
+
+
+
+
+
+
+
+
+Packageorg.apache.hadoop.hbase.jetty
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+PrevPackage
+NextPackage
+
+
+Frames
+NoFrames
+
+
+AllClasses
+
+
+
+
+
+
+
+
+
+Copyright  20072016 http://www.apache.org/;>The Apache Software Foundation. All rights 
reserved.
+
+

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/jetty/package-tree.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/jetty/package-tree.html 
b/apidocs/org/apache/hadoop/hbase/jetty/package-tree.html
new file mode 100644
index 000..d324d1a
--- /dev/null
+++ b/apidocs/org/apache/hadoop/hbase/jetty/package-tree.html
@@ -0,0 +1,128 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+org.apache.hadoop.hbase.jetty Class Hierarchy (Apache HBase 
2.0.0-SNAPSHOT API)
+
+
+
+
+
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev
+Next
+
+
+Frames
+NoFrames
+
+
+AllClasses
+
+
+
+
+
+
+
+
+
+
+Hierarchy For Package org.apache.hadoop.hbase.jetty
+Package Hierarchies:
+
+All Packages
+
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev
+Next
+
+
+Frames
+NoFrames
+
+
+AllClasses
+
+
+
+
+
+
+
+
+
+Copyright  20072016 http://www.apache.org/;>The Apache Software Foundation. All rights 
reserved.
+
+

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/jetty/package-use.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/jetty/package-use.html 
b/apidocs/org/apache/hadoop/hbase/jetty/package-use.html
new file mode 100644
index 000..5be6266
--- /dev/null
+++ b/apidocs/org/apache/hadoop/hbase/jetty/package-use.html
@@ -0,0 +1,125 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+Uses of Package org.apache.hadoop.hbase.jetty (Apache HBase 
2.0.0-SNAPSHOT API)
+
+
+
+
+
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev
+Next
+
+
+Frames
+NoFrames
+
+
+AllClasses
+
+
+
+
+
+
+
+
+
+
+Uses 

[43/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/org/apache/hadoop/hbase/HConstants.html
index e29802c..0454504 100644
--- a/apidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/org/apache/hadoop/hbase/HConstants.html
@@ -163,24 +163,30 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+BULKLOAD_STAGING_DIR_NAME
+Staging dir used by bulk load
+
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 BYTES_PER_CHECKSUM
 The name of the configuration parameter that specifies
  the number of bytes in a newly created checksum chunk.
 
 
-
+
 static byte[]
 CATALOG_FAMILY
 The catalog family
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CATALOG_FAMILY_STR
 The catalog family as a string
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CHECKSUM_TYPE_NAME
 The name of the configuration parameter that specifies
@@ -188,668 +194,668 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
  for newly created blocks.
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CIPHER_AES
 Default cipher for encryption
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CLIENT_PORT_STR
 The ZK client port key in the ZK properties map.
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CLUSTER_DISTRIBUTED
 Cluster is in distributed mode or not
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CLUSTER_ID_DEFAULT
 Default value for cluster ID
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CLUSTER_ID_FILE_NAME
 name of the file for unique cluster ID
 
 
-
+
 static boolean
 CLUSTER_IS_DISTRIBUTED
 Cluster is fully-distributed
 
 
-
+
 static boolean
 CLUSTER_IS_LOCAL
 Cluster is standalone or pseudo-distributed
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 COMPACTION_KV_MAX
 Parameter name for the maximum batch of KVs to be used in 
flushes and compactions
 
 
-
+
 static int
 COMPACTION_KV_MAX_DEFAULT
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CONFIGURATION
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CORRUPT_DIR_NAME
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_INCLUSION_KEY
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
 CP_HTD_ATTR_KEY_PATTERN
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
 CP_HTD_ATTR_VALUE_PARAM_PATTERN
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
 CP_HTD_ATTR_VALUE_PATTERN
 
  Pattern that matches a coprocessor specification.
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_ALTERNATE_KEY_ALGORITHM_CONF_KEY
 Configuration key for the name of the alternate cipher 
algorithm for the cluster, a string
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_CIPHERPROVIDER_CONF_KEY
 

[38/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html 
b/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
index 0b4060d..d3618d5 100644
--- a/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
+++ b/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
@@ -522,7 +522,7 @@ publicvoid
 parseTableCFsFromConfig
 http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
-public statichttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapTableName,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringparseTableCFsFromConfig(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtableCFsConfig)
+public statichttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapTableName,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringparseTableCFsFromConfig(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtableCFsConfig)
 Deprecated.as release of 2.0.0, and it will be removed in 
3.0.0
 
 
@@ -532,7 +532,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/M
 
 
 updatePeerConfig
-publicvoidupdatePeerConfig(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringid,
+publicvoidupdatePeerConfig(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringid,
  ReplicationPeerConfigpeerConfig)
   throws ReplicationException
 
@@ -547,7 +547,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/M
 
 
 removePeer
-publicvoidremovePeer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringid)
+publicvoidremovePeer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringid)
 throws ReplicationException
 Removes a peer cluster and stops the replication to 
it.
 
@@ -564,7 +564,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/M
 
 
 enablePeer
-publicvoidenablePeer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringid)
+publicvoidenablePeer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringid)
 throws ReplicationException
 Restart the replication stream to the specified peer.
 
@@ -581,7 +581,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/M
 
 
 disablePeer
-publicvoiddisablePeer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringid)
+publicvoiddisablePeer(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringid)
  throws ReplicationException
 Stop the replication stream to the specified peer.
 
@@ -598,7 +598,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/M
 
 
 getPeersCount
-public int getPeersCount()
+public int getPeersCount()
 Get the number of slave clusters the local cluster 
has.
 
 Returns:
@@ -612,7 +612,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/M
 
 
 listPeerConfigs
-publichttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,ReplicationPeerConfiglistPeerConfigs()
+publichttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,ReplicationPeerConfiglistPeerConfigs()
 
 
 
@@ -621,7 +621,7 @@ 
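
The ReplicationAdmin hunks above cover the peer-management surface: updatePeerConfig, removePeer, enablePeer, disablePeer, getPeersCount and listPeerConfigs. A minimal sketch of how those calls chain together, assuming a reachable cluster, a peer already registered under the hypothetical id "1", and a made-up replacement cluster key; this is an illustration, not the canonical admin workflow.

import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

public class ReplicationPeerTour {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    ReplicationAdmin repAdmin = new ReplicationAdmin(conf);
    try {
      // Read side first: every configured peer and its cluster key.
      Map<String, ReplicationPeerConfig> peers = repAdmin.listPeerConfigs();
      System.out.println("peers: " + repAdmin.getPeersCount());
      for (Map.Entry<String, ReplicationPeerConfig> e : peers.entrySet()) {
        System.out.println(e.getKey() + " -> " + e.getValue().getClusterKey());
      }

      // Pause and resume shipping edits to the (hypothetical) peer "1".
      repAdmin.disablePeer("1");
      repAdmin.enablePeer("1");

      // Re-point the peer at a different cluster key, then drop it entirely.
      ReplicationPeerConfig updated = new ReplicationPeerConfig();
      updated.setClusterKey("zk-b.example.com:2181:/hbase");   // made-up key
      repAdmin.updatePeerConfig("1", updated);
      repAdmin.removePeer("1");
    } finally {
      repAdmin.close();
    }
  }
}

listPeerConfigs() works as the read side before any of the mutating calls, since it returns each peer id together with its current ReplicationPeerConfig.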

[40/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html 
b/apidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
index dbc9edd..d0a7402 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
@@ -348,6 +348,13 @@
 
 
 class
+ScannerResetException
+Thrown when the server side has received an Exception, and 
asks the Client to reset the scanner
+ state by closing the current region scanner, and reopening from the start of 
last seen row.
+
+
+
+class
 UnknownProtocolException
 An error requesting an RPC protocol that the server is not 
serving.
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index 53abc29..fe99dcf 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -359,13 +359,13 @@
 
 
 long
-ExponentialClientBackoffPolicy.getBackoffTime(ServerNameserverName,
+ClientBackoffPolicy.getBackoffTime(ServerNameserverName,
   byte[]region,
   
org.apache.hadoop.hbase.client.backoff.ServerStatisticsstats)
 
 
 long
-ClientBackoffPolicy.getBackoffTime(ServerNameserverName,
+ExponentialClientBackoffPolicy.getBackoffTime(ServerNameserverName,
   byte[]region,
   
org.apache.hadoop.hbase.client.backoff.ServerStatisticsstats)
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
index addd970..c9878ce 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -399,14 +399,14 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 TableName
-Table.getName()
-Gets the fully qualified table name instance of this 
table.
+BufferedMutator.getName()
+Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
 
 
 
 TableName
-BufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
+Table.getName()
+Gets the fully qualified table name instance of this 
table.
 
 
 
@@ -1055,11 +1055,18 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 
+int
+LoadIncrementalHFiles.run(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringdirPath,
+   http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.fs.Pathmap,
+   TableNametableName)
+
+
 protected http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem
-LoadIncrementalHFiles.tryAtomicRegionLoad(Connectionconn,
+LoadIncrementalHFiles.tryAtomicRegionLoad(Connectionconn,
TableNametableName,
byte[]first,
-   http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in 
java.util">Collectionorg.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItemlqis)
+   http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in 
java.util">Collectionorg.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItemlqis,
+   booleancopyFile)
 Attempts to do an atomic load of many hfiles into a 
region.
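
The LoadIncrementalHFiles rows above list the programmatic entry points for completing a bulk load (this revision adds a copyFile flag to tryAtomicRegionLoad). As a rough sketch, the usual driver path goes through the Tool interface; the HDFS output directory and table name below are placeholders:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
import org.apache.hadoop.util.ToolRunner;

public class BulkLoadDriver {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Roughly equivalent to:
    //   hbase org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles <hfile dir> <table>
    int exit = ToolRunner.run(conf, new LoadIncrementalHFiles(conf),
        new String[] { "/user/hbase/bulkload/output", "demo" });   // placeholder path and table
    System.exit(exit);
  }
}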
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html 
b/apidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
index 1bbaf83..fecd7b1 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
@@ -133,6 

[30/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html 
b/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html
index 7234f1e..a78e2b5 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html
@@ -31,362 +31,362 @@
 023
 024import java.io.DataOutputStream;
 025import java.io.IOException;
-026import java.math.BigDecimal;
-027import java.nio.ByteBuffer;
-028import java.util.ArrayList;
-029import java.util.Arrays;
-030import java.util.Iterator;
-031import java.util.List;
-032import java.util.Map.Entry;
-033import java.util.NavigableMap;
-034
-035import 
org.apache.hadoop.hbase.KeyValue.Type;
-036import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-037import 
org.apache.hadoop.hbase.classification.InterfaceAudience.Private;
-038import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-039import 
org.apache.hadoop.hbase.io.HeapSize;
-040import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-041import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-042import 
org.apache.hadoop.hbase.util.ByteRange;
-043import 
org.apache.hadoop.hbase.util.Bytes;
-044import 
org.apache.hadoop.hbase.util.ClassSize;
-045
-046/**
-047 * Utility methods helpful slinging 
{@link Cell} instances.
-048 * Some methods below are for internal 
use only and are marked InterfaceAudience.Private at the
-049 * method level.
-050 */
-051@InterfaceAudience.Public
-052@InterfaceStability.Evolving
-053public final class CellUtil {
-054
-055  /**
-056   * Private constructor to keep this 
class from being instantiated.
-057   */
-058  private CellUtil(){}
-059
-060  /*** ByteRange 
***/
-061
-062  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
-063return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
-064  }
-065
-066  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
-067return 
range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 
cell.getFamilyLength());
-068  }
-069
-070  public static ByteRange 
fillQualifierRange(Cell cell, ByteRange range) {
-071return 
range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-072  cell.getQualifierLength());
-073  }
-074
-075  public static ByteRange 
fillValueRange(Cell cell, ByteRange range) {
-076return 
range.set(cell.getValueArray(), cell.getValueOffset(), 
cell.getValueLength());
-077  }
-078
-079  public static ByteRange 
fillTagRange(Cell cell, ByteRange range) {
-080return range.set(cell.getTagsArray(), 
cell.getTagsOffset(), cell.getTagsLength());
-081  }
-082
-083  /* get individual 
arrays for tests /
-084
-085  public static byte[] cloneRow(Cell 
cell){
-086byte[] output = new 
byte[cell.getRowLength()];
-087copyRowTo(cell, output, 0);
-088return output;
-089  }
-090
-091  public static byte[] cloneFamily(Cell 
cell){
-092byte[] output = new 
byte[cell.getFamilyLength()];
-093copyFamilyTo(cell, output, 0);
-094return output;
-095  }
-096
-097  public static byte[] 
cloneQualifier(Cell cell){
-098byte[] output = new 
byte[cell.getQualifierLength()];
-099copyQualifierTo(cell, output, 0);
-100return output;
-101  }
-102
-103  public static byte[] cloneValue(Cell 
cell){
-104byte[] output = new 
byte[cell.getValueLength()];
-105copyValueTo(cell, output, 0);
-106return output;
-107  }
-108
-109  public static byte[] cloneTags(Cell 
cell) {
-110byte[] output = new 
byte[cell.getTagsLength()];
-111copyTagTo(cell, output, 0);
-112return output;
-113  }
-114
-115  /**
-116   * Returns tag value in a new byte 
array. If server-side, use
-117   * {@link Tag#getValueArray()} with 
appropriate {@link Tag#getValueOffset()} and
-118   * {@link Tag#getValueLength()} instead 
to save on allocations.
-119   * @param cell
-120   * @return tag value in a new byte 
array.
-121   */
-122  public static byte[] getTagArray(Cell 
cell){
-123byte[] output = new 
byte[cell.getTagsLength()];
-124copyTagTo(cell, output, 0);
-125return output;
-126  }
-127
+026import java.io.OutputStream;
+027import java.math.BigDecimal;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Arrays;
+031import java.util.Iterator;
+032import java.util.List;
+033import java.util.Map.Entry;
+034import java.util.NavigableMap;
+035
+036import 
org.apache.hadoop.hbase.KeyValue.Type;
+037import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+038import 
org.apache.hadoop.hbase.classification.InterfaceAudience.Private;
+039import 
org.apache.hadoop.hbase.classification.InterfaceStability;
+040import 
org.apache.hadoop.hbase.io.HeapSize;
+041import 

[52/52] hbase-site git commit: Empty commit

2016-09-29 Thread dimaspivak
Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/a16440ac
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/a16440ac
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/a16440ac

Branch: refs/heads/asf-site
Commit: a16440acb475281df7f185c6cf4c920c88b05963
Parents: 044b337
Author: Dima Spivak 
Authored: Thu Sep 29 16:08:37 2016 -0400
Committer: Dima Spivak 
Committed: Thu Sep 29 16:08:37 2016 -0400

--

--




[49/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index bd7f8a5..068b048 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,24 +5,24 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
 /Producer (Apache HBase Team)
-/CreationDate (D:20160916144419+00'00')
-/ModDate (D:20160916144419+00'00')
+/CreationDate (D:20160929150855+00'00')
+/ModDate (D:20160929150855+00'00')
 >>
 endobj
 2 0 obj
 << /Type /Catalog
 /Pages 3 0 R
 /Names 25 0 R
-/Outlines 4013 0 R
-/PageLabels 4217 0 R
+/Outlines 4009 0 R
+/PageLabels 4213 0 R
 /PageMode /UseOutlines
 /ViewerPreferences [/FitWindow]
 >>
 endobj
 3 0 obj
 << /Type /Pages
-/Count 668
-/Kids [7 0 R 13 0 R ... several hundred PDF page object references elided ...]

[05/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/CellUtil.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/CellUtil.html 
b/devapidocs/org/apache/hadoop/hbase/CellUtil.html
index 0a09cb8..f2a8a69 100644
--- a/devapidocs/org/apache/hadoop/hbase/CellUtil.html
+++ b/devapidocs/org/apache/hadoop/hbase/CellUtil.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9,"i26":9,"i27":9,"i28":9,"i29":9,"i30":9,"i31":9,"i32":9,"i33":9,"i34":9,"i35":9,"i36":9,"i37":9,"i38":9,"i39":9,"i40":9,"i41":9,"i42":9,"i43":9,"i44":9,"i45":9,"i46":9,"i47":9,"i48":9,"i49":41,"i50":9,"i51":9,"i52":9,"i53":9,"i54":9,"i55":9,"i56":9,"i57":9,"i58":9,"i59":9,"i60":9,"i61":9,"i62":9,"i63":9,"i64":9,"i65":9,"i66":9,"i67":9,"i68":9,"i69":9,"i70":9,"i71":9,"i72":9,"i73":9,"i74":9,"i75":9,"i76":9,"i77":9,"i78":9,"i79":9,"i80":9,"i81":41,"i82":9,"i83":9,"i84":9,"i85":9,"i86":9,"i87":9,"i88":9,"i89":9,"i90":9,"i91":9,"i92":9,"i93":41,"i94":9,"i95":9,"i96":9,"i97":9,"i98":9,"i99":9,"i100":9,"i101":9,"i102":9,"i103":9,"i104":9,"i105":9,"i106":9,"i107":9};
+var methods = 
{"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9,"i26":9,"i27":9,"i28":9,"i29":9,"i30":9,"i31":9,"i32":9,"i33":9,"i34":9,"i35":9,"i36":9,"i37":9,"i38":9,"i39":9,"i40":9,"i41":9,"i42":9,"i43":9,"i44":9,"i45":9,"i46":9,"i47":9,"i48":9,"i49":9,"i50":9,"i51":9,"i52":9,"i53":41,"i54":9,"i55":9,"i56":9,"i57":9,"i58":9,"i59":9,"i60":9,"i61":9,"i62":9,"i63":9,"i64":9,"i65":9,"i66":9,"i67":9,"i68":9,"i69":9,"i70":9,"i71":9,"i72":9,"i73":9,"i74":9,"i75":9,"i76":9,"i77":9,"i78":9,"i79":9,"i80":9,"i81":9,"i82":9,"i83":9,"i84":9,"i85":41,"i86":9,"i87":9,"i88":9,"i89":9,"i90":9,"i91":9,"i92":9,"i93":9,"i94":9,"i95":9,"i96":9,"i97":41,"i98":9,"i99":9,"i100":9,"i101":9,"i102":9,"i103":9,"i104":9,"i105":9,"i106":9,"i107":9,"i108":9,"i109":9,"i110":9,"i111":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -111,7 +111,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
  @InterfaceStability.Evolving
-public final class CellUtil
+public final class CellUtil
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 Utility methods helpful slinging Cell instances.
  Some methods below are for internal use only and are marked 
InterfaceAudience.Private at the
@@ -451,23 +451,45 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 static Cell
+createFirstOnRow(byte[]row)
+
+
+static Cell
+createFirstOnRow(byte[]row,
+byte[]family,
+byte[]col)
+
+
+static Cell
 createFirstOnRow(byte[]row,
 introffset,
 shortrlength)
 
-
+
+static Cell
+createFirstOnRow(byte[]row,
+introffset,
+shortrlength,
+byte[]family,
+intfoffset,
+byteflength,
+byte[]col,
+intcoffset,
+intclength)
+
+
 static Cell
 createFirstOnRow(Cellcell)
 Create a Cell that is smaller than all other possible Cells 
for the given Cell's row.
 
 
-
+
 static Cell
 createFirstOnRowCol(Cellcell)
 Create a Cell that is smaller than all other possible Cells 
for the given Cell's row.
 
 
-
+
 static Cell
 createFirstOnRowCol(Cellcell,
byte[]qArray,
@@ -477,85 +499,89 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
  passed qualifier.
 
 
-
+
 static Cell
 createFirstOnRowColTS(Cellcell,
  longts)
 Creates the first cell with the row/family/qualifier of 
this cell and the given timestamp.
 
 
-
+
+static Cell
+createLastOnRow(byte[]row)
+
+
 static Cell
 createLastOnRow(Cellcell)
 Create a Cell that is larger than all other possible Cells 
for the given Cell's row.
 
 
-
+
 static Cell
 createLastOnRowCol(Cellcell)
 Create a Cell that is larger than all other possible Cells 
for the given Cell's rk:cf:q.
 
 
-
+
 static boolean
 equals(Cella,
   Cellb)
 equals
 
 
-
+
 static boolean
 equalsIgnoreMvccVersion(Cella,
Cellb)
 special case for Cell.equals
 
 
-
+
 static long
 estimatedHeapSizeOf(Cellcell)
 This is an estimate of the heap space occupied by a 
cell.
 
 
-
+
 static int
 estimatedSerializedSizeOf(Cellcell)
 Estimate based on keyvalue's serialization format.
 
 
-
+
 static int
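
The method summary above gains raw byte[] overloads of createFirstOnRow and createLastOnRow next to the existing Cell-based ones. A hedged sketch of what these fake boundary cells are for, with invented row, family and qualifier bytes:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class BoundaryCells {
  public static void main(String[] args) {
    byte[] row = Bytes.toBytes("row-42");
    byte[] family = Bytes.toBytes("f");
    byte[] qualifier = Bytes.toBytes("q");

    // A real cell somewhere inside the row.
    Cell data = new KeyValue(row, family, qualifier, Bytes.toBytes("value"));

    // Fake boundary cells: "first" sorts before every real cell of the row,
    // "last" sorts after every real cell of the row.
    Cell first = CellUtil.createFirstOnRow(row);
    Cell last = CellUtil.createLastOnRow(row);

    // Useful as search keys, e.g. with Arrays.binarySearch over a sorted Cell[].
    System.out.println(CellComparator.COMPARATOR.compare(first, data) < 0);  // true
    System.out.println(CellComparator.COMPARATOR.compare(data, last) < 0);   // true

    // The (row, family, col) overload pins the boundary to a specific column.
    Cell firstOnCol = CellUtil.createFirstOnRow(row, family, qualifier);
    System.out.println(CellComparator.COMPARATOR.compare(firstOnCol, data) <= 0);
  }
}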
 

[29/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
index b222809..1f34ddb 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
@@ -237,81 +237,81 @@
 229  /** Default value for ZooKeeper session 
timeout */
 230  public static final int 
DEFAULT_ZK_SESSION_TIMEOUT = 180 * 1000;
 231
-232  /** Configuration key for whether to 
use ZK.multi */
-233  public static final String 
ZOOKEEPER_USEMULTI = "hbase.zookeeper.useMulti";
+232  /** Parameter name for port region 
server listens on. */
+233  public static final String 
REGIONSERVER_PORT = "hbase.regionserver.port";
 234
-235  /** Parameter name for port region 
server listens on. */
-236  public static final String 
REGIONSERVER_PORT = "hbase.regionserver.port";
+235  /** Default port region server listens 
on. */
+236  public static final int 
DEFAULT_REGIONSERVER_PORT = 16020;
 237
-238  /** Default port region server listens 
on. */
-239  public static final int 
DEFAULT_REGIONSERVER_PORT = 16020;
+238  /** default port for region server web 
api */
+239  public static final int 
DEFAULT_REGIONSERVER_INFOPORT = 16030;
 240
-241  /** default port for region server web 
api */
-242  public static final int 
DEFAULT_REGIONSERVER_INFOPORT = 16030;
-243
-244  /** A configuration key for 
regionserver info port */
-245  public static final String 
REGIONSERVER_INFO_PORT =
-246"hbase.regionserver.info.port";
-247
-248  /** A flag that enables automatic 
selection of regionserver info port */
-249  public static final String 
REGIONSERVER_INFO_PORT_AUTO =
-250  REGIONSERVER_INFO_PORT + ".auto";
+241  /** A configuration key for 
regionserver info port */
+242  public static final String 
REGIONSERVER_INFO_PORT =
+243"hbase.regionserver.info.port";
+244
+245  /** A flag that enables automatic 
selection of regionserver info port */
+246  public static final String 
REGIONSERVER_INFO_PORT_AUTO =
+247  REGIONSERVER_INFO_PORT + ".auto";
+248
+249  /** Parameter name for what region 
server implementation to use. */
+250  public static final String 
REGION_SERVER_IMPL= "hbase.regionserver.impl";
 251
-252  /** Parameter name for what region 
server implementation to use. */
-253  public static final String 
REGION_SERVER_IMPL= "hbase.regionserver.impl";
+252  /** Parameter name for what master 
implementation to use. */
+253  public static final String MASTER_IMPL= 
"hbase.master.impl";
 254
-255  /** Parameter name for what master 
implementation to use. */
-256  public static final String MASTER_IMPL= 
"hbase.master.impl";
+255  /** Parameter name for what hbase 
client implementation to use. */
+256  public static final String 
HBASECLIENT_IMPL= "hbase.hbaseclient.impl";
 257
-258  /** Parameter name for what hbase 
client implementation to use. */
-259  public static final String 
HBASECLIENT_IMPL= "hbase.hbaseclient.impl";
+258  /** Parameter name for how often 
threads should wake up */
+259  public static final String 
THREAD_WAKE_FREQUENCY = "hbase.server.thread.wakefrequency";
 260
-261  /** Parameter name for how often 
threads should wake up */
-262  public static final String 
THREAD_WAKE_FREQUENCY = "hbase.server.thread.wakefrequency";
+261  /** Default value for thread wake 
frequency */
+262  public static final int 
DEFAULT_THREAD_WAKE_FREQUENCY = 10 * 1000;
 263
-264  /** Default value for thread wake 
frequency */
-265  public static final int 
DEFAULT_THREAD_WAKE_FREQUENCY = 10 * 1000;
+264  /** Parameter name for how often we 
should try to write a version file, before failing */
+265  public static final String 
VERSION_FILE_WRITE_ATTEMPTS = "hbase.server.versionfile.writeattempts";
 266
 267  /** Parameter name for how often we 
should try to write a version file, before failing */
-268  public static final String 
VERSION_FILE_WRITE_ATTEMPTS = "hbase.server.versionfile.writeattempts";
+268  public static final int 
DEFAULT_VERSION_FILE_WRITE_ATTEMPTS = 3;
 269
-270  /** Parameter name for how often we 
should try to write a version file, before failing */
-271  public static final int 
DEFAULT_VERSION_FILE_WRITE_ATTEMPTS = 3;
+270  /** Parameter name for how often a 
region should should perform a major compaction */
+271  public static final String 
MAJOR_COMPACTION_PERIOD = "hbase.hregion.majorcompaction";
 272
-273  /** Parameter name for how often a 
region should should perform a major compaction */
-274  public static final String 
MAJOR_COMPACTION_PERIOD = "hbase.hregion.majorcompaction";
-275
-276  /** Parameter name for the maximum 
batch of KVs to be used in flushes and compactions */
-277  public static final String 
COMPACTION_KV_MAX = "hbase.hstore.compaction.kv.max";
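
The HConstants hunk above mostly reshuffles these entries (the hbase.zookeeper.useMulti constant drops out and the rest shift up), but the keys themselves are the supported way to address region server, thread and compaction settings from code. A small sketch reading a few of them from a Configuration; the fallback values passed here are illustrative, not the authoritative defaults:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;

public class HConstantsDemo {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();

    // Prefer the HConstants names over hard-coded strings.
    int rsPort = conf.getInt(HConstants.REGIONSERVER_PORT,
        HConstants.DEFAULT_REGIONSERVER_PORT);                 // 16020 per the source above
    long majorCompactionPeriod = conf.getLong(HConstants.MAJOR_COMPACTION_PERIOD,
        7 * 24 * 60 * 60 * 1000L);                             // fallback here is illustrative
    int compactionKvMax = conf.getInt(HConstants.COMPACTION_KV_MAX, 10);

    System.out.println("region server port: " + rsPort);
    System.out.println("major compaction period (ms): " + majorCompactionPeriod);
    System.out.println("compaction KV max: " + compactionKvMax);
  }
}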

[41/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 9ca4097..52ebf28 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -232,23 +232,45 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Cell
+CellUtil.createFirstOnRow(byte[]row)
+
+
+static Cell
+CellUtil.createFirstOnRow(byte[]row,
+byte[]family,
+byte[]col)
+
+
+static Cell
 CellUtil.createFirstOnRow(byte[]row,
 introffset,
 shortrlength)
 
 
 static Cell
+CellUtil.createFirstOnRow(byte[]row,
+introffset,
+shortrlength,
+byte[]family,
+intfoffset,
+byteflength,
+byte[]col,
+intcoffset,
+intclength)
+
+
+static Cell
 CellUtil.createFirstOnRow(Cellcell)
 Create a Cell that is smaller than all other possible Cells 
for the given Cell's row.
 
 
-
+
 static Cell
 CellUtil.createFirstOnRowCol(Cellcell)
 Create a Cell that is smaller than all other possible Cells 
for the given Cell's row.
 
 
-
+
 static Cell
 CellUtil.createFirstOnRowCol(Cellcell,
byte[]qArray,
@@ -258,13 +280,17 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  passed qualifier.
 
 
-
+
 static Cell
 CellUtil.createFirstOnRowColTS(Cellcell,
  longts)
 Creates the first cell with the row/family/qualifier of 
this cell and the given timestamp.
 
 
+
+static Cell
+CellUtil.createLastOnRow(byte[]row)
+
 
 static Cell
 CellUtil.createLastOnRow(Cellcell)
@@ -968,17 +994,17 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Append
-Append.add(Cellcell)
-Add column and value to this Append operation.
-
-
-
 Increment
 Increment.add(Cellcell)
 Add the specified KeyValue to this operation.
 
 
+
+Append
+Append.add(Cellcell)
+Add column and value to this Append operation.
+
+
 
 Delete
 Delete.addDeleteMarker(Cellkv)
@@ -1060,8 +1086,8 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Put.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
 
 
-Append
-Append.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
+Increment
+Increment.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
 
 
 Mutation
@@ -1070,8 +1096,8 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Increment
-Increment.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
+Append
+Append.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
 
 
 Delete
@@ -1092,6 +1118,14 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
+Cell
+ColumnPaginationFilter.getNextCellHint(Cellcell)
+
+
+Cell
+FilterList.getNextCellHint(CellcurrentCell)
+
+
 abstract Cell
 Filter.getNextCellHint(CellcurrentCell)
 If the filter returns the match code SEEK_NEXT_USING_HINT, 
then it should also tell which is
@@ -1100,23 +1134,25 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 Cell
-ColumnPrefixFilter.getNextCellHint(Cellcell)
+MultipleColumnPrefixFilter.getNextCellHint(Cellcell)
 
 
 Cell
-MultipleColumnPrefixFilter.getNextCellHint(Cellcell)
+TimestampsFilter.getNextCellHint(CellcurrentCell)
+Pick the next cell that the scanner should seek to.
+
 
 
 Cell
-FilterList.getNextCellHint(CellcurrentCell)
+FuzzyRowFilter.getNextCellHint(CellcurrentCell)
 
 
 Cell
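
The getNextCellHint rows above list the shipped filters that implement seek hints, and the Filter javadoc quoted above states the contract: return SEEK_NEXT_USING_HINT, then say where to seek. A hedged sketch of a custom filter honouring that contract; the class name and the "row-500" boundary are invented, and the protobuf serialization a deployable filter also needs is left out.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Sketch only: skips everything before "row-500" in one seek instead of
 * rejecting it cell by cell.
 */
public class SkipToRowFilter extends FilterBase {
  private final byte[] targetRow = Bytes.toBytes("row-500");   // hypothetical boundary

  @Override
  public ReturnCode filterKeyValue(Cell cell) {
    if (Bytes.compareTo(CellUtil.cloneRow(cell), targetRow) < 0) {
      // Tell the scanner to consult getNextCellHint() for where to seek.
      return ReturnCode.SEEK_NEXT_USING_HINT;
    }
    return ReturnCode.INCLUDE;
  }

  @Override
  public Cell getNextCellHint(Cell currentCell) {
    // Smallest possible cell of the target row; the scanner seeks straight to it.
    return CellUtil.createFirstOnRow(targetRow);
  }
}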

[15/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/book.html
--
diff --git a/book.html b/book.html
index 1f99bce..d244150 100644
--- a/book.html
+++ b/book.html
@@ -1928,8 +1928,9 @@ For example:
 4.2. ZooKeeper Requirements
 
 ZooKeeper 3.4.x is required as of HBase 1.0.0.
-HBase makes use of the multi functionality that is only available 
since 3.4.0 (The useMulti configuration option defaults to 
true in HBase 1.0.0).
-See https://issues.apache.org/jira/browse/HBASE-12241;>HBASE-12241 
(The crash of regionServer when taking deadservers replication queue 
breaks replication) and https://issues.apache.org/jira/browse/HBASE-6775;>HBASE-6775 (Use 
ZK.multi when available for HBASE-6710 0.92/0.94 compatibility fix) for 
background.
+HBase makes use of the multi functionality that is only available since ZooKeeper 3.4.0. The hbase.zookeeper.useMulti configuration property defaults to true in HBase 1.0.0.
+Refer to https://issues.apache.org/jira/browse/HBASE-12241;>HBASE-12241 (The crash 
of regionServer when taking deadservers replication queue breaks 
replication) and https://issues.apache.org/jira/browse/HBASE-6775;>HBASE-6775 (Use 
ZK.multi when available for HBASE-6710 0.92/0.94 compatibility fix) for 
background.
+The property is deprecated and useMulti is always enabled in HBase 2.0.
 
 
 
@@ -2221,21 +,6 @@ Configuration that it is thought rare anyone would 
change can exist only in code
 
 
 
-
-
-hbase.bulkload.staging.dir
-
-
-Description
-A staging directory in default file system (HDFS) for bulk loading.
-
-
-Default
-${hbase.fs.tmp.dir}
-
-
-
-
 
 
 hbase.cluster.distributed
@@ -2791,21 +2777,6 @@ Configuration that it is thought rare anyone would 
change can exist only in code
 
 
 
-
-
-hbase.zookeeper.useMulti
-
-
-Description
-Instructs HBase to make use of ZooKeeper's multi-update functionality. This allows certain ZooKeeper operations to complete more
quickly and prevents some issues with rare Replication failure scenarios (see 
the release note of HBASE-2611 for an example). IMPORTANT: only set this to 
true if all ZooKeeper servers in the cluster are on version 3.4+ and will not 
be downgraded. ZooKeeper versions before 3.4 do not support multi-update and 
will not fail gracefully if multi-update is invoked (see ZOOKEEPER-1495).
-
-
-Default
-true
-
-
-
-
 
 
 hbase.zookeeper.property.initLimit
@@ -4218,7 +4189,7 @@ Configuration that it is thought rare anyone would change 
can exist only in code
 
 
 Description
-FS Permissions for the root directory in a secure (kerberos) setup. When the master starts, it creates the rootdir with these permissions, or sets the permissions if they do not match.
+FS Permissions for the root data subdirectory in a secure (kerberos) setup. When the master starts, it creates the rootdir with these permissions, or sets the permissions if they do not match.
 
 
 Default
@@ -26440,9 +26411,7 @@ The following example limits the above example to 200 
MB/sec.
 
 137.9. 
Storing Snapshots in an Amazon S3 Bucket
 
-For general information and limitations of using Amazon S3 storage with 
HBase, see
-[amazon_s3_configuration]. You can also 
store and retrieve snapshots from Amazon
-S3, using the following procedure.
+You can store and retrieve snapshots from Amazon S3, using the following 
procedure.
 
 
 
@@ -30021,9 +29990,7 @@ In the example below we have ZooKeeper persist to 
/user/local/zookeeper
 
 What version of ZooKeeper should I use?
 
-The newer version, the better.
-For example, some folks have been bitten by https://issues.apache.org/jira/browse/ZOOKEEPER-1277;>ZOOKEEPER-1277.
-If running zookeeper 3.5+, you can ask hbase to make use of the new multi 
operation by enabling hbase.zookeeper.useMulti" in your 
hbase-site.xml.
+The newer the version, the better. ZooKeeper 3.4.x is required as of HBase 1.0.0.
 
 
 
@@ -34050,7 +34017,7 @@ The server will return cellblocks compressed using this 
same compressor as long
 
 
 Version 2.0.0-SNAPSHOT
-Last updated 2016-07-24 14:31:11 +00:00
+Last updated 2016-09-29 14:44:28 +00:00
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/bulk-loads.html
--
diff --git a/bulk-loads.html b/bulk-loads.html
index 0a4e0ff..ea211c3 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase   
   Bulk Loads in Apache HBase (TM)
@@ -305,7 +305,7 @@ under the License. -->
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-09-16
+  Last Published: 
2016-09-29
 
 
 



[33/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/util/Counter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/Counter.html 
b/apidocs/org/apache/hadoop/hbase/util/Counter.html
index 67684f0..922a61e 100644
--- a/apidocs/org/apache/hadoop/hbase/util/Counter.html
+++ b/apidocs/org/apache/hadoop/hbase/util/Counter.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
+var methods = {"i0":42,"i1":42,"i2":42,"i3":42,"i4":42,"i5":42,"i6":42};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -108,10 +108,14 @@ var activeTableTab = "activeTableTab";
 
 
 
+Deprecated.
+use http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">LongAdder instead.
+
 
 @InterfaceAudience.Public
  @InterfaceStability.Evolving
-public class Counter
+ http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
+public class Counter
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 High scalable counter. Thread safe.
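
The page above deprecates Counter in favour of the JDK 8 LongAdder. A minimal side-by-side sketch of the migration, restricted to the methods listed in the summary:

import java.util.concurrent.atomic.LongAdder;

import org.apache.hadoop.hbase.util.Counter;

public class CounterMigration {
  public static void main(String[] args) {
    // Deprecated HBase counter: the methods in the summary above.
    Counter old = new Counter();
    old.increment();
    old.add(41);
    old.decrement();
    System.out.println("Counter.get():   " + old.get());      // 41

    // Drop-in JDK 8 replacement suggested by the deprecation note.
    LongAdder adder = new LongAdder();
    adder.increment();
    adder.add(41);
    adder.decrement();
    System.out.println("LongAdder.sum(): " + adder.sum());     // 41
  }
}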
 
@@ -132,10 +136,14 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 Constructor and Description
 
 
-Counter()
+Counter()
+Deprecated.
+
 
 
-Counter(longinitValue)
+Counter(longinitValue)
+Deprecated.
+
 
 
 
@@ -147,38 +155,52 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 Method Summary
 
-All MethodsInstance MethodsConcrete Methods
+All MethodsInstance MethodsConcrete MethodsDeprecated Methods
 
 Modifier and Type
 Method and Description
 
 
 void
-add(longdelta)
+add(longdelta)
+Deprecated.
+
 
 
 void
-decrement()
+decrement()
+Deprecated.
+
 
 
 void
-destroy()
+destroy()
+Deprecated.
+
 
 
 long
-get()
+get()
+Deprecated.
+
 
 
 void
-increment()
+increment()
+Deprecated.
+
 
 
 void
-set(longvalue)
+set(longvalue)
+Deprecated.
+
 
 
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-toString()
+toString()
+Deprecated.
+
 
 
 
@@ -208,7 +230,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 Counter
-publicCounter()
+publicCounter()
+Deprecated.
 
 
 
@@ -217,7 +240,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 Counter
-publicCounter(longinitValue)
+publicCounter(longinitValue)
+Deprecated.
 
 
 
@@ -234,7 +258,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 add
-publicvoidadd(longdelta)
+publicvoidadd(longdelta)
+Deprecated.
 
 
 
@@ -243,7 +268,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 increment
-publicvoidincrement()
+publicvoidincrement()
+Deprecated.
 
 
 
@@ -252,7 +278,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 decrement
-publicvoiddecrement()
+publicvoiddecrement()
+Deprecated.
 
 
 
@@ -261,7 +288,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 set
-publicvoidset(longvalue)
+publicvoidset(longvalue)
+Deprecated.
 
 
 
@@ -270,7 +298,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 get
-publiclongget()
+publiclongget()
+Deprecated.
 
 
 
@@ -279,7 +308,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 destroy
-publicvoiddestroy()
+publicvoiddestroy()
+Deprecated.
 
 
 
@@ -288,7 +318,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 toString
-publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
+publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
+Deprecated.
 
 Overrides:
 http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--;
 title="class or interface in java.lang">toStringin 
classhttp://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/util/FastLongHistogram.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/FastLongHistogram.html 

[19/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
index 4dbb152..3d30c3b 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
@@ -32,26 +32,26 @@
 024import java.io.DataInput;
 025import java.io.DataOutput;
 026import java.io.IOException;
-027import java.math.BigDecimal;
-028import java.math.BigInteger;
-029import java.nio.ByteBuffer;
-030import java.nio.charset.Charset;
-031import java.security.SecureRandom;
-032import java.util.Arrays;
-033import java.util.Collection;
-034import java.util.Comparator;
-035import java.util.Iterator;
-036import java.util.List;
-037
-038import com.google.protobuf.ByteString;
+027import 
java.io.UnsupportedEncodingException;
+028import java.math.BigDecimal;
+029import java.math.BigInteger;
+030import java.nio.ByteBuffer;
+031import java.nio.charset.Charset;
+032import 
java.nio.charset.StandardCharsets;
+033import java.security.SecureRandom;
+034import java.util.Arrays;
+035import java.util.Collection;
+036import java.util.Comparator;
+037import java.util.Iterator;
+038import java.util.List;
 039
 040import org.apache.commons.logging.Log;
 041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-043import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-044import org.apache.hadoop.hbase.Cell;
-045import 
org.apache.hadoop.hbase.CellComparator;
-046import 
org.apache.hadoop.hbase.KeyValue;
+042import org.apache.hadoop.hbase.Cell;
+043import 
org.apache.hadoop.hbase.CellComparator;
+044import 
org.apache.hadoop.hbase.KeyValue;
+045import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+046import 
org.apache.hadoop.hbase.classification.InterfaceStability;
 047import 
org.apache.hadoop.io.RawComparator;
 048import 
org.apache.hadoop.io.WritableComparator;
 049import 
org.apache.hadoop.io.WritableUtils;
@@ -60,2598 +60,2618 @@
 052
 053import 
com.google.common.annotations.VisibleForTesting;
 054import com.google.common.collect.Lists;
-055
-056/**
-057 * Utility class that handles byte 
arrays, conversions to/from other types,
-058 * comparisons, hash code generation, 
manufacturing keys for HashMaps or
-059 * HashSets, and can be used as key in 
maps or trees.
-060 */
-061@SuppressWarnings("restriction")
-062@InterfaceAudience.Public
-063@InterfaceStability.Stable
-064@edu.umd.cs.findbugs.annotations.SuppressWarnings(
-065
value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS",
-066justification="It has been like this 
forever")
-067public class Bytes implements 
ComparableBytes {
-068  //HConstants.UTF8_ENCODING should be 
updated if this changed
-069  /** When we encode strings, we always 
specify UTF8 encoding */
-070  private static final String 
UTF8_ENCODING = "UTF-8";
-071
-072  //HConstants.UTF8_CHARSET should be 
updated if this changed
-073  /** When we encode strings, we always 
specify UTF8 encoding */
-074  private static final Charset 
UTF8_CHARSET = Charset.forName(UTF8_ENCODING);
-075
-076  //HConstants.EMPTY_BYTE_ARRAY should be 
updated if this changed
-077  private static final byte [] 
EMPTY_BYTE_ARRAY = new byte [0];
-078
-079  private static final Log LOG = 
LogFactory.getLog(Bytes.class);
+055import com.google.protobuf.ByteString;
+056
+057/**
+058 * Utility class that handles byte 
arrays, conversions to/from other types,
+059 * comparisons, hash code generation, 
manufacturing keys for HashMaps or
+060 * HashSets, and can be used as key in 
maps or trees.
+061 */
+062@SuppressWarnings("restriction")
+063@InterfaceAudience.Public
+064@InterfaceStability.Stable
+065@edu.umd.cs.findbugs.annotations.SuppressWarnings(
+066
value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS",
+067justification="It has been like this 
forever")
+068public class Bytes implements 
ComparableBytes {
+069  //HConstants.UTF8_ENCODING should be 
updated if this changed
+070  /** When we encode strings, we always 
specify UTF8 encoding */
+071  private static final String 
UTF8_ENCODING = "UTF-8";
+072
+073  //HConstants.UTF8_CHARSET should be 
updated if this changed
+074  /** When we encode strings, we always 
specify UTF8 encoding */
+075  private static final Charset 
UTF8_CHARSET = Charset.forName(UTF8_ENCODING);
+076
+077  // Using the charset canonical name for 
String/byte[] conversions is much
+078  // more efficient due to use of cached 
encoders/decoders.
+079  private static final String UTF8_CSN = 
StandardCharsets.UTF_8.name();
 080
-081  /**
-082   * Size of boolean in bytes
-083   */
-084  public static final int 

[18/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
index 4dbb152..3d30c3b 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
@@ -32,26 +32,26 @@ (hunk identical to the Bytes.ByteArrayComparator.html hunk shown above: import reordering plus the cached UTF8_CSN charset name)

[34/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/util/Bytes.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/Bytes.html 
b/apidocs/org/apache/hadoop/hbase/util/Bytes.html
index ea19094..8276a12 100644
--- a/apidocs/org/apache/hadoop/hbase/util/Bytes.html
+++ b/apidocs/org/apache/hadoop/hbase/util/Bytes.html
@@ -115,7 +115,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
  @InterfaceStability.Stable
-public class Bytes
+public class Bytes
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableBytes
 Utility class that handles byte arrays, conversions to/from 
other types,
@@ -1199,7 +1199,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 SIZEOF_BOOLEAN
-public static finalint SIZEOF_BOOLEAN
+public static finalint SIZEOF_BOOLEAN
 Size of boolean in bytes
 
 See Also:
@@ -1213,7 +1213,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 SIZEOF_BYTE
-public static finalint SIZEOF_BYTE
+public static finalint SIZEOF_BYTE
 Size of byte in bytes
 
 See Also:
@@ -1227,7 +1227,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 SIZEOF_CHAR
-public static finalint SIZEOF_CHAR
+public static finalint SIZEOF_CHAR
 Size of char in bytes
 
 See Also:
@@ -1241,7 +1241,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 SIZEOF_DOUBLE
-public static finalint SIZEOF_DOUBLE
+public static finalint SIZEOF_DOUBLE
 Size of double in bytes
 
 See Also:
@@ -1255,7 +1255,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 SIZEOF_FLOAT
-public static finalint SIZEOF_FLOAT
+public static finalint SIZEOF_FLOAT
 Size of float in bytes
 
 See Also:
@@ -1269,7 +1269,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 SIZEOF_INT
-public static finalint SIZEOF_INT
+public static finalint SIZEOF_INT
 Size of int in bytes
 
 See Also:
@@ -1283,7 +1283,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 SIZEOF_LONG
-public static finalint SIZEOF_LONG
+public static finalint SIZEOF_LONG
 Size of long in bytes
 
 See Also:
@@ -1297,7 +1297,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 SIZEOF_SHORT
-public static finalint SIZEOF_SHORT
+public static finalint SIZEOF_SHORT
 Size of short in bytes
 
 See Also:
@@ -1311,7 +1311,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 MASK_FOR_LOWER_INT_IN_LONG
-public static finallong MASK_FOR_LOWER_INT_IN_LONG
+public static finallong MASK_FOR_LOWER_INT_IN_LONG
 Mask to apply to a long to reveal the lower int only. Use 
like this:
  int i = (int)(0xL ^ some_long_value);
 
@@ -1326,7 +1326,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 ESTIMATED_HEAP_TAX
-public static finalint ESTIMATED_HEAP_TAX
+public static finalint ESTIMATED_HEAP_TAX
 Estimate of size cost to pay beyond payload in jvm for 
instance of byte [].
  Estimate based on study of jhat and jprofiler numbers.
 
@@ -1341,7 +1341,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 BYTES_COMPARATOR
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">Comparatorbyte[] BYTES_COMPARATOR
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">Comparatorbyte[] BYTES_COMPARATOR
 Pass this to TreeMaps where byte [] are keys.
 
 
@@ -1351,7 +1351,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 BYTES_RAWCOMPARATOR
-public static finalorg.apache.hadoop.io.RawComparatorbyte[] 
BYTES_RAWCOMPARATOR
+public static finalorg.apache.hadoop.io.RawComparatorbyte[] 
BYTES_RAWCOMPARATOR
 Use comparing byte arrays, byte-by-byte
 
 
@@ -1369,7 +1369,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 Bytes
-publicBytes()
+publicBytes()
 Create a zero-size sequence.
 
 
@@ -1379,7 +1379,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 Bytes
-publicBytes(byte[]bytes)
+publicBytes(byte[]bytes)
 Create a Bytes using the byte array as the initial 
value.
 
 Parameters:
@@ -1393,7 +1393,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 Bytes
-publicBytes(Bytesibw)
+publicBytes(Bytesibw)
 Set the new Bytes to the contents of the passed
  ibw.
 
@@ -1408,7 +1408,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
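
Bytes, summarised above, is the conversion and comparison helper for the byte[] keys and values HBase passes around; its BYTES_COMPARATOR field is documented as the comparator to hand to TreeMaps keyed by byte[]. A short sketch exercising those pieces:

import java.util.TreeMap;

import org.apache.hadoop.hbase.util.Bytes;

public class BytesDemo {
  public static void main(String[] args) {
    // Round-trip conversions.
    byte[] encoded = Bytes.toBytes(42L);
    System.out.println(Bytes.toLong(encoded));                 // 42
    System.out.println(Bytes.SIZEOF_LONG);                     // 8
    System.out.println(Bytes.toString(Bytes.toBytes("key")));  // key

    // byte[] has identity-based equals/hashCode, so plain HashMap keys break;
    // a TreeMap with BYTES_COMPARATOR (as the field's javadoc suggests) works.
    TreeMap<byte[], String> byRow = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    byRow.put(Bytes.toBytes("row-b"), "second");
    byRow.put(Bytes.toBytes("row-a"), "first");
    System.out.println(Bytes.toString(byRow.firstKey()) + " -> " + byRow.firstEntry().getValue());
  }
}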
 
 
 

[26/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html
index da95361..72920ba 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html
@@ -283,666 +283,678 @@
 275return result;
 276  }
 277
-278  protected int binarySearch(final Cell 
[] kvs,
-279 final byte 
[] family,
-280 final byte 
[] qualifier) {
-281Cell searchTerm =
-282
KeyValueUtil.createFirstOnRow(CellUtil.cloneRow(kvs[0]),
-283family, qualifier);
-284
-285// pos === ( -(insertion point) - 
1)
-286int pos = Arrays.binarySearch(kvs, 
searchTerm, CellComparator.COMPARATOR);
-287// never will exact match
-288if (pos < 0) {
-289  pos = (pos+1) * -1;
-290  // pos is now insertion point
-291}
-292if (pos == kvs.length) {
-293  return -1; // doesn't exist
-294}
-295return pos;
-296  }
-297
-298  /**
-299   * Searches for the latest value for 
the specified column.
-300   *
-301   * @param kvs the array to search
-302   * @param family family name
-303   * @param foffset family offset
-304   * @param flength family length
-305   * @param qualifier column qualifier
-306   * @param qoffset qualifier offset
-307   * @param qlength qualifier length
-308   *
-309   * @return the index where the value 
was found, or -1 otherwise
-310   */
-311  protected int binarySearch(final Cell 
[] kvs,
-312  final byte [] family, final int 
foffset, final int flength,
-313  final byte [] qualifier, final int 
qoffset, final int qlength) {
-314
-315double keyValueSize = (double)
-316
KeyValue.getKeyValueDataStructureSize(kvs[0].getRowLength(), flength, qlength, 
0);
-317
-318byte[] buffer = localBuffer.get();
-319if (buffer == null || keyValueSize > buffer.length) {
-320  // pad to the smallest multiple of 
the pad width
-321  buffer = new byte[(int) 
Math.ceil(keyValueSize / PAD_WIDTH) * PAD_WIDTH];
-322  localBuffer.set(buffer);
-323}
-324
-325Cell searchTerm = 
KeyValueUtil.createFirstOnRow(buffer, 0,
-326kvs[0].getRowArray(), 
kvs[0].getRowOffset(), kvs[0].getRowLength(),
-327family, foffset, flength,
-328qualifier, qoffset, qlength);
+278  private byte[] notNullBytes(final 
byte[] bytes) {
+279if (bytes == null) {
+280  return 
HConstants.EMPTY_BYTE_ARRAY;
+281} else {
+282  return bytes;
+283}
+284  }
+285
+286  protected int binarySearch(final Cell 
[] kvs,
+287 final byte 
[] family,
+288 final byte 
[] qualifier) {
+289byte[] familyNotNull = 
notNullBytes(family);
+290byte[] qualifierNotNull = 
notNullBytes(qualifier);
+291Cell searchTerm =
+292
CellUtil.createFirstOnRow(kvs[0].getRowArray(),
+293kvs[0].getRowOffset(), 
kvs[0].getRowLength(),
+294familyNotNull, 0, 
(byte)familyNotNull.length,
+295qualifierNotNull, 0, 
qualifierNotNull.length);
+296
+297// pos === ( -(insertion point) - 
1)
+298int pos = Arrays.binarySearch(kvs, 
searchTerm, CellComparator.COMPARATOR);
+299// never will exact match
+300if (pos < 0) {
+301  pos = (pos+1) * -1;
+302  // pos is now insertion point
+303}
+304if (pos == kvs.length) {
+305  return -1; // doesn't exist
+306}
+307return pos;
+308  }
+309
+310  /**
+311   * Searches for the latest value for 
the specified column.
+312   *
+313   * @param kvs the array to search
+314   * @param family family name
+315   * @param foffset family offset
+316   * @param flength family length
+317   * @param qualifier column qualifier
+318   * @param qoffset qualifier offset
+319   * @param qlength qualifier length
+320   *
+321   * @return the index where the value 
was found, or -1 otherwise
+322   */
+323  protected int binarySearch(final Cell 
[] kvs,
+324  final byte [] family, final int 
foffset, final int flength,
+325  final byte [] qualifier, final int 
qoffset, final int qlength) {
+326
+327double keyValueSize = (double)
+328
KeyValue.getKeyValueDataStructureSize(kvs[0].getRowLength(), flength, qlength, 
0);
 329
-330// pos === ( -(insertion point) - 
1)
-331int pos = Arrays.binarySearch(kvs, 
searchTerm, CellComparator.COMPARATOR);
-332// never will exact match
-333if (pos < 0) {
-334  pos = (pos+1) * -1;
-335  // pos is now insertion point
-336}
-337if (pos == kvs.length) {
-338  return -1; // doesn't exist
-339}
-340return pos;
-341  }
-342
-343  /**
-344   * The Cell for the most recent 
timestamp for a given column.
-345   *
-346   * @param family
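
Aside for readers of this hunk: both the removed and the added binarySearch bodies lean on Arrays.binarySearch encoding a miss as (-(insertion point) - 1). A standalone, JDK-only sketch of that recovery pattern (illustrative; the array and key are made up, this is not the Result code itself):

import java.util.Arrays;

public class InsertionPointExample {
  public static void main(String[] args) {
    int[] sorted = {10, 20, 30};
    int pos = Arrays.binarySearch(sorted, 25);   // 25 is absent, so pos is negative
    if (pos < 0) {
      // Same transformation as in the hunk: turn the encoded miss back into
      // the insertion point, i.e. the index of the first element >= the key.
      pos = (pos + 1) * -1;
    }
    if (pos == sorted.length) {
      System.out.println("key sorts past the end; the Result code returns -1 here");
    } else {
      System.out.println("first element >= key is at index " + pos); // index 2
    }
  }
}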

[03/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/HConstants.html 
b/devapidocs/org/apache/hadoop/hbase/HConstants.html
index e6652c7..bdd073e 100644
--- a/devapidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/devapidocs/org/apache/hadoop/hbase/HConstants.html
@@ -190,24 +190,30 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+BULKLOAD_STAGING_DIR_NAME
+Staging dir used by bulk load
+
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 BYTES_PER_CHECKSUM
 The name of the configuration parameter that specifies
  the number of bytes in a newly created checksum chunk.
 
 
-
+
 static byte[]
 CATALOG_FAMILY
 The catalog family
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CATALOG_FAMILY_STR
 The catalog family as a string
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CHECKSUM_TYPE_NAME
 The name of the configuration parameter that specifies
@@ -215,668 +221,668 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
  for newly created blocks.
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CIPHER_AES
 Default cipher for encryption
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CLIENT_PORT_STR
 The ZK client port key in the ZK properties map.
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CLUSTER_DISTRIBUTED
 Cluster is in distributed mode or not
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CLUSTER_ID_DEFAULT
 Default value for cluster ID
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CLUSTER_ID_FILE_NAME
 name of the file for unique cluster ID
 
 
-
+
 static boolean
 CLUSTER_IS_DISTRIBUTED
 Cluster is fully-distributed
 
 
-
+
 static boolean
 CLUSTER_IS_LOCAL
 Cluster is standalone or pseudo-distributed
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 COMPACTION_KV_MAX
 Parameter name for the maximum batch of KVs to be used in 
flushes and compactions
 
 
-
+
 static int
 COMPACTION_KV_MAX_DEFAULT
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CONFIGURATION
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CORRUPT_DIR_NAME
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_INCLUSION_KEY
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
 CP_HTD_ATTR_KEY_PATTERN
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
 CP_HTD_ATTR_VALUE_PARAM_PATTERN
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
 CP_HTD_ATTR_VALUE_PATTERN
 
  Pattern that matches a coprocessor specification.
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_ALTERNATE_KEY_ALGORITHM_CONF_KEY
 Configuration key for the name of the alternate cipher 
algorithm for the cluster, a string
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 

[20/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferUtils.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferUtils.html 
b/apidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferUtils.html
index 006f111..e954828 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferUtils.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferUtils.html
@@ -25,1013 +25,1022 @@
 017package org.apache.hadoop.hbase.util;
 018
 019import java.io.ByteArrayOutputStream;
-020import java.io.DataInputStream;
-021import java.io.IOException;
-022import java.io.InputStream;
-023import java.io.OutputStream;
-024import java.math.BigDecimal;
-025import java.math.BigInteger;
-026import java.nio.ByteBuffer;
-027import java.util.Arrays;
-028
-029import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-030import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-031import 
org.apache.hadoop.hbase.io.ByteBufferSupportOutputStream;
-032import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-033import org.apache.hadoop.io.IOUtils;
-034import 
org.apache.hadoop.io.WritableUtils;
-035
-036import sun.nio.ch.DirectBuffer;
-037
-038/**
-039 * Utility functions for working with 
byte buffers, such as reading/writing
-040 * variable-length long numbers.
-041 */
-042@SuppressWarnings("restriction")
-043@InterfaceAudience.Public
-044@InterfaceStability.Evolving
-045public final class ByteBufferUtils {
-046
-047  // "Compressed integer" serialization 
helper constants.
-048  public final static int VALUE_MASK = 
0x7f;
-049  public final static int NEXT_BIT_SHIFT 
= 7;
-050  public final static int NEXT_BIT_MASK = 
1 << 7;
-051  private static final boolean 
UNSAFE_AVAIL = UnsafeAvailChecker.isAvailable();
-052  private static final boolean 
UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
-053
-054  private ByteBufferUtils() {
-055  }
-056
-057  /**
-058   * Similar to {@link 
WritableUtils#writeVLong(java.io.DataOutput, long)},
-059   * but writes to a {@link 
ByteBuffer}.
-060   */
-061  public static void 
writeVLong(ByteBuffer out, long i) {
-062if (i >= -112 && i <= 127) {
-063  out.put((byte) i);
-064  return;
-065}
-066
-067int len = -112;
-068if (i < 0) {
-069  i ^= -1L; // take one's 
complement
-070  len = -120;
-071}
-072
-073long tmp = i;
-074while (tmp != 0) {
-075  tmp = tmp >> 8;
-076  len--;
-077}
-078
-079out.put((byte) len);
-080
-081len = (len < -120) ? -(len + 120) : -(len + 112);
-082
-083for (int idx = len; idx != 0; idx--) 
{
-084  int shiftbits = (idx - 1) * 8;
-085  long mask = 0xFFL << shiftbits;
-086  out.put((byte) ((i & mask) >> shiftbits));
-087}
-088  }
-089
-090  /**
-091   * Similar to {@link 
WritableUtils#readVLong(DataInput)} but reads from a
-092   * {@link ByteBuffer}.
-093   */
-094  public static long readVLong(ByteBuffer 
in) {
-095byte firstByte = in.get();
-096int len = 
WritableUtils.decodeVIntSize(firstByte);
-097if (len == 1) {
-098  return firstByte;
-099}
-100long i = 0;
-101for (int idx = 0; idx < len-1; idx++) {
-102  byte b = in.get();
-103  i = i << 8;
-104  i = i | (b & 0xFF);
-105}
-106return 
(WritableUtils.isNegativeVInt(firstByte) ? (i ^ -1L) : i);
-107  }
-108
+020import java.io.DataInput;
+021import java.io.DataInputStream;
+022import java.io.IOException;
+023import java.io.InputStream;
+024import java.io.OutputStream;
+025import java.math.BigDecimal;
+026import java.math.BigInteger;
+027import java.nio.ByteBuffer;
+028import java.util.Arrays;
+029
+030import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+031import 
org.apache.hadoop.hbase.classification.InterfaceStability;
+032import 
org.apache.hadoop.hbase.io.ByteBufferSupportOutputStream;
+033import 
org.apache.hadoop.hbase.io.util.StreamUtils;
+034import org.apache.hadoop.io.IOUtils;
+035import 
org.apache.hadoop.io.WritableUtils;
+036
+037import sun.nio.ch.DirectBuffer;
+038
+039/**
+040 * Utility functions for working with 
byte buffers, such as reading/writing
+041 * variable-length long numbers.
+042 */
+043@SuppressWarnings("restriction")
+044@InterfaceAudience.Public
+045@InterfaceStability.Evolving
+046public final class ByteBufferUtils {
+047
+048  // "Compressed integer" serialization 
helper constants.
+049  public final static int VALUE_MASK = 
0x7f;
+050  public final static int NEXT_BIT_SHIFT 
= 7;
+051  public final static int NEXT_BIT_MASK = 
1 << 7;
+052  private static final boolean 
UNSAFE_AVAIL = UnsafeAvailChecker.isAvailable();
+053  private static final boolean 
UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
+054
+055  private ByteBufferUtils() {
+056  }
+057
+058  /**
+059   * Similar to {@link 
WritableUtils#writeVLong(java.io.DataOutput, long)},
+060   * but writes to a {@link 

[42/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html 
b/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
index a4ba957..a86c3ae 100644
--- a/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
+++ b/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
@@ -263,7 +263,7 @@ the order they are declared.
 
 
 values
-public static KeepDeletedCells[] values()
+public static KeepDeletedCells[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -283,7 +283,7 @@ for (KeepDeletedCells c : KeepDeletedCells.values())
 
 
 valueOf
-public static KeepDeletedCells valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String name)
+public static KeepDeletedCells valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/UnknownScannerException.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/UnknownScannerException.html 
b/apidocs/org/apache/hadoop/hbase/UnknownScannerException.html
index 4a34db0..b3b2829 100644
--- a/apidocs/org/apache/hadoop/hbase/UnknownScannerException.html
+++ b/apidocs/org/apache/hadoop/hbase/UnknownScannerException.html
@@ -171,6 +171,10 @@ extends Constructor
 
 
+
+UnknownScannerException(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Strings,
+   http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exceptione)
+
 
 
 
@@ -221,7 +225,7 @@ extends 
 
 
-
+
 
 UnknownScannerException
 publicUnknownScannerException(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Strings)
@@ -232,6 +236,16 @@ extends 
 
 
+
+
+
+
+
+UnknownScannerException
+publicUnknownScannerException(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Strings,
+   http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exceptione)
+
+
 
 
 



[13/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 68ccf4a..39d2b20 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 2007 - 2016 The Apache Software Foundation
 
-  File: 1845,
- Errors: 11576,
+  File: 1850,
+ Errors: 11585,
  Warnings: 0,
  Infos: 0
   
@@ -125,7 +125,7 @@ under the License.
   0
 
 
-  55
+  54
 
   
   
@@ -480,7 +480,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.MasterKeepAliveConnection.java;>org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.snapshot.HBaseSnapshotException.java;>org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
 
 
   0
@@ -494,7 +494,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.snapshot.HBaseSnapshotException.java;>org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.MasterKeepAliveConnection.java;>org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
 
 
   0
@@ -755,7 +755,7 @@ under the License.
   0
 
 
-  5
+  4
 
   
   
@@ -1026,7 +1026,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.TableSnapshotScanner.java;>org/apache/hadoop/hbase/client/TableSnapshotScanner.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.wal.FSHLogProvider.java;>org/apache/hadoop/hbase/wal/FSHLogProvider.java
 
 
   0
@@ -1035,12 +1035,12 @@ under the License.
   0
 
 
-  4
+  0
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.wal.FSHLogProvider.java;>org/apache/hadoop/hbase/wal/FSHLogProvider.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.TableSnapshotScanner.java;>org/apache/hadoop/hbase/client/TableSnapshotScanner.java
 
 
   0
@@ -1049,7 +1049,7 @@ under the License.
   0
 
 
-  0
+  4
 
   
   
@@ -1567,7 +1567,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
@@ -2505,7 +2505,7 @@ under the License.
   0
 
 
-  203
+  211
 
   
   
@@ -2636,7 +2636,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.CompactionPipeline.java;>org/apache/hadoop/hbase/regionserver/CompactionPipeline.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.mapreduce.PutSortReducer.java;>org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
 
 
   0
@@ -2645,12 +2645,12 @@ under the License.
   0
 
 
-  1
+  2
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.mapreduce.PutSortReducer.java;>org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.CompactionPipeline.java;>org/apache/hadoop/hbase/regionserver/CompactionPipeline.java
 
 
   0
@@ -2659,7 +2659,7 @@ under the License.
   0
 
 
-  2
+  1
 
   

[31/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/util/package-summary.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/package-summary.html 
b/apidocs/org/apache/hadoop/hbase/util/package-summary.html
index 992912b..d27ecac 100644
--- a/apidocs/org/apache/hadoop/hbase/util/package-summary.html
+++ b/apidocs/org/apache/hadoop/hbase/util/package-summary.html
@@ -171,8 +171,8 @@
 
 
 Counter
-
-High scalable counter.
+Deprecated
+use http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">LongAdder instead.
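
Since the package summary now points Counter users at java.util.concurrent.atomic.LongAdder, a minimal JDK-only sketch of the replacement (illustrative, not from the commit):

import java.util.concurrent.atomic.LongAdder;

public class LongAdderExample {
  public static void main(String[] args) {
    LongAdder requests = new LongAdder();
    requests.increment();               // cheap under contention: per-thread cells, no single CAS hot spot
    requests.add(5);
    System.out.println(requests.sum()); // 6 -- sum() folds the per-thread cells together
  }
}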
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/overview-frame.html
--
diff --git a/apidocs/overview-frame.html b/apidocs/overview-frame.html
index bd42c17..8f2ef06 100644
--- a/apidocs/overview-frame.html
+++ b/apidocs/overview-frame.html
@@ -41,6 +41,7 @@
 org.apache.hadoop.hbase.io.hfile
 org.apache.hadoop.hbase.io.util
 org.apache.hadoop.hbase.ipc
+org.apache.hadoop.hbase.jetty
 org.apache.hadoop.hbase.mapred
 org.apache.hadoop.hbase.mapreduce
 org.apache.hadoop.hbase.master

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/overview-summary.html
--
diff --git a/apidocs/overview-summary.html b/apidocs/overview-summary.html
index c94078e..30656ad 100644
--- a/apidocs/overview-summary.html
+++ b/apidocs/overview-summary.html
@@ -212,100 +212,104 @@
 
 
 
+org.apache.hadoop.hbase.jetty
+
+
+
 org.apache.hadoop.hbase.mapred
 
 Provides HBase http://wiki.apache.org/hadoop/HadoopMapReduce;>MapReduce
 Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
-
+
 org.apache.hadoop.hbase.mapreduce
 
 Provides HBase http://wiki.apache.org/hadoop/HadoopMapReduce;>MapReduce
 Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
-
+
 org.apache.hadoop.hbase.master
 
 
-
+
 org.apache.hadoop.hbase.mob
 
 
-
+
 org.apache.hadoop.hbase.mob.compactions
 
 
-
+
 org.apache.hadoop.hbase.mob.mapreduce
 
 
-
+
 org.apache.hadoop.hbase.namespace
 
 
-
+
 org.apache.hadoop.hbase.nio
 
 
-
+
 org.apache.hadoop.hbase.quotas
 
 
-
+
 org.apache.hadoop.hbase.regionserver
 
 
-
+
 org.apache.hadoop.hbase.regionserver.querymatcher
 
 
-
+
 org.apache.hadoop.hbase.regionserver.throttle
 
 
-
+
 org.apache.hadoop.hbase.regionserver.wal
 
 
-
+
 org.apache.hadoop.hbase.replication
 
 Multi Cluster Replication
 
 
-
+
 org.apache.hadoop.hbase.rest
 
 HBase REST
 
 
-
+
 org.apache.hadoop.hbase.rest.client
 
 
-
+
 org.apache.hadoop.hbase.rsgroup
 
 
-
+
 org.apache.hadoop.hbase.security
 
 
-
+
 org.apache.hadoop.hbase.snapshot
 
 
-
+
 org.apache.hadoop.hbase.spark
 
 
-
+
 org.apache.hadoop.hbase.spark.example.hbasecontext
 
 
-
+
 org.apache.hadoop.hbase.types
 
 
@@ -313,23 +317,23 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  extensible data type API.
 
 
-
+
 org.apache.hadoop.hbase.util
 
 
-
+
 org.apache.hadoop.hbase.util.hbck
 
 
-
+
 org.apache.hadoop.hbase.wal
 
 
-
+
 org.apache.hadoop.hbase.zookeeper
 
 
-
+
 org.apache.hbase.archetypes.exemplars.client
 
 This package provides fully-functional exemplar Java code 
demonstrating
@@ -337,7 +341,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  archetype with hbase-client dependency.
 
 
-
+
 org.apache.hbase.archetypes.exemplars.shaded_client
 
 This package provides fully-functional exemplar Java code 
demonstrating

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/overview-tree.html
--
diff --git a/apidocs/overview-tree.html b/apidocs/overview-tree.html
index 27872c5..b4b03d2 100644
--- a/apidocs/overview-tree.html
+++ b/apidocs/overview-tree.html
@@ -101,6 +101,7 @@
 org.apache.hadoop.hbase.io.hfile,
 
 org.apache.hadoop.hbase.io.util,
 
 org.apache.hadoop.hbase.ipc,
 
+org.apache.hadoop.hbase.jetty,
 
 org.apache.hadoop.hbase.mapred,
 
 org.apache.hadoop.hbase.mapreduce,
 
 org.apache.hadoop.hbase.master,
 
@@ -586,6 +587,7 @@
 org.apache.hadoop.hbase.quotas.ThrottlingException
 
 
+org.apache.hadoop.hbase.exceptions.ScannerResetException
 org.apache.hadoop.hbase.client.ScannerTimeoutException
 org.apache.hadoop.hbase.ipc.ServerTooBusyException
 org.apache.hadoop.hbase.TableExistsException
@@ -848,23 +850,23 @@
 org.apache.hadoop.hbase.KeepDeletedCells
 org.apache.hadoop.hbase.ProcedureState
 org.apache.hadoop.hbase.io.encoding.DataBlockEncoding
+org.apache.hadoop.hbase.filter.FilterList.Operator
+org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
 org.apache.hadoop.hbase.filter.Filter.ReturnCode
 org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp

[23/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html
index 480107e..070e398 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html
@@ -33,120 +33,120 @@
 025import org.apache.hadoop.hbase.Cell;
 026import 
org.apache.hadoop.hbase.CellUtil;
 027import 
org.apache.hadoop.hbase.HConstants;
-028import 
org.apache.hadoop.hbase.KeyValueUtil;
-029import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-030import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-031import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-032import 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
-033import 
org.apache.hadoop.hbase.util.ByteStringer;
-034import 
org.apache.hadoop.hbase.util.Bytes;
-035
-036import 
com.google.protobuf.InvalidProtocolBufferException;
-037
-038/**
-039 * Filter to support scan multiple row 
key ranges. It can construct the row key ranges from the
-040 * passed list which can be accessed by 
each region server.
-041 *
-042 * HBase is quite efficient when scanning 
only one small row key range. If user needs to specify
-043 * multiple row key ranges in one scan, 
the typical solutions are: 1. through FilterList which is a
-044 * list of row key Filters, 2. using the 
SQL layer over HBase to join with two tables, such as hive,
-045 * phoenix etc. However, both solutions 
are inefficient. Both of them can't utilize the range info
-046 * to perform fast forwarding during scan 
which is quite time consuming. If the number of ranges
-047 * is quite big (e.g. millions), join is 
a proper solution though it is slow. However, there are
-048 * cases that user wants to specify a 
small number of ranges to scan (e.g. <1000 ranges). Both
-049 * solutions can't provide satisfactory 
performance in such case. MultiRowRangeFilter is to support
-050 * such use case (scan multiple row key 
ranges), which can construct the row key ranges from user
-051 * specified list and perform 
fast-forwarding during scan. Thus, the scan will be quite efficient.
-052 */
-053@InterfaceAudience.Public
-054@InterfaceStability.Evolving
-055public class MultiRowRangeFilter extends 
FilterBase {
-056
-057  private List<RowRange> rangeList;
-058
-059  private static final int 
ROW_BEFORE_FIRST_RANGE = -1;
-060  private boolean EXCLUSIVE = false;
-061  private boolean done = false;
-062  private boolean initialized = false;
-063  private int index;
-064  private RowRange range;
-065  private ReturnCode currentReturnCode;
-066
-067  /**
-068   * @param list A list of 
<code>RowRange</code>
-069   * @throws java.io.IOException
-070   *   throw an exception if the 
range list is not in a natural order or any
-071   *   
<code>RowRange</code> is invalid
-072   */
-073  public 
MultiRowRangeFilter(List<RowRange> list) throws IOException {
-074this.rangeList = 
sortAndMerge(list);
-075  }
-076
-077  @Override
-078  public boolean filterAllRemaining() {
-079return done;
-080  }
-081
-082  public List<RowRange> 
getRowRanges() {
-083return this.rangeList;
-084  }
-085
-086  @Override
-087  public boolean filterRowKey(Cell 
firstRowCell) {
-088if (filterAllRemaining()) return 
true;
-089// If it is the first time of 
running, calculate the current range index for
-090// the row key. If index is out of 
bound which happens when the start row
-091// user sets is after the largest 
stop row of the ranges, stop the scan.
-092// If row key is after the current 
range, find the next range and update index.
-093byte[] rowArr = 
firstRowCell.getRowArray();
-094int length = 
firstRowCell.getRowLength();
-095int offset = 
firstRowCell.getRowOffset();
-096if (!initialized
-097|| !range.contains(rowArr, 
offset, length)) {
-098  byte[] rowkey = 
CellUtil.cloneRow(firstRowCell);
-099  index = 
getNextRangeIndex(rowkey);
-100  if (index >= rangeList.size()) {
-101done = true;
-102currentReturnCode = 
ReturnCode.NEXT_ROW;
-103return false;
-104  }
-105  if(index != ROW_BEFORE_FIRST_RANGE) 
{
-106range = rangeList.get(index);
-107  } else {
-108range = rangeList.get(0);
-109  }
-110  if (EXCLUSIVE) {
-111EXCLUSIVE = false;
-112currentReturnCode = 
ReturnCode.NEXT_ROW;
-113return false;
-114  }
-115  if (!initialized) {
-116if(index != 
ROW_BEFORE_FIRST_RANGE) {
-117  currentReturnCode = 
ReturnCode.INCLUDE;
-118} else {
-119  currentReturnCode = 
ReturnCode.SEEK_NEXT_USING_HINT;
-120}
-121
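
For orientation: the removed lines above are the old generated view of MultiRowRangeFilter. A hedged usage sketch built around the List<RowRange> constructor visible in the hunk follows; the RowRange constructor shape (start/stop byte[] plus inclusive flags) is recalled from the class and worth double-checking, and the row keys are made up.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter;
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange;
import org.apache.hadoop.hbase.util.Bytes;

public class MultiRangeScanExample {
  public static Scan buildScan() throws Exception {
    List<RowRange> ranges = new ArrayList<>();
    // Each range here is [start, stop): start inclusive, stop exclusive.
    ranges.add(new RowRange(Bytes.toBytes("a"), true, Bytes.toBytes("c"), false));
    ranges.add(new RowRange(Bytes.toBytes("m"), true, Bytes.toBytes("p"), false));
    Scan scan = new Scan();
    // The constructor sorts and merges the list and rejects malformed ranges.
    scan.setFilter(new MultiRowRangeFilter(ranges));
    return scan;
  }
}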

[35/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html 
b/apidocs/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
index 99a1e7d..5c7a2b6 100644
--- a/apidocs/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
+++ b/apidocs/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -111,7 +111,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
  @InterfaceStability.Evolving
-public class ReplicationPeerConfig
+public class ReplicationPeerConfig
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 A configuration for the replication peer cluster.
 
@@ -158,35 +158,43 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 getConfiguration()
 
 
+http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+getNamespaces()
+
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in 
java.util">Mapbyte[],byte[]
 getPeerData()
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 getReplicationEndpointImpl()
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapTableName,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 getTableCFsMap()
 
-
+
 ReplicationPeerConfig
 setClusterKey(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringclusterKey)
 Set the clusterKey which is the concatenation of the slave 
cluster's:
   
hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
 
 
-
+
+ReplicationPeerConfig
+setNamespaces(http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">Stringnamespaces)
+
+
 ReplicationPeerConfig
 setReplicationEndpointImpl(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringreplicationEndpointImpl)
 Sets the ReplicationEndpoint plugin class for this 
peer.
 
 
-
+
 ReplicationPeerConfig
 setTableCFsMap(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapTableName,? extends http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">Collectionhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringtableCFsMap)
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString()
 
@@ -218,7 +226,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 ReplicationPeerConfig
-public ReplicationPeerConfig()
+public ReplicationPeerConfig()
 
 
 
@@ -235,7 +243,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 setClusterKey
-public ReplicationPeerConfig setClusterKey(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String clusterKey)
+public ReplicationPeerConfig setClusterKey(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String clusterKey)
 Set the clusterKey which is the concatenation of the slave 
cluster's:
   
hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
 
@@ -246,7 +254,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 setReplicationEndpointImpl
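
For orientation on ReplicationPeerConfig as a whole, a short sketch wiring together the methods listed in this summary; the ZooKeeper quorum string is a made-up placeholder that just follows the quorum:clientPort:znode.parent layout described above, and the namespace name is hypothetical.

import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

public class PeerConfigExample {
  public static ReplicationPeerConfig buildPeer() {
    Set<String> namespaces = new HashSet<>();
    namespaces.add("ns1");                        // replicate every table under this namespace
    return new ReplicationPeerConfig()
        .setClusterKey("zk1,zk2,zk3:2181:/hbase") // slave cluster's quorum:clientPort:znode.parent
        .setNamespaces(namespaces);               // chaining works because the setters return the config
  }
}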

[02/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/JMXListener.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/JMXListener.html 
b/devapidocs/org/apache/hadoop/hbase/JMXListener.html
index f9a3a87..4f61da7 100644
--- a/devapidocs/org/apache/hadoop/hbase/JMXListener.html
+++ b/devapidocs/org/apache/hadoop/hbase/JMXListener.html
@@ -183,6 +183,10 @@ implements static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 RMI_REGISTRY_PORT_CONF_KEY
 
+
+private http://docs.oracle.com/javase/8/docs/api/java/rmi/registry/Registry.html?is-external=true;
 title="class or interface in java.rmi.registry">Registry
+rmiRegistry
+
 
 
 
@@ -330,7 +334,7 @@ implements 
 
 
-
+
 
 JMX_CS
 private statichttp://docs.oracle.com/javase/8/docs/api/javax/management/remote/JMXConnectorServer.html?is-external=true;
 title="class or interface in javax.management.remote">JMXConnectorServer 
JMX_CS
@@ -340,6 +344,15 @@ implements 
 
 
+
+
+
+
+
+rmiRegistry
+privatehttp://docs.oracle.com/javase/8/docs/api/java/rmi/registry/Registry.html?is-external=true;
 title="class or interface in java.rmi.registry">Registry rmiRegistry
+
+
 
 
 
@@ -371,7 +384,7 @@ implements 
 
 buildJMXServiceURL
-public statichttp://docs.oracle.com/javase/8/docs/api/javax/management/remote/JMXServiceURL.html?is-external=true;
 title="class or interface in 
javax.management.remote">JMXServiceURLbuildJMXServiceURL(intrmiRegistryPort,
+public statichttp://docs.oracle.com/javase/8/docs/api/javax/management/remote/JMXServiceURL.html?is-external=true;
 title="class or interface in 
javax.management.remote">JMXServiceURLbuildJMXServiceURL(intrmiRegistryPort,
intrmiConnectorPort)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
@@ -386,7 +399,7 @@ implements 
 
 startConnectorServer
-publicvoidstartConnectorServer(intrmiRegistryPort,
+publicvoidstartConnectorServer(intrmiRegistryPort,
  intrmiConnectorPort)
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
@@ -401,7 +414,7 @@ implements 
 
 stopConnectorServer
-publicvoidstopConnectorServer()
+publicvoidstopConnectorServer()
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:
@@ -415,7 +428,7 @@ implements 
 
 start
-publicvoidstart(CoprocessorEnvironmentenv)
+publicvoidstart(CoprocessorEnvironmentenv)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Specified by:
@@ -431,7 +444,7 @@ implements 
 
 stop
-publicvoidstop(CoprocessorEnvironmentenv)
+publicvoidstop(CoprocessorEnvironmentenv)
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Specified by:

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html 
b/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
index 965a0e1..1aeb9d8 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
@@ -263,7 +263,7 @@ the order they are declared.
 
 
 values
-public staticKeepDeletedCells[]values()
+public staticKeepDeletedCells[]values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -283,7 +283,7 @@ for (KeepDeletedCells c : KeepDeletedCells.values())
 
 
 valueOf
-public staticKeepDeletedCellsvalueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+public staticKeepDeletedCellsvalueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/KeyValue.KVComparator.html
--
diff --git 

[50/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/acid-semantics.html
--
diff --git a/acid-semantics.html b/acid-semantics.html
index 381cd85..4879e2d 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase   
   Apache HBase (TM) ACID Properties
@@ -600,7 +600,7 @@ under the License. -->
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-09-16
+  Last Published: 
2016-09-29
 
 
 



[47/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/constant-values.html
--
diff --git a/apidocs/constant-values.html b/apidocs/constant-values.html
index 4c60407..07faf6f 100644
--- a/apidocs/constant-values.html
+++ b/apidocs/constant-values.html
@@ -493,1923 +493,1923 @@
 "hbase.bulkload.retries.number"
 
 
+
+
+publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+BULKLOAD_STAGING_DIR_NAME
+"staging"
+
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 BYTES_PER_CHECKSUM
 "hbase.hstore.bytes.per.checksum"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CATALOG_FAMILY_STR
 "info"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CHECKSUM_TYPE_NAME
 "hbase.hstore.checksum.algorithm"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CIPHER_AES
 "AES"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CLIENT_PORT_STR
 "clientPort"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CLUSTER_DISTRIBUTED
 "hbase.cluster.distributed"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CLUSTER_ID_DEFAULT
 "default-cluster"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CLUSTER_ID_FILE_NAME
 "hbase.id"
 
-
+
 
 
 publicstaticfinalboolean
 CLUSTER_IS_DISTRIBUTED
 true
 
-
+
 
 
 publicstaticfinalboolean
 CLUSTER_IS_LOCAL
 false
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 COMPACTION_KV_MAX
 "hbase.hstore.compaction.kv.max"
 
-
+
 
 
 publicstaticfinalint
 COMPACTION_KV_MAX_DEFAULT
 10
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CONFIGURATION
 "CONFIGURATION"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CORRUPT_DIR_NAME
 "corrupt"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_INCLUSION_KEY
 "hbase.coprocessor.classloader.included.classes"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
 "[^=,]+"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN
 "[^,]+"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_ALTERNATE_KEY_ALGORITHM_CONF_KEY
 "hbase.crypto.alternate.key.algorithm"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_CIPHERPROVIDER_CONF_KEY
 "hbase.crypto.cipherprovider"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_KEY_ALGORITHM_CONF_KEY
 "hbase.crypto.key.algorithm"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_KEYPROVIDER_CONF_KEY
 "hbase.crypto.keyprovider"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_KEYPROVIDER_PARAMETERS_KEY
 "hbase.crypto.keyprovider.parameters"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY
 "hbase.crypto.master.alternate.key.name"
 
-
+
 
 
 

[08/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/ByteBufferedCell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ByteBufferedCell.html 
b/devapidocs/org/apache/hadoop/hbase/ByteBufferedCell.html
index 192136d..a1bd0d3 100644
--- a/devapidocs/org/apache/hadoop/hbase/ByteBufferedCell.html
+++ b/devapidocs/org/apache/hadoop/hbase/ByteBufferedCell.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 Direct Known Subclasses:
-BufferedDataBlockEncoder.OffheapDecodedCell,
 ByteBufferedKeyOnlyKeyValue, CellUtil.EmptyByteBufferedCell, OffheapKeyValue, PrefixTreeCell, Pre
 fixTreeSeeker.OffheapPrefixTreeCell
+BufferedDataBlockEncoder.OffheapDecodedCell,
 ByteBufferedKeyOnlyKeyValue, CellUtil.EmptyByteBufferedCell, KeyOnlyFilter.KeyOnlyByteBufferedCell, OffheapKeyValue, P
 refixTreeCell, PrefixTreeSeeker.OffheapPrefixTreeCell
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/Cell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/Cell.html 
b/devapidocs/org/apache/hadoop/hbase/Cell.html
index e1c2ccb..d719a38 100644
--- a/devapidocs/org/apache/hadoop/hbase/Cell.html
+++ b/devapidocs/org/apache/hadoop/hbase/Cell.html
@@ -100,8 +100,12 @@ var activeTableTab = "activeTableTab";
 
 
 
+All Known Subinterfaces:
+ExtendedCell
+
+
 All Known Implementing Classes:
-BufferedDataBlockEncoder.OffheapDecodedCell,
 BufferedDataBlockEncoder.OnheapDecodedCell,
 ByteBufferedCell, ByteBufferedKeyOnlyKeyValue, CellUtil.EmptyByteBufferedCell, CellUtil.EmptyCell,
  CellUtil.FirstOnRowByteBufferedCell, CellUtil.FirstOnRowCell, CellUtil.FirstOnRowColByteBufferedCell, CellUtil.FirstOnRowColCell, CellUtil.FirstOnRowColTSByteBufferedCell, CellUtil.FirstOnRowColTSCell, CellUtil.FirstOnRowDeleteFamilyCell, CellUtil.LastOnRowByteBufferedCell, CellUtil.LastOnRowCell, CellUtil.LastOnRowColByteBufferedCell, CellUtil.LastOnRowColCell, CellUtil.ShareableMemoryTagRewriteCell, CellUtil.TagRewriteCell, KeyValue, KeyValue.KeyOnlyKeyValue, KeyValueCodec.ByteBufferedKeyValueDecoder.ShareableMemoryKeyValue,
 KeyValueCodec.ByteBufferedKeyValueDecoder.ShareableMemoryNoTagsKeyValue,
 NoTagsKeyValue, OffheapKeyValue, PrefixTreeArrayReversibleScanner,
 PrefixTreeArrayScanner, PrefixTreeArraySearcher, 
PrefixTreeCell, PrefixTreeSeeker.OffheapPrefixTreeCell,
 PrefixTreeSeeker.OnheapPrefixTreeCell,
 RedundantKVGenerator.ExtendedOffheapKeyValue,
 SizeCachedKeyValue, SizeCachedNoTagsKeyValue
+BufferedDataBlockEncoder.OffheapDecodedCell,
 BufferedDataBlockEncoder.OnheapDecodedCell,
 ByteBufferedCell, ByteBufferedKeyOnlyKeyValue, CellUtil.EmptyByteBufferedCell, CellUtil.EmptyCell,
  CellUtil.FirstOnRowByteBufferedCell, CellUtil.FirstOnRowCell, CellUtil.FirstOnRowColByteBufferedCell, CellUtil.FirstOnRowColCell, CellUtil.FirstOnRowColTSByteBufferedCell, CellUtil.FirstOnRowColTSCell, CellUtil.FirstOnRowDeleteFamilyCell, CellUtil.LastOnRowByteBufferedCell, CellUtil.LastOnRowCell, CellUtil.LastOnRowColByteBufferedCell, CellUtil.LastOnRowColCell, CellUtil.ShareableMemoryTagRewriteCell, CellUtil.TagRewriteCell, KeyOnlyFilter.KeyOnlyByteBufferedCell, KeyOnlyFilter.KeyOnlyCell, 
KeyValue, KeyValue.KeyOnlyKeyValue, KeyValueCodec.ByteBufferedKeyValueDecoder.ShareableMemoryKeyValue,
 KeyValueCodec.ByteBufferedKeyValueDecoder.ShareableMemoryNoTagsKeyValue,
 NoTagsKeyValue, OffheapKeyValue, PrefixTreeArrayReversibleScanner,
 PrefixTreeArrayScanner, PrefixTreeArraySearcher, PrefixTreeCell, PrefixTreeSeeker.OffheapPrefixTreeCell,
 PrefixTreeSeeker.OnheapPrefixTreeCell,
 RedundantKVGenerator.ExtendedOffheapKeyValue,
 SizeCachedKeyValue,
  SizeCachedNoTagsKeyValue
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferedCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferedCell.html 
b/devapidocs/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferedCell.html
index 1326054..d7e71be 100644
--- a/devapidocs/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferedCell.html
+++ b/devapidocs/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferedCell.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-private abstract static class CellUtil.EmptyByteBufferedCell
+private abstract static class CellUtil.EmptyByteBufferedCell
 extends ByteBufferedCell
 implements SettableSequenceId
 
@@ -323,7 +323,7 @@ implements 
 
 EmptyByteBufferedCell
-privateEmptyByteBufferedCell()
+privateEmptyByteBufferedCell()
 
 
 
@@ -340,7 +340,7 @@ implements 
 
 setSequenceId
-publicvoidsetSequenceId(longseqId)
-public void setSequenceId(long seqId)
+public void setSequenceId(long seqId)
interface:SettableSequenceId
 Sets with the given seqId.
 
@@ -355,7 
