[14/51] [partial] hbase-site git commit: Published site at 620d70d6186fb800299bcc62ad7179fccfd1be41.

2019-01-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa3fb87f/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStateNode.AssignmentProcedureEvent.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStateNode.AssignmentProcedureEvent.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStateNode.AssignmentProcedureEvent.html
index 550d833..d664e99 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStateNode.AssignmentProcedureEvent.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStateNode.AssignmentProcedureEvent.html
@@ -154,171 +154,173 @@
 146}
 147  }
 148
-149  public boolean isInState(final State... expected) {
-150    if (expected != null && expected.length > 0) {
-151      boolean expectedState = false;
-152      for (int i = 0; i < expected.length; ++i) {
-153        expectedState |= (getState() == expected[i]);
-154      }
-155      return expectedState;
-156    }
-157    return true;
-158  }
-159
-160  public boolean isStuck() {
-161    return isInState(State.FAILED_OPEN) && getProcedure() != null;
-162  }
-163
-164  public boolean isInTransition() {
-165    return getProcedure() != null;
-166  }
-167
-168  public long getLastUpdate() {
-169    TransitRegionStateProcedure proc = this.procedure;
-170    return proc != null ? proc.getLastUpdate() : lastUpdate;
-171  }
-172
-173  public void setLastHost(final 
ServerName serverName) {
-174this.lastHost = serverName;
-175  }
-176
-177  public void setOpenSeqNum(final long 
seqId) {
-178this.openSeqNum = seqId;
-179  }
-180
-181  public ServerName 
setRegionLocation(final ServerName serverName) {
-182ServerName lastRegionLocation = 
this.regionLocation;
-183    if (LOG.isTraceEnabled() && serverName == null) {
-184      LOG.trace("Tracking when we are set to null " + this, new Throwable("TRACE"));
-185    }
-186this.regionLocation = serverName;
-187this.lastUpdate = 
EnvironmentEdgeManager.currentTime();
-188return lastRegionLocation;
-189  }
-190
-191  public void 
setProcedure(TransitRegionStateProcedure proc) {
-192assert this.procedure == null;
-193this.procedure = proc;
-194ritMap.put(regionInfo, this);
-195  }
-196
-197  public void 
unsetProcedure(TransitRegionStateProcedure proc) {
-198assert this.procedure == proc;
-199this.procedure = null;
-200ritMap.remove(regionInfo, this);
-201  }
-202
-203  public TransitRegionStateProcedure 
getProcedure() {
-204return procedure;
-205  }
-206
-207  public ProcedureEvent<?> getProcedureEvent() {
-208return event;
-209  }
-210
-211  public RegionInfo getRegionInfo() {
-212return regionInfo;
-213  }
-214
-215  public TableName getTable() {
-216return getRegionInfo().getTable();
-217  }
-218
-219  public boolean isSystemTable() {
-220return getTable().isSystemTable();
-221  }
-222
-223  public ServerName getLastHost() {
-224return lastHost;
-225  }
-226
-227  public ServerName getRegionLocation() 
{
-228return regionLocation;
-229  }
-230
-231  public State getState() {
-232return state;
-233  }
-234
-235  public long getOpenSeqNum() {
-236return openSeqNum;
-237  }
-238
-239  public int getFormatVersion() {
-240// we don't have any format for now
-241// it should probably be in 
regionInfo.getFormatVersion()
-242return 0;
-243  }
-244
-245  public RegionState toRegionState() {
-246return new 
RegionState(getRegionInfo(), getState(), getLastUpdate(), 
getRegionLocation());
-247  }
-248
-249  @Override
-250  public int compareTo(final 
RegionStateNode other) {
-251// NOTE: RegionInfo sort by table 
first, so we are relying on that.
-252// we have a 
TestRegionState#testOrderedByTable() that check for that.
-253return 
RegionInfo.COMPARATOR.compare(getRegionInfo(), other.getRegionInfo());
-254  }
-255
-256  @Override
-257  public int hashCode() {
-258return getRegionInfo().hashCode();
-259  }
-260
-261  @Override
-262  public boolean equals(final Object 
other) {
-263if (this == other) {
-264  return true;
-265}
-266if (!(other instanceof 
RegionStateNode)) {
-267  return false;
-268}
-269return compareTo((RegionStateNode) 
other) == 0;
-270  }
-271
-272  @Override
-273  public String toString() {
-274return toDescriptiveString();
-275  }
-276
-277  public String toShortString() {
-278// rit= is the current 
Region-In-Transition State -- see State enum.
-279return String.format("rit=%s, 
location=%s", getState(), getRegionLocation());
-280  }
-281
-282  public String toDescriptiveString() {
-283return String.format("%s, table=%s, 
region=%s", toShortString(), getTable(),
-284  
getRegionInfo().getEncodedName());
-285  }
-286
-287  public void checkOnline() throws 
DoNotRetryRegionException {
-288RegionInfo ri = getRegionInfo();
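
The hunk above centers on RegionStateNode's varargs state check. A minimal, self-contained Java sketch of the same pattern follows; the State values here are illustrative stand-ins, not the full enum from the HBase source:

enum State { OFFLINE, OPENING, OPEN, FAILED_OPEN, CLOSING, CLOSED }

public class StateCheckDemo {
  private State state = State.FAILED_OPEN;

  public State getState() { return state; }

  // True when the current state matches any expected one; a null or empty
  // argument list matches everything, mirroring the "return true"
  // fallthrough in RegionStateNode.isInState.
  public boolean isInState(State... expected) {
    if (expected != null && expected.length > 0) {
      boolean matched = false;
      for (State s : expected) {
        matched |= (getState() == s);
      }
      return matched;
    }
    return true;
  }

  public static void main(String[] args) {
    StateCheckDemo node = new StateCheckDemo();
    System.out.println(node.isInState(State.OPEN, State.FAILED_OPEN)); // true
    System.out.println(node.isInState(State.CLOSED));                  // false
    System.out.println(node.isInState());                              // vacuously true
  }
}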

[14/51] [partial] hbase-site git commit: Published site at 281d6429e55149cc4c05430dcc1d1dc136d8b245.

2019-01-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/901d593a/devapidocs/org/apache/hadoop/hbase/master/procedure/SwitchRpcThrottleRemoteProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/SwitchRpcThrottleRemoteProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/SwitchRpcThrottleRemoteProcedure.html
new file mode 100644
index 000..501ddc6
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/SwitchRpcThrottleRemoteProcedure.html
@@ -0,0 +1,777 @@
+SwitchRpcThrottleRemoteProcedure (Apache HBase 3.0.0-SNAPSHOT 
API)
+org.apache.hadoop.hbase.master.procedure
+Class 
SwitchRpcThrottleRemoteProcedure
+
+
+
+java.lang.Object
+  org.apache.hadoop.hbase.procedure2.Procedure<MasterProcedureEnv>
+    org.apache.hadoop.hbase.master.procedure.SwitchRpcThrottleRemoteProcedure
+
+
+
+
+
+
+
+
+
+All Implemented Interfaces:
+Comparable<Procedure<MasterProcedureEnv>>, ServerProcedureInterface, RemoteProcedureDispatcher.RemoteProcedure<MasterProcedureEnv, ServerName>
+
+
+@InterfaceAudience.Private
+public class SwitchRpcThrottleRemoteProcedure
+extends Procedure<MasterProcedureEnv>
+implements RemoteProcedureDispatcher.RemoteProcedure<MasterProcedureEnv, ServerName>, ServerProcedureInterface
+The procedure to switch rpc throttle on region server.
+
+
+
+
+
+
+
+
+
+
+
+Nested Class Summary
+
+
+
+
+Nested classes/interfaces inherited from class org.apache.hadoop.hbase.procedure2.Procedure
+Procedure.LockState
+
+
+
+
+
+Nested classes/interfaces inherited from interface org.apache.hadoop.hbase.master.procedure.ServerProcedureInterface
+ServerProcedureInterface.ServerOperationType
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+private boolean
+dispatched
+
+
+private ProcedureEvent<?>
+event
+
+
+private static org.slf4j.Logger
+LOG
+
+
+private boolean
+rpcThrottleEnabled
+
+
+private boolean
+succ
+
+
+private ServerName
+targetServer
+
+
+
+
+
+
+Fields inherited from class org.apache.hadoop.hbase.procedure2.Procedure
+NO_PROC_ID, NO_TIMEOUT
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Constructor and Description
+
+
+SwitchRpcThrottleRemoteProcedure()
+
+
+SwitchRpcThrottleRemoteProcedure(ServerName serverName,
+ boolean rpcThrottleEnabled)
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All Methods | Instance Methods | Concrete Methods
+
+Modifier and Type
+Method and Description
+
+
+protected boolean
+abort(MasterProcedureEnv env)
+The abort() call is asynchronous and each procedure must 
decide how to deal
+ with it, if they want to be abortable.
+
+
+
+private void
+complete(MasterProcedureEnv env,
+ Throwable error)
+
+
+protected void
+deserializeStateData(ProcedureStateSerializer serializer)
+Called on store load to allow the user to decode the 
previously serialized
+ state.
+
+
+
+protected Procedure<MasterProcedureEnv>[]
+execute(MasterProcedureEnv env)
+The main code of the procedure.
+
+
+
+ServerName
+getServerName()
+
+
+ServerProcedureInterface.ServerOperationType
+getServerOperationType()
+Given an operation type we can take decisions about what to 
do with pending operations.
+
+
+
+boolean
+hasMetaTableRegion()
+
+
+RemoteProcedureDispatcher.RemoteOperation
+remoteCallBuild(MasterProcedureEnv masterProcedureEnv,
+   ServerName remote)
+For building the 

[14/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.html
index 2e150bc..0b315b8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.html
@@ -25,22 +25,22 @@
 017 */
 018package org.apache.hadoop.hbase.client;
 019
-020import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
-021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
-022import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+020import static 
org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
+021import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
+022import static 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
 023
-024import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-025
-026import java.util.List;
-027import 
java.util.concurrent.CompletableFuture;
-028import java.util.concurrent.TimeUnit;
-029
-030import 
org.apache.hadoop.hbase.HRegionLocation;
-031import 
org.apache.hadoop.hbase.ServerName;
-032import 
org.apache.hadoop.hbase.TableName;
-033import 
org.apache.yetus.audience.InterfaceAudience;
-034import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
-035import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+024import java.util.List;
+025import 
java.util.concurrent.CompletableFuture;
+026import java.util.concurrent.TimeUnit;
+027import 
org.apache.hadoop.hbase.HRegionLocation;
+028import 
org.apache.hadoop.hbase.ServerName;
+029import 
org.apache.hadoop.hbase.TableName;
+030import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+031import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+032import 
org.apache.yetus.audience.InterfaceAudience;
+033
+034import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
+035
 036import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
 037import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse;
 038
@@ -83,432 +83,441 @@
 075
 076private RegionLocateType locateType = 
RegionLocateType.CURRENT;
 077
-078    public SingleRequestCallerBuilder<T> table(TableName tableName) {
-079      this.tableName = tableName;
-080      return this;
-081    }
-082
-083    public SingleRequestCallerBuilder<T> row(byte[] row) {
-084      this.row = row;
-085      return this;
-086    }
-087
-088    public SingleRequestCallerBuilder<T> action(
-089        AsyncSingleRequestRpcRetryingCaller.Callable<T> callable) {
-090      this.callable = callable;
-091      return this;
-092    }
-093
-094    public SingleRequestCallerBuilder<T> operationTimeout(long operationTimeout, TimeUnit unit) {
-095      this.operationTimeoutNs = unit.toNanos(operationTimeout);
-096      return this;
-097    }
-098
-099    public SingleRequestCallerBuilder<T> rpcTimeout(long rpcTimeout, TimeUnit unit) {
-100      this.rpcTimeoutNs = unit.toNanos(rpcTimeout);
-101      return this;
-102    }
-103
-104    public SingleRequestCallerBuilder<T> locateType(RegionLocateType locateType) {
-105      this.locateType = locateType;
-106      return this;
-107    }
-108
-109    public SingleRequestCallerBuilder<T> pause(long pause, TimeUnit unit) {
-110      this.pauseNs = unit.toNanos(pause);
-111      return this;
-112    }
-113
-114    public SingleRequestCallerBuilder<T> maxAttempts(int maxAttempts) {
-115      this.maxAttempts = maxAttempts;
-116      return this;
-117    }
-118
-119    public SingleRequestCallerBuilder<T> startLogErrorsCnt(int startLogErrorsCnt) {
-120      this.startLogErrorsCnt = startLogErrorsCnt;
-121      return this;
-122    }
-123
-124    public AsyncSingleRequestRpcRetryingCaller<T> build() {
-125      return new AsyncSingleRequestRpcRetryingCaller<>(retryTimer, conn,
-126          checkNotNull(tableName, "tableName is null"), checkNotNull(row, "row is null"),
-127          checkNotNull(locateType, "locateType is null"), checkNotNull(callable, "action is null"),
-128          pauseNs, maxAttempts, operationTimeoutNs, rpcTimeoutNs, startLogErrorsCnt);
+078    private int replicaId = RegionReplicaUtil.DEFAULT_REPLICA_ID;
+079
+080    public SingleRequestCallerBuilder<T> table(TableName tableName) {
+081      this.tableName = tableName;
+082      return this;
+083    }
+084
+085    public SingleRequestCallerBuilder<T> row(byte[] row) {
+086      this.row = row;
+087      return this;
+088    }
+089
+090    public SingleRequestCallerBuilder<T> action(
+091
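
The hunk above shows SingleRequestCallerBuilder<T>, a fluent builder. A usage sketch assembled from the chained setters visible in this diff; the factory handle, its single() entry point, and the final call() are assumptions not confirmed by the excerpt:

// Sketch only, assuming "factory" is an AsyncRpcRetryingCallerFactory whose
// single() method hands back a SingleRequestCallerBuilder<Result>.
CompletableFuture<Result> get(AsyncRpcRetryingCallerFactory factory) {
  return factory.<Result> single()
      .table(TableName.valueOf("t1"))
      .row(Bytes.toBytes("row-1"))
      .locateType(RegionLocateType.CURRENT)
      .operationTimeout(30, TimeUnit.SECONDS)
      .rpcTimeout(10, TimeUnit.SECONDS)
      .maxAttempts(3)
      .action((controller, loc, stub) -> null /* issue the Get RPC here */)
      .build()
      .call();
}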

[14/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html
deleted file mode 100644
index e692633..000
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html
+++ /dev/null
@@ -1,2103 +0,0 @@
-
-Source code
-
-
-
-
-001/*
-002 * Licensed to the Apache Software 
Foundation (ASF) under one
-003 * or more contributor license 
agreements.  See the NOTICE file
-004 * distributed with this work for 
additional information
-005 * regarding copyright ownership.  The 
ASF licenses this file
-006 * to you under the Apache License, 
Version 2.0 (the
-007 * "License"); you may not use this file 
except in compliance
-008 * with the License.  You may obtain a 
copy of the License at
-009 *
-010 * 
http://www.apache.org/licenses/LICENSE-2.0
-011 *
-012 * Unless required by applicable law or 
agreed to in writing, software
-013 * distributed under the License is 
distributed on an "AS IS" BASIS,
-014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-015 * See the License for the specific 
language governing permissions and
-016 * limitations under the License.
-017 */
-018
-019package org.apache.hadoop.hbase.thrift;
-020
-021import static 
org.apache.hadoop.hbase.util.Bytes.getBytes;
-022
-023import java.io.IOException;
-024import java.net.InetAddress;
-025import java.net.InetSocketAddress;
-026import java.net.UnknownHostException;
-027import java.nio.ByteBuffer;
-028import java.security.PrivilegedAction;
-029import java.util.ArrayList;
-030import java.util.Arrays;
-031import java.util.Collections;
-032import java.util.HashMap;
-033import java.util.List;
-034import java.util.Map;
-035import java.util.TreeMap;
-036import 
java.util.concurrent.BlockingQueue;
-037import 
java.util.concurrent.ExecutorService;
-038import 
java.util.concurrent.LinkedBlockingQueue;
-039import 
java.util.concurrent.ThreadPoolExecutor;
-040import java.util.concurrent.TimeUnit;
-041
-042import 
javax.security.auth.callback.Callback;
-043import 
javax.security.auth.callback.UnsupportedCallbackException;
-044import 
javax.security.sasl.AuthorizeCallback;
-045import javax.security.sasl.SaslServer;
-046
-047import 
org.apache.commons.lang3.ArrayUtils;
-048import 
org.apache.hadoop.conf.Configuration;
-049import 
org.apache.hadoop.hbase.Cell.Type;
-050import 
org.apache.hadoop.hbase.CellBuilder;
-051import 
org.apache.hadoop.hbase.CellBuilderFactory;
-052import 
org.apache.hadoop.hbase.CellBuilderType;
-053import 
org.apache.hadoop.hbase.CellUtil;
-054import 
org.apache.hadoop.hbase.HBaseConfiguration;
-055import 
org.apache.hadoop.hbase.HColumnDescriptor;
-056import 
org.apache.hadoop.hbase.HConstants;
-057import 
org.apache.hadoop.hbase.HRegionLocation;
-058import 
org.apache.hadoop.hbase.HTableDescriptor;
-059import 
org.apache.hadoop.hbase.KeyValue;
-060import 
org.apache.hadoop.hbase.MetaTableAccessor;
-061import 
org.apache.hadoop.hbase.ServerName;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.TableNotFoundException;
-064import 
org.apache.hadoop.hbase.client.Admin;
-065import 
org.apache.hadoop.hbase.client.Append;
-066import 
org.apache.hadoop.hbase.client.Delete;
-067import 
org.apache.hadoop.hbase.client.Durability;
-068import 
org.apache.hadoop.hbase.client.Get;
-069import 
org.apache.hadoop.hbase.client.Increment;
-070import 
org.apache.hadoop.hbase.client.OperationWithAttributes;
-071import 
org.apache.hadoop.hbase.client.Put;
-072import 
org.apache.hadoop.hbase.client.RegionInfo;
-073import 
org.apache.hadoop.hbase.client.RegionLocator;
-074import 
org.apache.hadoop.hbase.client.Result;
-075import 
org.apache.hadoop.hbase.client.ResultScanner;
-076import 
org.apache.hadoop.hbase.client.Scan;
-077import 
org.apache.hadoop.hbase.client.Table;
-078import 
org.apache.hadoop.hbase.filter.Filter;
-079import 
org.apache.hadoop.hbase.filter.ParseFilter;
-080import 
org.apache.hadoop.hbase.filter.PrefixFilter;
-081import 
org.apache.hadoop.hbase.filter.WhileMatchFilter;
-082import 
org.apache.hadoop.hbase.http.HttpServerUtil;
-083import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-084import 
org.apache.hadoop.hbase.security.SaslUtil;
-085import 
org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-086import 
org.apache.hadoop.hbase.security.SecurityUtil;
-087import 
org.apache.hadoop.hbase.security.UserProvider;
-088import 
org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
-089import 
org.apache.hadoop.hbase.thrift.generated.BatchMutation;
-090import 
org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
-091import 

[14/51] [partial] hbase-site git commit: Published site at 3ab895979b643a2980bcdb7fee2078f14b614210.

2019-01-01 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheStats.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheStats.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheStats.html
index 646308c..296b406 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheStats.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheStats.html
@@ -1626,6 +1626,6 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 
-Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html
index c3270a3..eccac9e 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html
@@ -345,6 +345,6 @@ not permitted.)
 
 
 
-Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/io/hfile/Cacheable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/Cacheable.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/Cacheable.html
index 1f7161d..a98acf7 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/Cacheable.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/Cacheable.html
@@ -347,6 +347,6 @@ extends 
 
 
-Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html
index 9263a63..ef856d1 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html
@@ -284,6 +284,6 @@ public interface
-Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html
 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html
index 39456da..954826a 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.html
@@ -373,6 +373,6 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 
-Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/io/hfile/CachedBlock.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/CachedBlock.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/CachedBlock.html
index 4a03c9a..3c085f0 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/CachedBlock.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/CachedBlock.html
@@ -295,6 +295,6 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.
 
 
 
-Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.html

[14/51] [partial] hbase-site git commit: Published site at 7820ba1dbdba58b1002cdfde08eb21aa7a0bb6da.

2018-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/09ea0d5f/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
index 0f5a095..50bf692 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
@@ -78,8712 +78,8714 @@
 070import 
java.util.concurrent.locks.ReadWriteLock;
 071import 
java.util.concurrent.locks.ReentrantReadWriteLock;
 072import java.util.function.Function;
-073import 
org.apache.hadoop.conf.Configuration;
-074import org.apache.hadoop.fs.FileStatus;
-075import org.apache.hadoop.fs.FileSystem;
-076import 
org.apache.hadoop.fs.LocatedFileStatus;
-077import org.apache.hadoop.fs.Path;
-078import org.apache.hadoop.hbase.Cell;
-079import 
org.apache.hadoop.hbase.CellBuilderType;
-080import 
org.apache.hadoop.hbase.CellComparator;
-081import 
org.apache.hadoop.hbase.CellComparatorImpl;
-082import 
org.apache.hadoop.hbase.CellScanner;
-083import 
org.apache.hadoop.hbase.CellUtil;
-084import 
org.apache.hadoop.hbase.CompareOperator;
-085import 
org.apache.hadoop.hbase.CompoundConfiguration;
-086import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-087import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-088import 
org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
-089import 
org.apache.hadoop.hbase.HConstants;
-090import 
org.apache.hadoop.hbase.HConstants.OperationStatusCode;
-091import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
-092import 
org.apache.hadoop.hbase.KeyValue;
-093import 
org.apache.hadoop.hbase.KeyValueUtil;
-094import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-095import 
org.apache.hadoop.hbase.NotServingRegionException;
-096import 
org.apache.hadoop.hbase.PrivateCellUtil;
-097import 
org.apache.hadoop.hbase.RegionTooBusyException;
-098import org.apache.hadoop.hbase.Tag;
-099import org.apache.hadoop.hbase.TagUtil;
-100import 
org.apache.hadoop.hbase.UnknownScannerException;
-101import 
org.apache.hadoop.hbase.client.Append;
-102import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-103import 
org.apache.hadoop.hbase.client.CompactionState;
-104import 
org.apache.hadoop.hbase.client.Delete;
-105import 
org.apache.hadoop.hbase.client.Durability;
-106import 
org.apache.hadoop.hbase.client.Get;
-107import 
org.apache.hadoop.hbase.client.Increment;
-108import 
org.apache.hadoop.hbase.client.IsolationLevel;
-109import 
org.apache.hadoop.hbase.client.Mutation;
-110import 
org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;
-111import 
org.apache.hadoop.hbase.client.Put;
-112import 
org.apache.hadoop.hbase.client.RegionInfo;
-113import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-114import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-115import 
org.apache.hadoop.hbase.client.Result;
-116import 
org.apache.hadoop.hbase.client.RowMutations;
-117import 
org.apache.hadoop.hbase.client.Scan;
-118import 
org.apache.hadoop.hbase.client.TableDescriptor;
-119import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-120import 
org.apache.hadoop.hbase.conf.ConfigurationManager;
-121import 
org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
-122import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-123import 
org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-124import 
org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
-125import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-126import 
org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-127import 
org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-128import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-129import 
org.apache.hadoop.hbase.filter.FilterWrapper;
-130import 
org.apache.hadoop.hbase.filter.IncompatibleFilterException;
-131import 
org.apache.hadoop.hbase.io.HFileLink;
-132import 
org.apache.hadoop.hbase.io.HeapSize;
-133import 
org.apache.hadoop.hbase.io.TimeRange;
-134import 
org.apache.hadoop.hbase.io.hfile.BlockCache;
-135import 
org.apache.hadoop.hbase.io.hfile.HFile;
-136import 
org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
-137import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-138import 
org.apache.hadoop.hbase.ipc.RpcCall;
-139import 
org.apache.hadoop.hbase.ipc.RpcServer;
-140import 
org.apache.hadoop.hbase.mob.MobFileCache;
-141import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-142import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-143import 
org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
-144import 
org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;
-145import 

[14/51] [partial] hbase-site git commit: Published site at c448604ceb987d113913f0583452b2abce04db0d.

2018-12-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f8b8424/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexChunk.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexChunk.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexChunk.html
index 1124f8b..0c29054 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexChunk.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexChunk.html
@@ -,587 +,592 @@
 1103  
blockStream.write(midKeyMetadata);
 1104
blockWriter.writeHeaderAndData(out);
 1105if (cacheConf != null) {
-1106  HFileBlock blockForCaching = 
blockWriter.getBlockForCaching(cacheConf);
-1107  
cacheConf.getBlockCache().cacheBlock(new BlockCacheKey(nameForCaching,
-1108rootLevelIndexPos, true, 
blockForCaching.getBlockType()), blockForCaching);
-1109}
-1110  }
-
-1112  // Add root index block size
-1113  totalBlockOnDiskSize += 
blockWriter.getOnDiskSizeWithoutHeader();
-1114  totalBlockUncompressedSize +=
-1115  
blockWriter.getUncompressedSizeWithoutHeader();
-1116
-1117  if (LOG.isTraceEnabled()) {
-1118LOG.trace("Wrote a " + numLevels 
+ "-level index with root level at pos "
-1119  + rootLevelIndexPos + ", " + 
rootChunk.getNumEntries()
-1120  + " root-level entries, " + 
totalNumEntries + " total entries, "
-1121  + 
StringUtils.humanReadableInt(this.totalBlockOnDiskSize) +
-1122  " on-disk size, "
-1123  + 
StringUtils.humanReadableInt(totalBlockUncompressedSize) +
-1124  " total uncompressed 
size.");
-1125  }
-1126  return rootLevelIndexPos;
-1127}
-1128
-1129/**
-1130 * Writes the block index data as a 
single level only. Does not do any
-1131 * block framing.
-1132 *
-1133 * @param out the buffered output 
stream to write the index to. Typically a
-1134 *  stream writing into an 
{@link HFile} block.
-1135 * @param description a short 
description of the index being written. Used
-1136 *  in a log message.
-1137 * @throws IOException
-1138 */
-1139public void 
writeSingleLevelIndex(DataOutput out, String description)
-1140throws IOException {
-1141  expectNumLevels(1);
-1142
-1143  if (!singleLevelOnly)
-1144throw new 
IOException("Single-level mode is turned off");
-1145
-1146      if (rootChunk.getNumEntries() > 0)
-1147        throw new IOException("Root-level entries already added in " +
-1148            "single-level mode");
-1149
-1150  rootChunk = curInlineChunk;
-1151  curInlineChunk = new 
BlockIndexChunk();
-1152
-1153  if (LOG.isTraceEnabled()) {
-1154LOG.trace("Wrote a single-level 
" + description + " index with "
-1155  + rootChunk.getNumEntries() + 
" entries, " + rootChunk.getRootSize()
-1156  + " bytes");
-1157  }
-1158  rootChunk.writeRoot(out);
-1159}
-1160
-1161/**
-1162 * Split the current level of the 
block index into intermediate index
-1163 * blocks of permitted size and 
write those blocks to disk. Return the next
-1164 * level of the block index 
referencing those intermediate-level blocks.
-1165 *
-1166 * @param out
-1167 * @param currentLevel the current 
level of the block index, such as the a
-1168 *  chunk referencing all 
leaf-level index blocks
-1169 * @return the parent level block 
index, which becomes the root index after
-1170 * a few (usually zero) 
iterations
-1171 * @throws IOException
-1172 */
-1173private BlockIndexChunk 
writeIntermediateLevel(FSDataOutputStream out,
-1174BlockIndexChunk currentLevel) 
throws IOException {
-1175  // Entries referencing 
intermediate-level blocks we are about to create.
-1176  BlockIndexChunk parent = new 
BlockIndexChunk();
-1177
-1178  // The current intermediate-level 
block index chunk.
-1179  BlockIndexChunk curChunk = new 
BlockIndexChunk();
-1180
-1181      for (int i = 0; i < currentLevel.getNumEntries(); ++i) {
-1182        curChunk.add(currentLevel.getBlockKey(i),
-1183            currentLevel.getBlockOffset(i), currentLevel.getOnDiskDataSize(i));
-1184
-1185// HBASE-16288: We have to have 
at least minIndexNumEntries(16) items in the index so that
-1186// we won't end up with too-many 
levels for a index with very large rowKeys. Also, if the
-1187// first key is larger than 
maxChunkSize this will cause infinite recursion.
-1188        if (i >= minIndexNumEntries && curChunk.getRootSize() >= maxChunkSize) {
-1189          writeIntermediateBlock(out, parent, curChunk);
-1190        }
-1191  }
-1192
-1193  if 
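
writeIntermediateLevel above folds one level of index entries into size-capped parent chunks, with a minimum entry count guarding against infinite recursion on very large keys (the HBASE-16288 comment). A self-contained sketch of that splitting discipline, using plain entry byte-sizes in place of BlockIndexChunk:

import java.util.ArrayList;
import java.util.List;

public class ChunkSplitDemo {
  // Flush the current chunk once it holds at least minEntries entries AND has
  // grown past maxChunkSize bytes; the real code keys the minimum off the
  // running entry index rather than the chunk size, but the effect is similar.
  static List<List<Integer>> split(List<Integer> entrySizes, int maxChunkSize, int minEntries) {
    List<List<Integer>> parents = new ArrayList<>();
    List<Integer> cur = new ArrayList<>();
    int curBytes = 0;
    for (int size : entrySizes) {
      cur.add(size);
      curBytes += size;
      if (cur.size() >= minEntries && curBytes >= maxChunkSize) {
        parents.add(cur);
        cur = new ArrayList<>();
        curBytes = 0;
      }
    }
    if (!cur.isEmpty()) {
      parents.add(cur); // trailing partial chunk
    }
    return parents;
  }

  public static void main(String[] args) {
    System.out.println(split(List.of(40, 40, 40, 40, 40), 100, 2)); // [[40, 40, 40], [40, 40]]
  }
}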

[14/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

2018-12-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
@@ -142,5192 +142,5186 @@
 134import org.apache.hadoop.hbase.wal.WAL;
 135import 
org.apache.hadoop.hbase.wal.WALFactory;
 136import 
org.apache.hadoop.hbase.wal.WALSplitter;
-137import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-138import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-139import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-140import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-141import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-142import 
org.apache.hadoop.ipc.RemoteException;
-143import 
org.apache.hadoop.security.UserGroupInformation;
-144import 
org.apache.hadoop.util.ReflectionUtils;
-145import org.apache.hadoop.util.Tool;
-146import 
org.apache.hadoop.util.ToolRunner;
-147import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-148import 
org.apache.hbase.thirdparty.com.google.common.collect.Sets;
-149import 
org.apache.yetus.audience.InterfaceAudience;
-150import 
org.apache.yetus.audience.InterfaceStability;
-151import 
org.apache.zookeeper.KeeperException;
-152import org.slf4j.Logger;
-153import org.slf4j.LoggerFactory;
-154
-155import 
org.apache.hbase.thirdparty.com.google.common.base.Joiner;
-156import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-157import 
org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;
-158import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-159import 
org.apache.hbase.thirdparty.com.google.common.collect.Multimap;
-160import 
org.apache.hbase.thirdparty.com.google.common.collect.Ordering;
-161import 
org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;
-162
-163import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-164import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;
-165
-166/**
-167 * HBaseFsck (hbck) is a tool for 
checking and repairing region consistency and
-168 * table integrity problems in a 
corrupted HBase. This tool was written for hbase-1.x. It does not
-169 * work with hbase-2.x; it can read state 
but is not allowed to change state; i.e. effect 'repair'.
-170 * See hbck2 (HBASE-19121) for a hbck 
tool for hbase2.
-171 *
-172 * p
-173 * Region consistency checks verify that 
hbase:meta, region deployment on region
-174 * servers and the state of data in HDFS 
(.regioninfo files) all are in
-175 * accordance.
-176 * p
-177 * Table integrity checks verify that all 
possible row keys resolve to exactly
-178 * one region of a table.  This means 
there are no individual degenerate
-179 * or backwards regions; no holes between 
regions; and that there are no
-180 * overlapping regions.
-181 * p
-182 * The general repair strategy works in 
two phases:
-183 * ol
-184 * li Repair Table Integrity on 
HDFS. (merge or fabricate regions)
-185 * li Repair Region Consistency 
with hbase:meta and assignments
-186 * /ol
-187 * p
-188 * For table integrity repairs, the 
tables' region directories are scanned
-189 * for .regioninfo files.  Each table's 
integrity is then verified.  If there
-190 * are any orphan regions (regions with 
no .regioninfo files) or holes, new
-191 * regions are fabricated.  Backwards 
regions are sidelined as well as empty
-192 * degenerate (endkey==startkey) regions. 
 If there are any overlapping regions,
-193 * a new region is created and all data 
is merged into the new region.
-194 * p
-195 * Table integrity repairs deal solely 
with HDFS and could potentially be done
-196 * offline -- the hbase region servers or 
master do not need to be running.
-197 * This phase can eventually be used to 
completely reconstruct the hbase:meta table in
-198 * an offline fashion.
-199 * p
-200 * Region consistency requires three 
conditions -- 1) valid .regioninfo file
-201 * present in an HDFS region dir,  2) 
valid row with .regioninfo data in META,
-202 * and 3) a region is deployed only at 
the regionserver that was assigned to
-203 * with proper state in the master.
-204 * p
-205 * Region consistency repairs require 
hbase to be online so that hbck can
-206 * contact the HBase master and region 
servers.  The hbck#connect() method must
-207 * first be called successfully.  Much of 
the region consistency information
-208 * is transient and less risky to 
repair.
-209 * p
-210 * If hbck is run from the command line, 
there are a handful of arguments that
-211 * can be used to limit the kinds of 
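
The javadoc above calls hbck a command-line tool, and this same hunk imports org.apache.hadoop.util.Tool and ToolRunner. A minimal read-only invocation sketch, under the assumption that HBaseFsck is driven as a Hadoop Tool ("-details" is used purely illustratively):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.util.HBaseFsck;
import org.apache.hadoop.util.ToolRunner;

public class HbckSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Read-only consistency report; repair flags are omitted on purpose,
    // since per the javadoc this hbck may not change state on hbase-2.x.
    int rc = ToolRunner.run(conf, new HBaseFsck(conf), new String[] { "-details" });
    System.exit(rc);
  }
}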

[14/51] [partial] hbase-site git commit: Published site at 1acbd36c903b048141866b143507bfce124a5c5f.

2018-11-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5299e667/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
index ac5997f..1da2c31 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HRegionServer.CompactionChecker
+private static class HRegionServer.CompactionChecker
 extends ScheduledChore
 
 
@@ -233,7 +233,7 @@ extends 
 
 instance
-private final HRegionServer instance
+private final HRegionServer instance
 
 
 
@@ -242,7 +242,7 @@ extends 
 
 majorCompactPriority
-private final int majorCompactPriority
+private final int majorCompactPriority
 
 
 
@@ -251,7 +251,7 @@ extends 
 
 DEFAULT_PRIORITY
-private static final int DEFAULT_PRIORITY
+private static final int DEFAULT_PRIORITY
 
 See Also:
 Constant
 Field Values
@@ -264,7 +264,7 @@ extends 
 
 iteration
-private long iteration
+private long iteration
 
 
 
@@ -281,7 +281,7 @@ extends 
 
 CompactionChecker
-CompactionChecker(HRegionServer h,
+CompactionChecker(HRegionServer h,
   int sleepTime,
   Stoppable stopper)
 
@@ -300,7 +300,7 @@ extends 
 
 chore
-protected void chore()
+protected void chore()
Description copied from class: ScheduledChore
The task to execute on each scheduled execution of the Chore
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5299e667/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
index 91904e9..6a88e40 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HRegionServer.MovedRegionInfo
+private static class HRegionServer.MovedRegionInfo
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 
 
@@ -218,7 +218,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 serverName
-private final ServerName serverName
+private final ServerName serverName
 
 
 
@@ -227,7 +227,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 seqNum
-private final long seqNum
+private final long seqNum
 
 
 
@@ -236,7 +236,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 ts
-private final long ts
+private final long ts
 
 
 
@@ -253,7 +253,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 MovedRegionInfo
-public MovedRegionInfo(ServerName serverName,
+public MovedRegionInfo(ServerName serverName,
 long closeSeqNum)
 
 
@@ -271,7 +271,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getServerName
-public ServerName getServerName()
+public ServerName getServerName()
 
 
 
@@ -280,7 +280,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getSeqNum
-public long getSeqNum()
+public long getSeqNum()
 
 
 
@@ -289,7 +289,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getMoveTime
-public long getMoveTime()
+public long getMoveTime()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5299e667/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
index 4934310..8e1bb52 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-protected static final class HRegionServer.MovedRegionsCleaner
+protected static final class HRegionServer.MovedRegionsCleaner
 extends ScheduledChore
 implements Stoppable
 Creates a Chore thread to clean the moved region 
cache.
@@ -242,7 +242,7 @@ implements 
 
 regionServer
-private HRegionServer regionServer
+private HRegionServer regionServer
 
 
 
@@ -251,7 +251,7 @@ implements 
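
CompactionChecker above is a ScheduledChore whose chore() body runs on every scheduled tick. A minimal sketch of that pattern; the (name, stopper, period) constructor shape is assumed from the HBase API, and the chore body is a placeholder:

import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;

class HeartbeatChore extends ScheduledChore {
  HeartbeatChore(Stoppable stopper, int periodMillis) {
    super("HeartbeatChore", stopper, periodMillis);
  }

  @Override
  protected void chore() {
    // Unit of work per scheduled run; CompactionChecker's version walks the
    // online regions and queues compaction requests instead.
    System.out.println("tick");
  }
}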
 
 

[14/51] [partial] hbase-site git commit: Published site at 130057f13774f6b213cdb06952c805a29d59396e.

2018-11-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/68eae623/devapidocs/src-html/org/apache/hadoop/hbase/security/access/UserPermission.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/UserPermission.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/UserPermission.html
index da23956..abb7072 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/UserPermission.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/UserPermission.html
@@ -26,191 +26,164 @@
 018
 019package 
org.apache.hadoop.hbase.security.access;
 020
-021import java.io.DataInput;
-022import java.io.DataOutput;
-023import java.io.IOException;
-024
-025import 
org.apache.hadoop.hbase.TableName;
-026import 
org.apache.yetus.audience.InterfaceAudience;
-027import org.slf4j.Logger;
-028import org.slf4j.LoggerFactory;
-029import 
org.apache.hadoop.hbase.util.Bytes;
-030
-031/**
-032 * Represents an authorization for access 
over the given table, column family
-033 * plus qualifier, for the given user.
-034 */
-035@InterfaceAudience.Private
-036public class UserPermission extends 
TablePermission {
-037  private static final Logger LOG = 
LoggerFactory.getLogger(UserPermission.class);
-038
-039  private byte[] user;
-040
-041  /** Nullary constructor for Writable, 
do not use */
-042  public UserPermission() {
-043super();
+021import java.util.Objects;
+022
+023import 
org.apache.hadoop.hbase.TableName;
+024import 
org.apache.yetus.audience.InterfaceAudience;
+025
+026/**
+027 * UserPermission consists of a user name 
and a permission.
+028 * Permission can be one of [Global, 
Namespace, Table] permission.
+029 */
+030@InterfaceAudience.Private
+031public class UserPermission {
+032
+033  private String user;
+034  private Permission permission;
+035
+036  /**
+037   * Construct a global user 
permission.
+038   * @param user user name
+039   * @param assigned assigned actions
+040   */
+041  public UserPermission(String user, 
Permission.Action... assigned) {
+042this.user = user;
+043this.permission = new 
GlobalPermission(assigned);
 044  }
 045
 046  /**
-047   * Creates a new instance for the given 
user.
-048   * @param user the user
-049   * @param assigned the list of allowed 
actions
+047   * Construct a global user 
permission.
+048   * @param user user name
+049   * @param actionCode action codes
 050   */
-051  public UserPermission(byte[] user, 
Action... assigned) {
-052super(null, null, null, assigned);
-053this.user = user;
+051  public UserPermission(String user, 
byte[] actionCode) {
+052this.user = user;
+053this.permission = new 
GlobalPermission(actionCode);
 054  }
 055
 056  /**
-057   * Creates a new instance for the given 
user,
-058   * matching the actions with the given 
codes.
-059   * @param user the user
-060   * @param actionCodes the list of 
allowed action codes
+057   * Construct a namespace user 
permission.
+058   * @param user user name
+059   * @param namespace namespace
+060   * @param assigned assigned actions
 061   */
-062  public UserPermission(byte[] user, 
byte[] actionCodes) {
-063super(null, null, null, 
actionCodes);
-064this.user = user;
+062  public UserPermission(String user, 
String namespace, Permission.Action... assigned) {
+063this.user = user;
+064this.permission = new 
NamespacePermission(namespace, assigned);
 065  }
 066
 067  /**
-068   * Creates a new instance for the given 
user.
-069   * @param user the user
-070   * @param namespace
-071   * @param assigned the list of allowed 
actions
+068   * Construct a table user permission.
+069   * @param user user name
+070   * @param tableName table name
+071   * @param assigned assigned actions
 072   */
-073  public UserPermission(byte[] user, 
String namespace, Action... assigned) {
-074super(namespace, assigned);
-075this.user = user;
+073  public UserPermission(String user, 
TableName tableName, Permission.Action... assigned) {
+074this.user = user;
+075this.permission = new 
TablePermission(tableName, assigned);
 076  }
 077
 078  /**
-079   * Creates a new instance for the given 
user,
-080   * matching the actions with the given 
codes.
-081   * @param user the user
-082   * @param namespace
-083   * @param actionCodes the list of 
allowed action codes
+079   * Construct a table:family user 
permission.
+080   * @param user user name
+081   * @param tableName table name
+082   * @param family family name of table
+083   * @param assigned assigned actions
 084   */
-085  public UserPermission(byte[] user, 
String namespace, byte[] actionCodes) {
-086super(namespace, actionCodes);
-087this.user = user;
+085  public UserPermission(String user, 
TableName tableName, byte[] family,
+086Permission.Action... assigned) {
+087this(user, tableName, family, null, 
assigned);
 088  }
 089
 090  /**
-091   * 
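
The rewritten UserPermission above pairs a user name with one of a global, namespace, or table Permission. A sketch exercising exactly the three constructor shapes visible in the new code (the Permission.Action constants are assumed from the HBase API):

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.access.UserPermission;

public class UserPermissionSketch {
  public static void main(String[] args) {
    // Global scope: user + actions.
    UserPermission global = new UserPermission("alice", Permission.Action.ADMIN);
    // Namespace scope: user + namespace + actions.
    UserPermission ns = new UserPermission("bob", "ns1", Permission.Action.READ);
    // Table scope: user + table + actions.
    UserPermission table = new UserPermission("carol", TableName.valueOf("ns1:t1"),
        Permission.Action.READ, Permission.Action.WRITE);
    System.out.println(global + " " + ns + " " + table);
  }
}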

[14/51] [partial] hbase-site git commit: Published site at d5e4faacc354c1bc4d93efa71ca97ee3a056123e.

2018-10-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b5e107c3/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html 
b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
index 42e78da..c879b39 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
@@ -3821,58 +3821,62 @@
 
 
 static HBaseClassTestRule
-TestMasterObserverPostCalls.CLASS_RULE
+TestSchedulerQueueDeadLock.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestWALProcedureStoreOnHDFS.CLASS_RULE
+TestMasterObserverPostCalls.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestReopenTableRegionsProcedureInfiniteLoop.CLASS_RULE
+TestWALProcedureStoreOnHDFS.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestEnableTableProcedure.CLASS_RULE
+TestReopenTableRegionsProcedureInfiniteLoop.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestServerCrashProcedureWithReplicas.CLASS_RULE
+TestEnableTableProcedure.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestMasterProcedureScheduler.CLASS_RULE
+TestServerCrashProcedureWithReplicas.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestSafemodeBringsDownMaster.CLASS_RULE
+TestMasterProcedureScheduler.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestRestoreSnapshotProcedure.CLASS_RULE
+TestSafemodeBringsDownMaster.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestMasterFailoverWithProcedures.CLASS_RULE
+TestRestoreSnapshotProcedure.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestMasterProcedureWalLease.CLASS_RULE
+TestMasterFailoverWithProcedures.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestTableDescriptorModificationFromClient.CLASS_RULE
+TestMasterProcedureWalLease.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestDeleteColumnFamilyProcedureFromClient.CLASS_RULE
+TestTableDescriptorModificationFromClient.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestCloneSnapshotProcedure.CLASS_RULE
+TestDeleteColumnFamilyProcedureFromClient.CLASS_RULE
 
 
 static HBaseClassTestRule
+TestCloneSnapshotProcedure.CLASS_RULE
+
+
+static HBaseClassTestRule
 TestProcedureAdmin.CLASS_RULE
 
 
@@ -4265,18 +4269,22 @@
 
 
 static HBaseClassTestRule
-TestProcedureRollbackAIOOB.CLASS_RULE
+TestLockAndQueue.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestProcedureToString.CLASS_RULE
+TestProcedureRollbackAIOOB.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestProcedureEvents.CLASS_RULE
+TestProcedureToString.CLASS_RULE
 
 
 static HBaseClassTestRule
+TestProcedureEvents.CLASS_RULE
+
+
+static HBaseClassTestRule
 TestProcedureSkipPersistence.CLASS_RULE
 
 
@@ -4593,574 +4601,578 @@
 
 
 static HBaseClassTestRule
-TestDataBlockEncodingTool.CLASS_RULE
+TestRegionServerAbortTimeout.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestMobStoreScanner.CLASS_RULE
+TestDataBlockEncodingTool.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestJoinedScanners.CLASS_RULE
+TestMobStoreScanner.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestTags.CLASS_RULE
+TestJoinedScanners.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestHRegionFileSystem.CLASS_RULE
+TestTags.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestRegionOpen.CLASS_RULE
+TestHRegionFileSystem.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestKeyValueScanFixture.CLASS_RULE
+TestRegionOpen.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestWALLockup.CLASS_RULE
+TestKeyValueScanFixture.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestLogRoller.CLASS_RULE
+TestWALLockup.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestColumnSeeking.CLASS_RULE
+TestLogRoller.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestMultiLogThreshold.CLASS_RULE
+TestColumnSeeking.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestShutdownWhileWALBroken.CLASS_RULE
+TestMultiLogThreshold.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestHeapMemoryManager.CLASS_RULE
+TestShutdownWhileWALBroken.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestFlushRegionEntry.CLASS_RULE
+TestHeapMemoryManager.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestMutateRowsRecovery.CLASS_RULE
+TestFlushRegionEntry.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestCellSkipListSet.CLASS_RULE
+TestMutateRowsRecovery.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestStoreFileInfo.CLASS_RULE
+TestCellSkipListSet.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestRegionReplicaFailover.CLASS_RULE
+TestStoreFileInfo.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestMajorCompaction.CLASS_RULE
+TestRegionReplicaFailover.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestFlushLifeCycleTracker.CLASS_RULE
+TestMajorCompaction.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestRegionReplicas.CLASS_RULE
+TestFlushLifeCycleTracker.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestBlocksRead.CLASS_RULE
+TestRegionReplicas.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestSCVFWithMiniCluster.CLASS_RULE
+TestBlocksRead.CLASS_RULE
 
 
 static HBaseClassTestRule
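
Every entry in the table above is the same one-line field repeated across test classes. A sketch of that convention, assuming HBaseClassTestRule.forClass() is the factory behind the CLASS_RULE fields listed here:

import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.junit.ClassRule;
import org.junit.Test;

public class TestSomething {
  // Class-level rule enforcing the suite's per-class timeout policy.
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestSomething.class);

  @Test
  public void testNothing() {
  }
}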

[14/51] [partial] hbase-site git commit: Published site at 3fe8649b2c9ba1271c25e8f476548907e4c7a90d.

2018-10-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8f09a71d/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.WriteRequestCostFunction.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.WriteRequestCostFunction.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.WriteRequestCostFunction.html
index c7d99b2..9d1542c 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.WriteRequestCostFunction.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.WriteRequestCostFunction.html
@@ -382,1357 +382,1365 @@
 374for (int i = 0; i < this.curFunctionCosts.length; i++) {
 375  curFunctionCosts[i] = 
tempFunctionCosts[i];
 376}
-377LOG.info("start 
StochasticLoadBalancer.balancer, initCost=" + currentCost + ", functionCost="
-378+ functionCost());
+377double initCost = currentCost;
+378double newCost = currentCost;
 379
-380double initCost = currentCost;
-381double newCost = currentCost;
-382
-383long computedMaxSteps;
-384if (runMaxSteps) {
-385  computedMaxSteps = 
Math.max(this.maxSteps,
-386  ((long)cluster.numRegions * 
(long)this.stepsPerRegion * (long)cluster.numServers));
-387} else {
-388  computedMaxSteps = 
Math.min(this.maxSteps,
-389  ((long)cluster.numRegions * 
(long)this.stepsPerRegion * (long)cluster.numServers));
-390}
-391// Perform a stochastic walk to see 
if we can get a good fit.
-392long step;
-393
-394for (step = 0; step < computedMaxSteps; step++) {
-395  Cluster.Action action = 
nextAction(cluster);
-396
-397  if (action.type == Type.NULL) {
-398continue;
-399  }
-400
-401  cluster.doAction(action);
-402  updateCostsWithAction(cluster, 
action);
-403
-404  newCost = computeCost(cluster, 
currentCost);
-405
-406  // Should this be kept?
-407  if (newCost < currentCost) {
-408currentCost = newCost;
-409
-410// save for JMX
-411curOverallCost = currentCost;
-412for (int i = 0; i < this.curFunctionCosts.length; i++) {
-413  curFunctionCosts[i] = 
tempFunctionCosts[i];
-414}
-415  } else {
-416// Put things back the way they 
were before.
-417// TODO: undo by remembering old 
values
-418Action undoAction = 
action.undoAction();
-419cluster.doAction(undoAction);
-420updateCostsWithAction(cluster, 
undoAction);
-421  }
-422
-423  if 
(EnvironmentEdgeManager.currentTime() - startTime 
-424  maxRunningTime) {
-425break;
-426  }
-427}
-428long endTime = 
EnvironmentEdgeManager.currentTime();
-429
-430
metricsBalancer.balanceCluster(endTime - startTime);
-431
-432// update costs metrics
-433updateStochasticCosts(tableName, 
curOverallCost, curFunctionCosts);
-434if (initCost > currentCost) {
-435  plans = 
createRegionPlans(cluster);
-436  LOG.info("Finished computing new 
load balance plan. Computation took {}" +
-437" to try {} different iterations. 
 Found a solution that moves " +
-438"{} regions; Going from a 
computed cost of {}" +
-439" to a new cost of {}", 
java.time.Duration.ofMillis(endTime - startTime),
-440step, plans.size(), initCost, 
currentCost);
-441  return plans;
-442}
-443LOG.info("Could not find a better 
load balance plan.  Tried {} different configurations in " +
-444  "{}, and did not find anything with 
a computed cost less than {}", step,
-445  java.time.Duration.ofMillis(endTime 
- startTime), initCost);
-446return null;
-447  }
-448
-449  /**
-450   * update costs to JMX
-451   */
-452  private void 
updateStochasticCosts(TableName tableName, Double overall, Double[] subCosts) 
{
-453if (tableName == null) return;
-454
-455// check if the metricsBalancer is 
MetricsStochasticBalancer before casting
-456if (metricsBalancer instanceof 
MetricsStochasticBalancer) {
-457  MetricsStochasticBalancer balancer 
= (MetricsStochasticBalancer) metricsBalancer;
-458  // overall cost
-459  
balancer.updateStochasticCost(tableName.getNameAsString(),
-460"Overall", "Overall cost", 
overall);
-461
-462  // each cost function
-463  for (int i = 0; i  
costFunctions.length; i++) {
-464CostFunction costFunction = 
costFunctions[i];
-465String costFunctionName = 
costFunction.getClass().getSimpleName();
-466Double costPercent = (overall == 
0) ? 0 : (subCosts[i] / overall);
-467// TODO: cost function may need a 
specific description
-468
balancer.updateStochasticCost(tableName.getNameAsString(), costFunctionName,
-469  "The percent of " + 
costFunctionName, costPercent);
-470  }
-471}
-472  
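The loop above is a bounded greedy randomized descent: try a random action, keep it if the computed cost drops, otherwise undo it, and stop on a step or time budget. Below is a minimal standalone sketch of that accept/undo skeleton; the Move, cost, and perturbation hooks are illustrative stand-ins for the balancer's nextAction()/computeCost(), not HBase API.

// Minimal sketch of the accept/undo walk used by balanceCluster() above.
// cost() and nextMove() are hypothetical stand-ins; only the control flow
// mirrors the real balancer loop.
import java.util.function.DoubleSupplier;
import java.util.function.Supplier;

public class GreedyStochasticWalk {
  interface Move {
    void apply();
    void undo();
  }

  public static double walk(DoubleSupplier cost, Supplier<Move> nextMove,
      long maxSteps, long maxRunningTimeMs) {
    double currentCost = cost.getAsDouble();
    long startTime = System.currentTimeMillis();
    for (long step = 0; step < maxSteps; step++) {
      Move move = nextMove.get();
      move.apply();
      double newCost = cost.getAsDouble();
      if (newCost < currentCost) {
        currentCost = newCost;      // keep the improvement
      } else {
        move.undo();                // put things back the way they were
      }
      if (System.currentTimeMillis() - startTime > maxRunningTimeMs) {
        break;                      // same time-budget check as the balancer loop
      }
    }
    return currentCost;
  }
}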

[14/51] [partial] hbase-site git commit: Published site at 7adf590106826b9e4432cfeee06acdc0ccff8c6e.

2018-10-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/425db230/testdevapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.html 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.html
index 0c894de..8729895 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.html
@@ -179,4145 +179,4146 @@
 171 * avoiding port contention if another local HBase instance is already running).
 172 * <p>To preserve test data directories, pass the system property "hbase.testing.preserve.testdir"
 173 * setting it to true.
-174 */
-175@InterfaceAudience.Public
-176@SuppressWarnings("deprecation")
-177public class HBaseTestingUtility extends HBaseZKTestingUtility {
-178
-179  /**
-180   * System property key to get test directory value. Name is as it is because mini dfs has
-181   * hard-codings to put test data here. It should NOT be used directly in HBase, as it's a property
-182   * used in mini dfs.
-183   * @deprecated can be used only with mini dfs
-184   */
-185  @Deprecated
-186  private static final String TEST_DIRECTORY_KEY = "test.build.data";
-187
-188  public static final String REGIONS_PER_SERVER_KEY = "hbase.test.regions-per-server";
-189  /**
-190   * The default number of regions per regionserver when creating a pre-split
-191   * table.
-192   */
-193  public static final int DEFAULT_REGIONS_PER_SERVER = 3;
-194
+174 * Trigger pre commit.
+175 */
+176@InterfaceAudience.Public
+177@SuppressWarnings("deprecation")
+178public class HBaseTestingUtility extends HBaseZKTestingUtility {
+179
+180  /**
+181   * System property key to get test directory value. Name is as it is because mini dfs has
+182   * hard-codings to put test data here. It should NOT be used directly in HBase, as it's a property
+183   * used in mini dfs.
+184   * @deprecated can be used only with mini dfs
+185   */
+186  @Deprecated
+187  private static final String TEST_DIRECTORY_KEY = "test.build.data";
+188
+189  public static final String REGIONS_PER_SERVER_KEY = "hbase.test.regions-per-server";
+190  /**
+191   * The default number of regions per regionserver when creating a pre-split
+192   * table.
+193   */
+194  public static final int DEFAULT_REGIONS_PER_SERVER = 3;
 195
-196  public static final String PRESPLIT_TEST_TABLE_KEY = "hbase.test.pre-split-table";
-197  public static final boolean PRESPLIT_TEST_TABLE = true;
-198
-199  private MiniDFSCluster dfsCluster = null;
-200
-201  private volatile HBaseCluster hbaseCluster = null;
-202  private MiniMRCluster mrCluster = null;
-203
-204  /** If there is a mini cluster running for this testing utility instance. */
-205  private volatile boolean miniClusterRunning;
-206
-207  private String hadoopLogDir;
-208
-209  /** Directory on test filesystem where we put the data for this instance of
-210   * HBaseTestingUtility*/
-211  private Path dataTestDirOnTestFS = null;
-212
-213  /**
-214   * Shared cluster connection.
-215   */
-216  private volatile Connection connection;
-217
-218  /** Filesystem URI used for map-reduce mini-cluster setup */
-219  private static String FS_URI;
-220
-221  /** This is for unit tests parameterized with a single boolean. */
-222  public static final List<Object[]> MEMSTORETS_TAGS_PARAMETRIZED = memStoreTSAndTagsCombination();
-223
-224  /**
-225   * Checks to see if a specific port is available.
-226   *
-227   * @param port the port number to check for availability
-228   * @return <tt>true</tt> if the port is available, or <tt>false</tt> if not
-229   */
-230  public static boolean available(int port) {
-231    ServerSocket ss = null;
-232    DatagramSocket ds = null;
-233    try {
-234      ss = new ServerSocket(port);
-235      ss.setReuseAddress(true);
-236      ds = new DatagramSocket(port);
-237      ds.setReuseAddress(true);
-238      return true;
-239    } catch (IOException e) {
-240      // Do nothing
-241    } finally {
-242      if (ds != null) {
-243        ds.close();
-244      }
-245
-246      if (ss != null) {
-247        try {
-248          ss.close();
-249        } catch (IOException e) {
-250          /* should not be thrown */
-251        }
-252      }
-253    }
-254
-255    return false;
-256  }
-257
-258  /**
-259   * Create all combinations of Bloom filters and compression algorithms for
-260   * testing.
-261   */
-262  private static List<Object[]> bloomAndCompressionCombinations() {
-263    List<Object[]> configurations = new ArrayList<>();
-264    for (Compression.Algorithm comprAlgo : HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS) {
-265      for (BloomType bloomType : BloomType.values()) {
-266        configurations.add(new Object[] { comprAlgo, bloomType });
-267      }
-268    }
-269    return 
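bloomAndCompressionCombinations() above builds a List<Object[]> for JUnit's Parameterized runner. A sketch of how such a combination list is typically consumed; the enum values are replaced with strings so the sketch is self-contained, and the test body is a placeholder:

// Illustrative JUnit4 parameterized test consuming a combination list like
// bloomAndCompressionCombinations() above; the parameter values and test
// body are stand-ins, not the real HBase enums.
import java.util.Arrays;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

@RunWith(Parameterized.class)
public class BloomCompressionTest {
  @Parameterized.Parameters(name = "compr={0}, bloom={1}")
  public static List<Object[]> params() {
    // Stand-in for HBaseTestingUtility.bloomAndCompressionCombinations()
    return Arrays.asList(new Object[] { "NONE", "ROW" }, new Object[] { "GZ", "ROWCOL" });
  }

  private final String comprAlgo;
  private final String bloomType;

  public BloomCompressionTest(String comprAlgo, String bloomType) {
    this.comprAlgo = comprAlgo;
    this.bloomType = bloomType;
  }

  @Test
  public void testCombination() {
    // A real test would create a column family with comprAlgo/bloomType here.
  }
}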

[14/51] [partial] hbase-site git commit: Published site at 5fbb227deb365fe812d433fe39b85ac4b0ddee20.

2018-10-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c9ebe686/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
index 3f0afcb..b5a8205 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
@@ -288,7 +288,7 @@ extends Procedure
-acquireLock, addStackIndex, afterReplay, beforeReplay, bypass, compareTo, completionCleanup, doAcquireLock, doExecute, doReleaseLock, doRollback, elapsedTime, getChildrenLatch, getException, getLastUpdate, getNonceKey, getOwner, getParentProcId, getProcedureMetrics, getProcId, getProcIdHashCode, getProcName, getResult, getRootProcedureId, getRootProcId, getStackIndexes, getState, getSubmittedTime, getTimeout, getTimeoutTimestamp, hasChildren, hasException, hasLock, hasOwner, hasParent, hasTimeout, haveSameParent, holdLock, incChildrenLatch, isBypass, isFailed, isFinished, isInitializing, isRunnable, isSuccess, isWaiting, isYieldAfterExecutionStep, lockedWhenLoading, needPersistence, releaseLock, removeStackIndex, resetPersistence, restoreLock, setAbortFailure, setChildrenLatch, setFailure, setFailure, setLastUpdate, setNonceKey, setOwner, setOwner, setParentProcId, setProcId, setResult, setRootProcId, setStackIndexes, setState, setSubmittedTime, setTimeout, setTimeoutFailure, shouldWaitClientAck, skipPersistence, toString, toStringClass, toStringClassDetails, toStringDetails, toStringSimpleSB, toStringState, tryRunnable, updateMetricsOnFinish, updateMetricsOnSubmit, updateTimestamp, waitInitialized, wasExecuted
+acquireLock, addStackIndex, afterReplay, beforeReplay, bypass, compareTo, completionCleanup, doAcquireLock, doExecute, doReleaseLock, doRollback, elapsedTime, getChildrenLatch, getException, getLastUpdate, getNonceKey, getOwner, getParentProcId, getProcedureMetrics, getProcId, getProcIdHashCode, getProcName, getResult, getRootProcedureId, getRootProcId, getStackIndexes, getState, getSubmittedTime, getTimeout, getTimeoutTimestamp, hasChildren, hasException, hasLock, hasOwner, hasParent, hasTimeout, haveSameParent, holdLock, incChildrenLatch, isBypass, isFailed, isFinished, isInitializing, isRunnable, isSuccess, isWaiting, isYieldAfterExecutionStep, lockedWhenLoading, needPersistence, releaseLock, removeStackIndex, resetPersistence, restoreLock, setAbortFailure, setChildrenLatch, setFailure, setFailure, setLastUpdate, setNonceKey, setOwner, setOwner, setParentProcId, setProcId, setResult, setRootProcId, setStackIndexes, setState, setSubmittedTime, setTimeout, setTimeoutFailure, shouldWaitClientAck, skipPersistence, toString, toStringClass, toStringClassDetails, toStringDetails, toStringSimpleSB, toStringState, tryRunnable, updateMetricsOnFinish, updateMetricsOnSubmit, updateTimestamp, waitInitialized, wasExecuted
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c9ebe686/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
index 209f474..175527f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class ProcedureExecutor.FailedProcedure<TEnvironment>
+public static class ProcedureExecutor.FailedProcedure<TEnvironment>
extends Procedure<TEnvironment>
 
 
@@ -250,7 +250,7 @@ extends Procedure
-acquireLock, addStackIndex, afterReplay, beforeReplay, bypass, compareTo, completionCleanup, doAcquireLock, doExecute, doReleaseLock, doRollback, elapsedTime, getChildrenLatch, getException, getLastUpdate, getNonceKey, getOwner, getParentProcId, getProcedureMetrics, getProcId, getProcIdHashCode, getResult, getRootProcedureId, getRootProcId, getStackIndexes, getState, getSubmittedTime, getTimeout, getTimeoutTimestamp, hasChildren, hasException, hasLock, hasOwner, hasParent, hasTimeout, haveSameParent, holdLock, incChildrenLatch, isBypass, isFailed, isFinished, isInitializing, isRunnable, isSuccess, isWaiting, isYieldAfterExecutionStep, lockedWhenLoading, needPersistence, releaseLock, removeStackIndex, resetPer
 

[14/51] [partial] hbase-site git commit: Published site at 821e4d7de2d576189f4288d1c2acf9e9a9471f5c.

2018-10-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/323b17d9/testdevapidocs/org/apache/hadoop/hbase/procedure2/TestProcedureBypass.StuckStateMachineProcedure.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/procedure2/TestProcedureBypass.StuckStateMachineProcedure.html
 
b/testdevapidocs/org/apache/hadoop/hbase/procedure2/TestProcedureBypass.StuckStateMachineProcedure.html
new file mode 100644
index 000..c3cc114
--- /dev/null
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/procedure2/TestProcedureBypass.StuckStateMachineProcedure.html
@@ -0,0 +1,449 @@
+TestProcedureBypass.StuckStateMachineProcedure (Apache HBase 
3.0.0-SNAPSHOT Test API)
+org.apache.hadoop.hbase.procedure2
+Class TestProcedureBypass.StuckStateMachineProcedure
+
+java.lang.Object
+  org.apache.hadoop.hbase.procedure2.Procedure<TEnvironment>
+    org.apache.hadoop.hbase.procedure2.StateMachineProcedure<TEnv,TState>
+      org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.NoopStateMachineProcedure<TestProcedureBypass.TestProcEnv,TestProcedureBypass.StuckStateMachineState>
+        org.apache.hadoop.hbase.procedure2.TestProcedureBypass.StuckStateMachineProcedure
+
+All Implemented Interfaces:
+java.lang.Comparable<org.apache.hadoop.hbase.procedure2.Procedure<TestProcedureBypass.TestProcEnv>>
+
+Enclosing class:
+TestProcedureBypass
+
+public static class TestProcedureBypass.StuckStateMachineProcedure
+extends ProcedureTestingUtility.NoopStateMachineProcedure<TestProcedureBypass.TestProcEnv,TestProcedureBypass.StuckStateMachineState>
+
+Nested Class Summary
+
+Nested classes/interfaces inherited from class org.apache.hadoop.hbase.procedure2.StateMachineProcedure
+org.apache.hadoop.hbase.procedure2.StateMachineProcedure.Flow
+
+Nested classes/interfaces inherited from class org.apache.hadoop.hbase.procedure2.Procedure
+org.apache.hadoop.hbase.procedure2.Procedure.LockState
+
+Field Summary
+
+private java.util.concurrent.atomic.AtomicBoolean stop
+
+Fields inherited from class org.apache.hadoop.hbase.procedure2.StateMachineProcedure
+stateCount
+
+Fields inherited from class org.apache.hadoop.hbase.procedure2.Procedure
+NO_PROC_ID, NO_TIMEOUT
+
+Constructor Summary
+
+StuckStateMachineProcedure()
+StuckStateMachineProcedure(TestProcedureBypass.TestProcEnv env, TestProcedureBypass.StuckStateMachineState initialState)
+
+Method Summary
+
+protected org.apache.hadoop.hbase.procedure2.StateMachineProcedure.Flow executeFromState(TestProcedureBypass.TestProcEnv env, TestProcedureBypass.StuckStateMachineState tState)
+protected TestProcedureBypass.StuckStateMachineState getState(int stateId)
+protected int getStateId(TestProcedureBypass.StuckStateMachineState tState)
+
+Methods inherited from class org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.NoopStateMachineProcedure
+getInitialState, rollbackState
+
+Methods inherited from class org.apache.hadoop.hbase.procedure2.StateMachineProcedure
+abort, addChildProcedure, deserializeStateData, execute, failIfAborted, getCurrentState, getCurrentStateId, 
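For orientation, here is a compact sketch of the StateMachineProcedure shape this test class implements: executeFromState() advances an enum-indexed state machine and reports whether more states remain, while an AtomicBoolean lets the test hold the procedure "stuck". Flow, MyState, and the class itself are simplified stand-ins, not the real procedure2 types.

// Hedged sketch of the executeFromState()/getState()/getStateId() contract
// listed in the method summary above; the "stuck" loop mirrors the
// AtomicBoolean stop field in the field summary.
import java.util.concurrent.atomic.AtomicBoolean;

public class StuckSketch {
  enum Flow { HAS_MORE_STATE, NO_MORE_STATE }
  enum MyState { START, THEN, END }

  private final AtomicBoolean stop = new AtomicBoolean(false);

  protected Flow executeFromState(Object env, MyState state) {
    switch (state) {
      case START:
        return Flow.HAS_MORE_STATE;  // advance to the next state
      case THEN:
        // "Stuck" behavior: keep reporting more work until the flag flips.
        return stop.get() ? Flow.NO_MORE_STATE : Flow.HAS_MORE_STATE;
      default:
        return Flow.NO_MORE_STATE;
    }
  }

  protected MyState getState(int stateId) { return MyState.values()[stateId]; }

  protected int getStateId(MyState state) { return state.ordinal(); }
}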

[14/51] [partial] hbase-site git commit: Published site at fa5fa6ecdd071b72b58971058ff3ab9d28c3e709.

2018-10-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d1341859/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
index e1b183b..b456cd2 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
@@ -53,1338 +53,1354 @@
 045import org.apache.hadoop.hbase.log.HBaseMarkers;
 046import org.apache.hadoop.hbase.procedure2.Procedure;
 047import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-048import org.apache.hadoop.hbase.procedure2.store.ProcedureStoreBase;
-049import org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker;
-050import org.apache.hadoop.hbase.procedure2.util.ByteSlot;
-051import org.apache.hadoop.hbase.procedure2.util.StringUtils;
-052import org.apache.hadoop.hbase.util.CommonFSUtils;
-053import org.apache.hadoop.hbase.util.Threads;
-054import org.apache.hadoop.ipc.RemoteException;
-055import org.apache.yetus.audience.InterfaceAudience;
-056import org.slf4j.Logger;
-057import org.slf4j.LoggerFactory;
-058
-059import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-060import org.apache.hbase.thirdparty.org.apache.commons.collections4.queue.CircularFifoQueue;
-061
-062import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader;
-063
-064/**
-065 * WAL implementation of the ProcedureStore.
-066 * <p/>
-067 * When starting, the upper layer will first call {@link #start(int)}, then {@link #recoverLease()},
-068 * then {@link #load(ProcedureLoader)}.
-069 * <p/>
-070 * In {@link #recoverLease()}, we will get the lease by closing all the existing wal files (by
-071 * calling recoverFileLease), and creating a new wal writer. We will also get the list of all
-072 * the old wal files.
-073 * <p/>
-074 * FIXME: notice that the current recover lease implementation is problematic; it cannot deal with
-075 * the races if there are two masters that both want to acquire the lease...
-076 * <p/>
-077 * In the {@link #load(ProcedureLoader)} method, we will load all the active procedures. See the
-078 * comments of this method for more details.
-079 * <p/>
-080 * The actual logging works a bit like our FileSystem-based WAL implementation on the RS side. There is
-081 * a {@link #slots} array, which acts like a ring buffer, and in the insert, update and delete
-082 * methods we will put things into the {@link #slots} and wait. There is also a background sync
-083 * thread (see the {@link #syncLoop()} method) which takes data from the {@link #slots}, writes it
-084 * to the FileSystem, and notifies the caller that we have finished.
-085 * <p/>
-086 * TODO: try using disruptor to increase performance and simplify the logic?
-087 * <p/>
-088 * The {@link #storeTracker} keeps track of the modified procedures in the newest wal file, which is
-089 * also the one being written currently. The deleted bits in it are for all the procedures, not
-090 * only the ones in the newest wal file. When rolling a log, we will first store it in the
-091 * trailer of the current wal file, and then reset its modified bits, so that it can start to track
-092 * the modified procedures for the new wal file.
-093 * <p/>
-094 * The {@link #holdingCleanupTracker} is used to test whether we are safe to delete the oldest wal
-095 * file. When there is log rolling and there is more than 1 wal file, we will make use of it. It
-096 * will first be initialized to the oldest file's tracker (which is stored in the trailer), using the
-097 * method {@link ProcedureStoreTracker#resetTo(ProcedureStoreTracker, boolean)}, and then merge it
-098 * with the tracker of every newer wal file, using
-099 * {@link ProcedureStoreTracker#setDeletedIfModifiedInBoth(ProcedureStoreTracker)}. If we find out
-100 * that all the modified procedures for the oldest wal file are modified or deleted in newer wal
-101 * files, then we can delete it.
-102 * @see ProcedureWALPrettyPrinter for printing content of a single WAL.
-103 * @see #main(String[]) to parse a directory of MasterWALProcs.
-104 */
-105@InterfaceAudience.Private
-106public class WALProcedureStore extends ProcedureStoreBase {
-107  private static final Logger LOG = LoggerFactory.getLogger(WALProcedureStore.class);
-108  public static final String LOG_PREFIX = "pv2-";
-109  /** Used to construct the name of the log directory for master procedures */
-110  public static final String MASTER_PROCEDURE_LOGDIR = "MasterProcWALs";
-111
-112
-113  public interface LeaseRecovery {
-114    void recoverFileLease(FileSystem fs, Path path) throws IOException;
-115  }
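The javadoc above describes a slot-based hand-off between writers and a single background sync thread. Below is a minimal sketch of that pattern; Slot and the flush step are simplified stand-ins for ByteSlot and the real syncLoop()/FileSystem writer.

// Minimal sketch of the slots + background-sync pattern: writers park a
// buffer in a shared queue and block until the sync thread has flushed it.
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;

public class SlotSyncSketch {
  static final class Slot {
    final byte[] payload;
    final CountDownLatch synced = new CountDownLatch(1);
    Slot(byte[] payload) { this.payload = payload; }
  }

  private final BlockingQueue<Slot> slots = new LinkedBlockingQueue<>();

  // Called by insert()/update()/delete(): enqueue and wait for the sync thread.
  public void append(byte[] payload) throws InterruptedException {
    Slot slot = new Slot(payload);
    slots.put(slot);
    slot.synced.await();  // caller is released once the slot hits "disk"
  }

  // The background sync loop: drain slots, write them out, wake the callers.
  public void syncLoop() throws InterruptedException {
    while (!Thread.currentThread().isInterrupted()) {
      Slot slot = slots.take();
      // A real store would batch slots and fsync the WAL file here.
      slot.synced.countDown();
    }
  }
}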

[14/51] [partial] hbase-site git commit: Published site at 6bc7089f9e0793efc9bdd46a84f5ccd9bc4579ad.

2018-09-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/419d0338/testdevapidocs/allclasses-frame.html
--
diff --git a/testdevapidocs/allclasses-frame.html 
b/testdevapidocs/allclasses-frame.html
index 57b963c..072fb4c 100644
--- a/testdevapidocs/allclasses-frame.html
+++ b/testdevapidocs/allclasses-frame.html
@@ -450,6 +450,7 @@
 ProcedureTestingUtility.LoadCounter
 ProcedureTestingUtility.NoopProcedure
 ProcedureTestingUtility.TestProcedure
+ProcedureTestUtil
 ProcedureWALLoaderPerformanceEvaluation
 ProcedureWALLoaderPerformanceEvaluation.LoadCounter
 ProcedureWALPerformanceEvaluation
@@ -480,15 +481,11 @@
 ResourceChecker.ResourceAnalyzer
 ResourceCheckerJUnitListener
 ResourceCheckerJUnitListener.AvailableMemoryMBResourceAnalyzer
-ResourceCheckerJUnitListener.GCCountResourceAnalyzer
-ResourceCheckerJUnitListener.GCTimeSecondResourceAnalyzer
 ResourceCheckerJUnitListener.MaxFileDescriptorResourceAnalyzer
-ResourceCheckerJUnitListener.MaxHeapMemoryMBResourceAnalyzer
 ResourceCheckerJUnitListener.OpenFileDescriptorResourceAnalyzer
 ResourceCheckerJUnitListener.ProcessCountResourceAnalyzer
 ResourceCheckerJUnitListener.SystemLoadAverageResourceAnalyzer
 ResourceCheckerJUnitListener.ThreadResourceAnalyzer
-ResourceCheckerJUnitListener.UsedHeapMemoryMBResourceAnalyzer
 RESTApiClusterManager
 RESTApiClusterManager.RoleCommand
 RESTApiClusterManager.Service
@@ -1530,6 +1527,8 @@
 TestMobStoreScanner
 TestModelBase
 TestModifyNamespaceProcedure
+TestModifyPeerProcedureRetryBackoff
+TestModifyPeerProcedureRetryBackoff.TestModifyPeerProcedure
 TestModifyTableProcedure
 TestModifyTableWhileMerging
 TestMovedRegionsCleaner
@@ -1681,6 +1680,8 @@
 TestProcedureReplayOrder.TestTwoStepProcedure
 TestProcedureSchedulerConcurrency
 TestProcedureSchedulerConcurrency.TestProcedureWithEvent
+TestProcedureSkipPersistence
+TestProcedureSkipPersistence.TestProcedure
 TestProcedureStoreTracker
 TestProcedureSuspended
 TestProcedureSuspended.TestLockProcedure

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/419d0338/testdevapidocs/allclasses-noframe.html
--
diff --git a/testdevapidocs/allclasses-noframe.html 
b/testdevapidocs/allclasses-noframe.html
index eacc628..3b89d30 100644
--- a/testdevapidocs/allclasses-noframe.html
+++ b/testdevapidocs/allclasses-noframe.html
@@ -450,6 +450,7 @@
 ProcedureTestingUtility.LoadCounter
 ProcedureTestingUtility.NoopProcedure
 ProcedureTestingUtility.TestProcedure
+ProcedureTestUtil
 ProcedureWALLoaderPerformanceEvaluation
 ProcedureWALLoaderPerformanceEvaluation.LoadCounter
 ProcedureWALPerformanceEvaluation
@@ -480,15 +481,11 @@
 ResourceChecker.ResourceAnalyzer
 ResourceCheckerJUnitListener
 ResourceCheckerJUnitListener.AvailableMemoryMBResourceAnalyzer
-ResourceCheckerJUnitListener.GCCountResourceAnalyzer
-ResourceCheckerJUnitListener.GCTimeSecondResourceAnalyzer
 ResourceCheckerJUnitListener.MaxFileDescriptorResourceAnalyzer
-ResourceCheckerJUnitListener.MaxHeapMemoryMBResourceAnalyzer
 ResourceCheckerJUnitListener.OpenFileDescriptorResourceAnalyzer
 ResourceCheckerJUnitListener.ProcessCountResourceAnalyzer
 ResourceCheckerJUnitListener.SystemLoadAverageResourceAnalyzer
 ResourceCheckerJUnitListener.ThreadResourceAnalyzer
-ResourceCheckerJUnitListener.UsedHeapMemoryMBResourceAnalyzer
 RESTApiClusterManager
 RESTApiClusterManager.RoleCommand
 RESTApiClusterManager.Service
@@ -1530,6 +1527,8 @@
 TestMobStoreScanner
 TestModelBase
 TestModifyNamespaceProcedure
+TestModifyPeerProcedureRetryBackoff
+TestModifyPeerProcedureRetryBackoff.TestModifyPeerProcedure
 TestModifyTableProcedure
 TestModifyTableWhileMerging
 TestMovedRegionsCleaner
@@ -1681,6 +1680,8 @@
 TestProcedureReplayOrder.TestTwoStepProcedure
 TestProcedureSchedulerConcurrency
 TestProcedureSchedulerConcurrency.TestProcedureWithEvent
+TestProcedureSkipPersistence
+TestProcedureSkipPersistence.TestProcedure
 TestProcedureStoreTracker
 TestProcedureSuspended
 TestProcedureSuspended.TestLockProcedure



[14/51] [partial] hbase-site git commit: Published site at d7e08317d2f214e4cca7b67578aba0ed7a567d54.

2018-09-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37cf49a6/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStateStore.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStateStore.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStateStore.html
index 207ebcc..c3ef37e 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStateStore.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStateStore.html
@@ -28,330 +28,332 @@
 020import java.io.IOException;
 021import java.util.Collections;
 022import java.util.List;
-023import org.apache.hadoop.hbase.Cell;
-024import org.apache.hadoop.hbase.CellBuilderFactory;
-025import org.apache.hadoop.hbase.CellBuilderType;
-026import org.apache.hadoop.hbase.HConstants;
-027import org.apache.hadoop.hbase.HRegionLocation;
-028import org.apache.hadoop.hbase.MetaTableAccessor;
-029import org.apache.hadoop.hbase.RegionLocations;
-030import org.apache.hadoop.hbase.ServerName;
-031import org.apache.hadoop.hbase.TableName;
-032import org.apache.hadoop.hbase.client.Put;
-033import org.apache.hadoop.hbase.client.RegionInfo;
-034import org.apache.hadoop.hbase.client.Result;
-035import org.apache.hadoop.hbase.client.Table;
-036import org.apache.hadoop.hbase.client.TableDescriptor;
-037import org.apache.hadoop.hbase.master.MasterFileSystem;
+023import org.apache.hadoop.fs.FileSystem;
+024import org.apache.hadoop.hbase.Cell;
+025import org.apache.hadoop.hbase.CellBuilderFactory;
+026import org.apache.hadoop.hbase.CellBuilderType;
+027import org.apache.hadoop.hbase.HConstants;
+028import org.apache.hadoop.hbase.HRegionLocation;
+029import org.apache.hadoop.hbase.MetaTableAccessor;
+030import org.apache.hadoop.hbase.RegionLocations;
+031import org.apache.hadoop.hbase.ServerName;
+032import org.apache.hadoop.hbase.TableName;
+033import org.apache.hadoop.hbase.client.Put;
+034import org.apache.hadoop.hbase.client.RegionInfo;
+035import org.apache.hadoop.hbase.client.Result;
+036import org.apache.hadoop.hbase.client.Table;
+037import org.apache.hadoop.hbase.client.TableDescriptor;
 038import org.apache.hadoop.hbase.master.MasterServices;
 039import org.apache.hadoop.hbase.master.RegionState.State;
 040import org.apache.hadoop.hbase.procedure2.Procedure;
 041import org.apache.hadoop.hbase.procedure2.util.StringUtils;
 042import org.apache.hadoop.hbase.util.Bytes;
 043import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-044import org.apache.hadoop.hbase.wal.WALSplitter;
-045import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-046import org.apache.yetus.audience.InterfaceAudience;
-047import org.apache.zookeeper.KeeperException;
-048import org.slf4j.Logger;
-049import org.slf4j.LoggerFactory;
-050
-051import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-052import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-053
-054/**
-055 * Store Region State to hbase:meta table.
-056 */
-057@InterfaceAudience.Private
-058public class RegionStateStore {
-059  private static final Logger LOG = LoggerFactory.getLogger(RegionStateStore.class);
-060
-061  /** The delimiter for meta columns for replicaIds &gt; 0 */
-062  protected static final char META_REPLICA_ID_DELIMITER = '_';
-063
-064  private final MasterServices master;
-065
-066  public RegionStateStore(final MasterServices master) {
-067    this.master = master;
-068  }
-069
-070  public interface RegionStateVisitor {
-071    void visitRegionState(Result result, RegionInfo regionInfo, State state,
-072      ServerName regionLocation, ServerName lastHost, long openSeqNum);
-073  }
-074
-075  public void visitMeta(final RegionStateVisitor visitor) throws IOException {
-076    MetaTableAccessor.fullScanRegions(master.getConnection(), new MetaTableAccessor.Visitor() {
-077      final boolean isDebugEnabled = LOG.isDebugEnabled();
-078
-079      @Override
-080      public boolean visit(final Result r) throws IOException {
-081        if (r != null && !r.isEmpty()) {
-082          long st = 0;
-083          if (LOG.isTraceEnabled()) {
-084            st = System.currentTimeMillis();
-085          }
-086          visitMetaEntry(visitor, r);
-087          if (LOG.isTraceEnabled()) {
-088            long et = System.currentTimeMillis();
-089            LOG.trace("[T] LOAD META PERF " + StringUtils.humanTimeDiff(et - st));
-090          }
-091        } else if (isDebugEnabled) {
-092          LOG.debug("NULL result from meta - ignoring but this is strange.");
-093        }
-094        return true;
-095      }
-096    });
-097  }
-098
-099  private void visitMetaEntry(final RegionStateVisitor visitor, final Result result)
-100      throws IOException {
-101    final RegionLocations rl = 
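visitMeta() above adapts a raw hbase:meta scan into typed visitRegionState() callbacks. An illustrative caller follows; note RegionStateStore is a private master-side class, so this is for orientation only and the logging body is a stand-in for real bookkeeping.

// Illustrative RegionStateVisitor for the visitMeta() API shown above.
import java.io.IOException;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.master.RegionState.State;
import org.apache.hadoop.hbase.master.assignment.RegionStateStore;

public class DumpRegionStates {
  public static void dump(RegionStateStore stateStore) throws IOException {
    stateStore.visitMeta(new RegionStateStore.RegionStateVisitor() {
      @Override
      public void visitRegionState(Result result, RegionInfo regionInfo, State state,
          ServerName regionLocation, ServerName lastHost, long openSeqNum) {
        System.out.println(regionInfo.getEncodedName() + " state=" + state
            + " on=" + regionLocation + " openSeqNum=" + openSeqNum);
      }
    });
  }
}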

[14/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
index 25f458d..20e3eaa 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
@@ -28,3711 +28,3756 @@
 020import java.io.FileNotFoundException;
 021import java.io.IOException;
 022import java.io.InterruptedIOException;
-023import 
java.lang.reflect.InvocationTargetException;
-024import java.net.BindException;
-025import java.net.InetSocketAddress;
-026import java.net.UnknownHostException;
-027import java.nio.ByteBuffer;
-028import java.util.ArrayList;
-029import java.util.Arrays;
-030import java.util.Collections;
-031import java.util.HashMap;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Map;
-035import java.util.Map.Entry;
-036import java.util.NavigableMap;
-037import java.util.Set;
-038import java.util.TreeSet;
-039import 
java.util.concurrent.ConcurrentHashMap;
-040import 
java.util.concurrent.ConcurrentMap;
-041import java.util.concurrent.TimeUnit;
-042import 
java.util.concurrent.atomic.AtomicBoolean;
-043import 
java.util.concurrent.atomic.AtomicLong;
-044import 
java.util.concurrent.atomic.LongAdder;
-045import 
org.apache.commons.lang3.mutable.MutableObject;
-046import 
org.apache.hadoop.conf.Configuration;
-047import org.apache.hadoop.fs.Path;
-048import 
org.apache.hadoop.hbase.ByteBufferExtendedCell;
-049import 
org.apache.hadoop.hbase.CacheEvictionStats;
-050import 
org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
-051import org.apache.hadoop.hbase.Cell;
-052import 
org.apache.hadoop.hbase.CellScannable;
-053import 
org.apache.hadoop.hbase.CellScanner;
-054import 
org.apache.hadoop.hbase.CellUtil;
-055import 
org.apache.hadoop.hbase.CompareOperator;
-056import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-057import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-058import 
org.apache.hadoop.hbase.HBaseIOException;
-059import 
org.apache.hadoop.hbase.HConstants;
-060import 
org.apache.hadoop.hbase.MultiActionResultTooLarge;
-061import 
org.apache.hadoop.hbase.NotServingRegionException;
-062import 
org.apache.hadoop.hbase.PrivateCellUtil;
-063import 
org.apache.hadoop.hbase.RegionTooBusyException;
-064import org.apache.hadoop.hbase.Server;
-065import 
org.apache.hadoop.hbase.ServerName;
-066import 
org.apache.hadoop.hbase.TableName;
-067import 
org.apache.hadoop.hbase.UnknownScannerException;
-068import 
org.apache.hadoop.hbase.client.Append;
-069import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-070import 
org.apache.hadoop.hbase.client.Delete;
-071import 
org.apache.hadoop.hbase.client.Durability;
-072import 
org.apache.hadoop.hbase.client.Get;
-073import 
org.apache.hadoop.hbase.client.Increment;
-074import 
org.apache.hadoop.hbase.client.Mutation;
-075import 
org.apache.hadoop.hbase.client.Put;
-076import 
org.apache.hadoop.hbase.client.RegionInfo;
-077import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-078import 
org.apache.hadoop.hbase.client.Result;
-079import 
org.apache.hadoop.hbase.client.Row;
-080import 
org.apache.hadoop.hbase.client.RowMutations;
-081import 
org.apache.hadoop.hbase.client.Scan;
-082import 
org.apache.hadoop.hbase.client.TableDescriptor;
-083import 
org.apache.hadoop.hbase.client.VersionInfoUtil;
-084import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-085import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-086import 
org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
-087import 
org.apache.hadoop.hbase.exceptions.ScannerResetException;
-088import 
org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-089import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-090import 
org.apache.hadoop.hbase.io.TimeRange;
-091import 
org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
-092import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-093import 
org.apache.hadoop.hbase.ipc.PriorityFunction;
-094import 
org.apache.hadoop.hbase.ipc.QosPriority;
-095import 
org.apache.hadoop.hbase.ipc.RpcCallContext;
-096import 
org.apache.hadoop.hbase.ipc.RpcCallback;
-097import 
org.apache.hadoop.hbase.ipc.RpcScheduler;
-098import 
org.apache.hadoop.hbase.ipc.RpcServer;
-099import 
org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
-100import 
org.apache.hadoop.hbase.ipc.RpcServerFactory;
-101import 
org.apache.hadoop.hbase.ipc.RpcServerInterface;
-102import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-103import 
org.apache.hadoop.hbase.ipc.ServerRpcController;
-104import 

[14/51] [partial] hbase-site git commit: Published site at cd161d976ef47b84e904f2d54bac65d2f3417c2a.

2018-09-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/fa1bebf8/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionStat.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionStat.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionStat.html
index df4d2d2..20442d4 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionStat.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionStat.html
@@ -552,1331 +552,1334 @@
 544}
 545  }
 546
-547  public void assign(RegionInfo regionInfo, ServerName sn) throws IOException {
-548    // TODO: should we use getRegionStateNode?
-549    RegionStateNode regionNode = regionStates.getOrCreateRegionStateNode(regionInfo);
-550    TransitRegionStateProcedure proc;
-551    regionNode.lock();
-552    try {
-553      preTransitCheck(regionNode, STATES_EXPECTED_ON_ASSIGN);
-554      proc = TransitRegionStateProcedure.assign(getProcedureEnvironment(), regionInfo, sn);
-555      regionNode.setProcedure(proc);
-556    } finally {
-557      regionNode.unlock();
-558    }
-559    ProcedureSyncWait.submitAndWaitProcedure(master.getMasterProcedureExecutor(), proc);
-560  }
-561
-562  public void assign(RegionInfo regionInfo) throws IOException {
-563    assign(regionInfo, null);
-564  }
-565
-566  public void unassign(RegionInfo regionInfo) throws IOException {
-567    RegionStateNode regionNode = regionStates.getRegionStateNode(regionInfo);
-568    if (regionNode == null) {
-569      throw new UnknownRegionException("No RegionState found for " + regionInfo.getEncodedName());
-570    }
-571    TransitRegionStateProcedure proc;
-572    regionNode.lock();
-573    try {
-574      preTransitCheck(regionNode, STATES_EXPECTED_ON_UNASSIGN_OR_MOVE);
-575      proc = TransitRegionStateProcedure.unassign(getProcedureEnvironment(), regionInfo);
-576      regionNode.setProcedure(proc);
-577    } finally {
-578      regionNode.unlock();
-579    }
-580    ProcedureSyncWait.submitAndWaitProcedure(master.getMasterProcedureExecutor(), proc);
-581  }
-582
-583  private TransitRegionStateProcedure createMoveRegionProcedure(RegionInfo regionInfo,
-584      ServerName targetServer) throws HBaseIOException {
-585    RegionStateNode regionNode = this.regionStates.getRegionStateNode(regionInfo);
-586    if (regionNode == null) {
-587      throw new UnknownRegionException("No RegionState found for " + regionInfo.getEncodedName());
-588    }
-589    TransitRegionStateProcedure proc;
-590    regionNode.lock();
-591    try {
-592      preTransitCheck(regionNode, STATES_EXPECTED_ON_UNASSIGN_OR_MOVE);
-593      regionNode.checkOnline();
-594      proc = TransitRegionStateProcedure.move(getProcedureEnvironment(), regionInfo, targetServer);
-595      regionNode.setProcedure(proc);
-596    } finally {
-597      regionNode.unlock();
-598    }
-599    return proc;
-600  }
-601
-602  public void move(RegionInfo regionInfo) throws IOException {
-603    TransitRegionStateProcedure proc = createMoveRegionProcedure(regionInfo, null);
-604    ProcedureSyncWait.submitAndWaitProcedure(master.getMasterProcedureExecutor(), proc);
-605  }
-606
-607  public Future<byte[]> moveAsync(RegionPlan regionPlan) throws HBaseIOException {
-608    TransitRegionStateProcedure proc =
-609      createMoveRegionProcedure(regionPlan.getRegionInfo(), regionPlan.getDestination());
-610    return ProcedureSyncWait.submitProcedure(master.getMasterProcedureExecutor(), proc);
-611  }
-612
-613  // ============================================================================================
-614  //  RegionTransition procedures helpers
-615  // ============================================================================================
-616
-617  /**
-618   * Create round-robin assigns. Use on table creation to distribute out regions across cluster.
-619   * @return AssignProcedures made out of the passed in <code>hris</code> and a call to the balancer
-620   *         to populate the assigns with targets chosen using round-robin (default balancer
-621   *         scheme). If at assign-time, the target chosen is no longer up, that's fine, the
-622   *         AssignProcedure will ask the balancer for a new target, and so on.
-623   */
-624  public TransitRegionStateProcedure[] createRoundRobinAssignProcedures(List<RegionInfo> hris,
-625      List<ServerName> serversToExclude) {
-626    if (hris.isEmpty()) {
-627      return new TransitRegionStateProcedure[0];
-628    }
-629
-630    if (serversToExclude != null
-631        && this.master.getServerManager().getOnlineServersList().size() == 1) {
-632      LOG.debug("Only one region server found and hence going ahead with the 
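assign(), unassign() and createMoveRegionProcedure() above all share one shape: lock the region node, verify its state, attach exactly one TransitRegionStateProcedure, unlock, then submit. A stripped-down sketch of that shape with stand-in types (not the real AssignmentManager API):

// Hedged sketch of the lock -> pre-check -> attach -> submit pattern above;
// Node/Proc are illustrative stand-ins for RegionStateNode and
// TransitRegionStateProcedure.
import java.util.concurrent.locks.ReentrantLock;

public class TransitSketch {
  static class Proc {}

  static class Node {
    private final ReentrantLock lock = new ReentrantLock();
    private Proc procedure;

    void transit(Proc proc) {
      lock.lock();
      try {
        if (procedure != null) {
          throw new IllegalStateException("region already in transition");
        }
        procedure = proc;   // only one procedure may own a region at a time
      } finally {
        lock.unlock();      // submission happens outside the node lock
      }
      submitAndWait(proc);
    }

    void submitAndWait(Proc proc) {
      // A real AssignmentManager hands proc to the master's ProcedureExecutor here.
    }
  }
}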

[14/51] [partial] hbase-site git commit: Published site at c6a65ba63fce85ac7c4b62b96ef2bbe6c35d2f00.

2018-09-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/293abb17/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
index 2f5e86e..475f5b2 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
@@ -30,837 +30,841 @@
 022import java.util.ArrayList;
 023import java.util.Arrays;
 024import java.util.Collection;
-025import java.util.HashMap;
-026import java.util.List;
-027import java.util.Map;
-028import java.util.concurrent.Callable;
-029import java.util.concurrent.ExecutionException;
-030import java.util.concurrent.ExecutorService;
-031import java.util.concurrent.Executors;
-032import java.util.concurrent.Future;
-033import java.util.concurrent.TimeUnit;
-034import java.util.stream.Stream;
-035import org.apache.hadoop.conf.Configuration;
-036import org.apache.hadoop.fs.FileSystem;
-037import org.apache.hadoop.fs.Path;
-038import org.apache.hadoop.hbase.DoNotRetryIOException;
-039import org.apache.hadoop.hbase.HConstants;
-040import org.apache.hadoop.hbase.ServerName;
-041import org.apache.hadoop.hbase.TableName;
-042import org.apache.hadoop.hbase.UnknownRegionException;
-043import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-044import org.apache.hadoop.hbase.client.MasterSwitchType;
-045import org.apache.hadoop.hbase.client.Mutation;
-046import org.apache.hadoop.hbase.client.RegionInfo;
-047import org.apache.hadoop.hbase.client.RegionInfoBuilder;
-048import org.apache.hadoop.hbase.client.TableDescriptor;
-049import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-050import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
-051import org.apache.hadoop.hbase.master.MasterFileSystem;
-052import org.apache.hadoop.hbase.master.RegionState.State;
-053import org.apache.hadoop.hbase.master.normalizer.NormalizationPlan;
-054import org.apache.hadoop.hbase.master.procedure.AbstractStateMachineRegionProcedure;
-055import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
-056import org.apache.hadoop.hbase.master.procedure.MasterProcedureUtil;
-057import org.apache.hadoop.hbase.procedure2.ProcedureMetrics;
-058import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
-059import org.apache.hadoop.hbase.quotas.QuotaExceededException;
-060import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-061import org.apache.hadoop.hbase.regionserver.HStore;
-062import org.apache.hadoop.hbase.regionserver.HStoreFile;
-063import org.apache.hadoop.hbase.regionserver.RegionSplitPolicy;
-064import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-065import org.apache.hadoop.hbase.util.Bytes;
-066import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-067import org.apache.hadoop.hbase.util.FSUtils;
-068import org.apache.hadoop.hbase.util.Pair;
-069import org.apache.hadoop.hbase.util.Threads;
-070import org.apache.hadoop.hbase.wal.WALSplitter;
-071import org.apache.hadoop.util.ReflectionUtils;
-072import org.apache.yetus.audience.InterfaceAudience;
-073import org.slf4j.Logger;
-074import org.slf4j.LoggerFactory;
-075
-076import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-077
-078import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-079import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-080import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
-081import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionState;
-082
-083/**
-084 * The procedure to split a region in a table.
-085 * Takes lock on the parent region.
-086 * It holds the lock for the life of the procedure.
-087 * <p>Throws exception on construction if it determines the context hostile to split (cluster going
-088 * down, or master is shutting down, or table is disabled).</p>
-089 */
-090@InterfaceAudience.Private
-091public class SplitTableRegionProcedure
-092    extends AbstractStateMachineRegionProcedure<SplitTableRegionState> {
-093  private static final Logger LOG = LoggerFactory.getLogger(SplitTableRegionProcedure.class);
-094  private Boolean traceEnabled = null;
-095  private RegionInfo daughter_1_RI;
-096  private RegionInfo daughter_2_RI;
-097  private byte[] bestSplitRow;
-098  private RegionSplitPolicy splitPolicy;
-099
-100  public SplitTableRegionProcedure() {
-101    // Required by the Procedure framework to create the procedure on replay
-102  }
-103
-104  public SplitTableRegionProcedure(final MasterProcedureEnv env,
-105      final RegionInfo regionToSplit, final byte[] 
[14/51] [partial] hbase-site git commit: Published site at 7c1fad4992a169a35b4457e6f4afcb30d04406e9.

2018-08-31 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/74f60271/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/Procedure.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/Procedure.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/Procedure.html
index 3cfacfc..c081310 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/Procedure.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/Procedure.html
@@ -153,858 +153,901 @@
 145  private boolean lockedWhenLoading = false;
 146
 147  /**
-148   * The main code of the procedure. It must be idempotent since execute()
-149   * may be called multiple times in case of machine failure in the middle
-150   * of the execution.
-151   * @param env the environment passed to the ProcedureExecutor
-152   * @return a set of sub-procedures to run or ourselves if there is more work to do or null if the
-153   *         procedure is done.
-154   * @throws ProcedureYieldException the procedure will be added back to the queue and retried later.
-155   * @throws InterruptedException the procedure will be added back to the queue and retried later.
-156   * @throws ProcedureSuspendedException Signal to the executor that Procedure has suspended itself and
-157   *         has set itself up waiting for an external event to wake it back up again.
-158   */
-159  protected abstract Procedure<TEnvironment>[] execute(TEnvironment env)
-160    throws ProcedureYieldException, ProcedureSuspendedException, InterruptedException;
-161
-162  /**
-163   * The code to undo what was done by the execute() code.
-164   * It is called when the procedure or one of the sub-procedures failed or an
-165   * abort was requested. It should clean up all the resources created by
-166   * the execute() call. The implementation must be idempotent since rollback()
-167   * may be called multiple times in case of machine failure in the middle
-168   * of the execution.
-169   * @param env the environment passed to the ProcedureExecutor
-170   * @throws IOException temporary failure, the rollback will retry later
-171   * @throws InterruptedException the procedure will be added back to the queue and retried later
-172   */
-173  protected abstract void rollback(TEnvironment env)
-174    throws IOException, InterruptedException;
-175
-176  /**
-177   * The abort() call is asynchronous and each procedure must decide how to deal
-178   * with it, if they want to be abortable. The simplest implementation
-179   * is to have an AtomicBoolean set in the abort() method and then the execute()
-180   * will check if the abort flag is set or not.
-181   * abort() may be called multiple times from the client, so the implementation
-182   * must be idempotent.
-183   *
-184   * <p>NOTE: abort() is not like Thread.interrupt(). It is just a notification
-185   * that allows the procedure implementor to abort.
-186   */
-187  protected abstract boolean abort(TEnvironment env);
-188
-189  /**
-190   * The user-level code of the procedure may have some state to
-191   * persist (e.g. input arguments or current position in the processing state) to
-192   * be able to resume on failure.
-193   * @param serializer stores the serializable state
-194   */
-195  protected abstract void serializeStateData(ProcedureStateSerializer serializer)
-196    throws IOException;
-197
-198  /**
-199   * Called on store load to allow the user to decode the previously serialized
-200   * state.
-201   * @param serializer contains the serialized state
-202   */
-203  protected abstract void deserializeStateData(ProcedureStateSerializer serializer)
-204    throws IOException;
-205
-206  /**
-207   * The {@link #doAcquireLock(Object, ProcedureStore)} will be split into two steps: first, it will
-208   * call us to determine whether we need to wait for initialization; second, it will call
-209   * {@link #acquireLock(Object)} to actually handle the lock for this procedure.
-210   * <p/>
-211   * This is because when the master restarts, we need to restore the lock state for all the
-212   * procedures to not break the semantic if {@link #holdLock(Object)} is true. But the
-213   * {@link ProcedureExecutor} will be started before the master finishes initialization (as it is part
-214   * of the initialization!), so we need to split the code into two steps, and when restoring, we just
-215   * restore the lock part and ignore the waitInitialized part. Otherwise there will be a dead lock.
-216   * @return true means we need to wait until the environment has been initialized, otherwise false.
-217   */
-218  protected boolean waitInitialized(TEnvironment env) {
-219    return false;
-220  }
-221
-222  /**
-223   * The user should override this method if they need a lock on an Entity. A lock can be anything,
-224   * and it is up to the implementor. The Procedure Framework will call this method just 
[14/51] [partial] hbase-site git commit: Published site at 3afe9fb7e6ebfa71187cbe131558a83fae61cecd.

2018-08-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/424d7e41/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.html 
b/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.html
index 938004a..dbd4c1b 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";

-static class HBaseTestingUtility.PortAllocator
+static class HBaseTestingUtility.PortAllocator
extends java.lang.Object

@@ -250,7 +250,7 @@ extends java.lang.Object

MIN_RANDOM_PORT
-private static final int MIN_RANDOM_PORT
+private static final int MIN_RANDOM_PORT

See Also:
Constant Field Values

@@ -263,7 +263,7 @@ extends java.lang.Object

MAX_RANDOM_PORT
-private static final int MAX_RANDOM_PORT
+private static final int MAX_RANDOM_PORT

See Also:
Constant Field Values

@@ -276,7 +276,7 @@ extends java.lang.Object

takenRandomPorts
-private final java.util.Set<java.lang.Integer> takenRandomPorts
+private final java.util.Set<java.lang.Integer> takenRandomPorts
A set of ports that have been claimed using randomFreePort().

@@ -286,7 +286,7 @@ extends java.lang.Object

random
-private final java.util.Random random
+private final java.util.Random random

@@ -295,7 +295,7 @@ extends java.lang.Object

portChecker
-private final HBaseTestingUtility.PortAllocator.AvailablePortChecker portChecker
+private final HBaseTestingUtility.PortAllocator.AvailablePortChecker portChecker

@@ -312,7 +312,7 @@ extends java.lang.Object

PortAllocator
-public PortAllocator(java.util.Random random)
+public PortAllocator(java.util.Random random)

@@ -321,7 +321,7 @@ extends java.lang.Object

PortAllocator
-public PortAllocator(java.util.Random random,
+public PortAllocator(java.util.Random random,
                      HBaseTestingUtility.PortAllocator.AvailablePortChecker portChecker)

@@ -339,7 +339,7 @@ extends java.lang.Object

randomFreePort
-public int randomFreePort()
+public int randomFreePort()
Returns a random free port and marks that port as taken. Not thread-safe. Expected to be
called from single-threaded test setup code.

@@ -350,7 +350,7 @@ extends java.lang.Object

randomPort
-private int randomPort()
+private int randomPort()
Returns a random port. These ports cannot be registered with IANA and are
intended for dynamic allocation (see http://bit.ly/dynports).

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/424d7e41/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.SeenRowTracker.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.SeenRowTracker.html
 
b/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.SeenRowTracker.html
index a668ca4..360156e 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.SeenRowTracker.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.SeenRowTracker.html
@@ -113,7 +113,7 @@ var activeTableTab = 

[14/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RowRange.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RowRange.html 
b/devapidocs/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RowRange.html
index 745b957..c69ff75 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RowRange.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RowRange.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";

 @InterfaceAudience.Public
-public static class MultiRowRangeFilter.RowRange
+public static class MultiRowRangeFilter.RowRange
 extends java.lang.Object
 implements java.lang.Comparable<MultiRowRangeFilter.RowRange>

@@ -217,22 +217,30 @@ implements java.lang.Comparable
 int length)

+boolean equals(java.lang.Object obj)
+
 byte[] getStartRow()

 byte[] getStopRow()

+int hashCode()
+
 boolean isStartRowInclusive()

 boolean isStopRowInclusive()

 boolean isValid()

@@ -242,7 +250,7 @@ implements java.lang.Comparable

 Methods inherited from class java.lang.Object
-clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
+clone, finalize, getClass, notify, notifyAll, toString, wait, wait, wait

[14/51] [partial] hbase-site git commit: Published site at 6a5b4f2a5c188f8eef4f2250b8b7db7dd1e750e4.

2018-08-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1ff05a18/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
index 3559952..bd7445a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
@@ -359,2396 +359,2401 @@
 351switch (inMemoryCompaction) {
 352  case NONE:
 353ms = 
ReflectionUtils.newInstance(DefaultMemStore.class,
-354new Object[]{conf, 
this.comparator});
-355break;
-356  default:
-357Class<? extends CompactingMemStore> clz = conf.getClass(MEMSTORE_CLASS_NAME,
-358CompactingMemStore.class, 
CompactingMemStore.class);
-359ms = 
ReflectionUtils.newInstance(clz, new Object[]{conf, this.comparator, this,
-360
this.getHRegion().getRegionServicesForStores(), inMemoryCompaction});
-361}
-362return ms;
-363  }
-364
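A hedged sketch of driving the default branch above from configuration; the key string below is an assumption standing in for HStore.MEMSTORE_CLASS_NAME:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class MemstoreClassConfigDemo {
      // Assumed to mirror HStore.MEMSTORE_CLASS_NAME from the snippet above.
      static final String MEMSTORE_CLASS_NAME = "hbase.regionserver.memstore.class";

      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Reading the configured class falls back to the supplied default,
        // the same shape conf.getClass(...) takes in createMemstore().
        Class<?> clz = conf.getClass(MEMSTORE_CLASS_NAME, Object.class);
        System.out.println("memstore impl: " + clz.getName());
      }
    }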
-365  /**
-366   * Creates the cache config.
-367   * @param family The current column 
family.
-368   */
-369  protected void createCacheConf(final 
ColumnFamilyDescriptor family) {
-370this.cacheConf = new 
CacheConfig(conf, family);
-371  }
-372
-373  /**
-374   * Creates the store engine configured 
for the given Store.
-375   * @param store The store. An 
unfortunate dependency needed due to it
-376   *  being passed to 
coprocessors via the compactor.
-377   * @param conf Store configuration.
-378   * @param kvComparator KVComparator for 
storeFileManager.
-379   * @return StoreEngine to use.
-380   */
-381  protected StoreEngine<?, ?, ?, ?> createStoreEngine(HStore store, Configuration conf,
-382  CellComparator kvComparator) throws 
IOException {
-383return StoreEngine.create(store, 
conf, comparator);
-384  }
-385
-386  /**
-387   * @param family
-388   * @return TTL in seconds of the 
specified family
-389   */
-390  public static long 
determineTTLFromFamily(final ColumnFamilyDescriptor family) {
-391// HCD.getTimeToLive returns ttl in 
seconds.  Convert to milliseconds.
-392long ttl = family.getTimeToLive();
-393if (ttl == HConstants.FOREVER) {
-394  // Default is unlimited ttl.
-395  ttl = Long.MAX_VALUE;
-396} else if (ttl == -1) {
-397  ttl = Long.MAX_VALUE;
-398} else {
-399  // Second - ms adjust for user 
data
-400  ttl *= 1000;
-401}
-402return ttl;
-403  }
-404
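The TTL normalization above is easy to restate in isolation; the FOREVER value below is an assumption of this sketch (HConstants.FOREVER in the real code):

    public final class TtlRuleDemo {
      static final long FOREVER = Integer.MAX_VALUE; // assumed value

      static long ttlMillis(long ttlSeconds) {
        if (ttlSeconds == FOREVER || ttlSeconds == -1) {
          return Long.MAX_VALUE; // unlimited TTL
        }
        return ttlSeconds * 1000; // seconds -> milliseconds
      }

      public static void main(String[] args) {
        System.out.println(ttlMillis(86400)); // one day: 86400000
        System.out.println(ttlMillis(-1));    // unlimited: Long.MAX_VALUE
      }
    }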
-405  @Override
-406  public String getColumnFamilyName() {
-407return 
this.family.getNameAsString();
-408  }
-409
-410  @Override
-411  public TableName getTableName() {
-412return 
this.getRegionInfo().getTable();
-413  }
-414
-415  @Override
-416  public FileSystem getFileSystem() {
-417return this.fs.getFileSystem();
-418  }
-419
-420  public HRegionFileSystem 
getRegionFileSystem() {
-421return this.fs;
-422  }
-423
-424  /* Implementation of 
StoreConfigInformation */
-425  @Override
-426  public long getStoreFileTtl() {
-427// TTL only applies if there's no 
MIN_VERSIONs setting on the column.
-428return 
(this.scanInfo.getMinVersions() == 0) ? this.scanInfo.getTtl() : 
Long.MAX_VALUE;
-429  }
-430
-431  @Override
-432  public long getMemStoreFlushSize() {
-433// TODO: Why is this in here?  The 
flushsize of the region rather than the store?  St.Ack
-434return 
this.region.memstoreFlushSize;
-435  }
-436
-437  @Override
-438  public MemStoreSize getFlushableSize() 
{
-439return 
this.memstore.getFlushableSize();
-440  }
-441
-442  @Override
-443  public MemStoreSize getSnapshotSize() 
{
-444return 
this.memstore.getSnapshotSize();
-445  }
-446
-447  @Override
-448  public long 
getCompactionCheckMultiplier() {
-449return 
this.compactionCheckMultiplier;
-450  }
-451
-452  @Override
-453  public long getBlockingFileCount() {
-454return blockingFileCount;
-455  }
-456  /* End implementation of 
StoreConfigInformation */
-457
-458  /**
-459   * Returns the configured 
bytesPerChecksum value.
-460   * @param conf The configuration
-461   * @return The bytesPerChecksum that is 
set in the configuration
-462   */
-463  public static int 
getBytesPerChecksum(Configuration conf) {
-464return 
conf.getInt(HConstants.BYTES_PER_CHECKSUM,
-465   
HFile.DEFAULT_BYTES_PER_CHECKSUM);
-466  }
-467
-468  /**
-469   * Returns the configured checksum 
algorithm.
-470   * @param conf The configuration
-471   * @return The checksum algorithm that 
is set in the configuration
-472   */
-473  public static ChecksumType 
getChecksumType(Configuration conf) {
-474String checksumName = 
conf.get(HConstants.CHECKSUM_TYPE_NAME);
-475if (checksumName == null) {
-476  return 
ChecksumType.getDefaultChecksumType();
-477} else {
-478  return 

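Both helpers above read their values from the Configuration. A hedged usage sketch (the checksum algorithm name passed here is an assumption):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.regionserver.HStore;
    import org.apache.hadoop.hbase.util.ChecksumType;

    public class ChecksumConfigDemo {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Override the defaults; both keys are read by the helpers above.
        conf.setInt(HConstants.BYTES_PER_CHECKSUM, 16384);
        conf.set(HConstants.CHECKSUM_TYPE_NAME, "CRC32C"); // name is assumed
        int bytesPerChecksum = HStore.getBytesPerChecksum(conf);
        ChecksumType type = HStore.getChecksumType(conf);
        System.out.println(bytesPerChecksum + " bytes per " + type);
      }
    }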
[14/51] [partial] hbase-site git commit: Published site at 63f2d3cbdc8151f5f61f33e0a078c51b9ac076a5.

2018-08-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7ae6a80c/devapidocs/org/apache/hadoop/hbase/master/procedure/InitMetaProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/InitMetaProcedure.html 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/InitMetaProcedure.html
index 5c286c0..5b73b0a 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/InitMetaProcedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/InitMetaProcedure.html
@@ -129,10 +129,10 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class InitMetaProcedure
+public class InitMetaProcedure
 extends AbstractStateMachineTableProcedureorg.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.InitMetaState
This procedure is used to initialize the meta table for a new hbase deploy. It will just schedule an
- AssignProcedure to assign meta.
+ TransitRegionStateProcedure to assign meta.
 
 
 
@@ -359,7 +359,7 @@ extends 
 
 latch
-private CountDownLatch latch
+private CountDownLatch latch
 
 
 
@@ -376,7 +376,7 @@ extends 
 
 InitMetaProcedure
-public InitMetaProcedure()
+public InitMetaProcedure()
 
 
 
@@ -393,7 +393,7 @@ extends 
 
 getTableName
-public TableName getTableName()
+public TableName getTableName()
 
 Specified by:
 getTableNamein
 interfaceTableProcedureInterface
@@ -410,7 +410,7 @@ extends 
 
 getTableOperationType
-public TableProcedureInterface.TableOperationType getTableOperationType()
+public TableProcedureInterface.TableOperationType getTableOperationType()
 Description copied from 
interface:TableProcedureInterface
 Given an operation type we can make decisions about what to do with pending operations,
  e.g. if we get a delete and we have some table operation pending (e.g. add column)
@@ -431,7 +431,7 @@ extends 
 
 executeFromState
-protected StateMachineProcedure.Flow executeFromState(MasterProcedureEnv env,
+protected StateMachineProcedure.Flow executeFromState(MasterProcedureEnv env,
   org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.InitMetaState state)
throws ProcedureSuspendedException,
   ProcedureYieldException,
@@ -458,7 +458,7 @@ extends 
 
 waitInitialized
-protected boolean waitInitialized(MasterProcedureEnv env)
+protected boolean waitInitialized(MasterProcedureEnv env)
 Description copied from 
class:Procedure
The Procedure.doAcquireLock(Object, ProcedureStore) will be split into two steps: first, it will
 call us to determine whether we need to wait for initialization; second, it will call
@@ -483,7 +483,7 @@ extends 
 
 acquireLock
-protected Procedure.LockState acquireLock(MasterProcedureEnv env)
+protected Procedure.LockState acquireLock(MasterProcedureEnv env)
 Description copied from 
class:Procedure
 The user should override this method if they need a lock on 
an Entity. A lock can be anything,
  and it is up to the implementor. The Procedure Framework will call this 
method just before it
@@ -520,7 +520,7 @@ extends 
 
 rollbackState
-protected void rollbackState(MasterProcedureEnv env,
+protected void rollbackState(MasterProcedureEnv env,
  org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.InitMetaState state)
   throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException,
  https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true;
 title="class or interface in java.lang">InterruptedException
@@ -542,7 +542,7 @@ extends 
 
 getState
-protected org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.InitMetaState getState(int stateId)
+protected org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.InitMetaState getState(int stateId)
 Description copied from 
class:StateMachineProcedure
 Convert an ordinal (or state id) to an Enum (or more 
descriptive) state object.
 
@@ -561,7 +561,7 @@ extends 
 
 getStateId
-protected int getStateId(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.InitMetaState state)
+protected int getStateId(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.InitMetaState state)
 Description copied from 
class:StateMachineProcedure
 Convert the Enum (or more descriptive) state object to an 
ordinal (or state id).
 
@@ -580,7 +580,7 @@ extends 
 
 getInitialState

[14/51] [partial] hbase-site git commit: Published site at 092efb42749bf7fc6ad338c96aae8e7b9d3a2c74.

2018-08-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f3d62514/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.EntryBuffers.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.EntryBuffers.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.EntryBuffers.html
index 63e4b46..514f830 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.EntryBuffers.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.EntryBuffers.html
@@ -468,15 +468,15 @@
 460   * creating it if necessary.
 461   * @param logEntry
 462   * @param fileNameBeingSplit the file 
being split currently. Used to generate tmp file name.
-463   * @param conf
-464   * @return Path to file into which to 
dump split log edits.
-465   * @throws IOException
-466   */
-467  @SuppressWarnings("deprecation")
-468  @VisibleForTesting
-469  static Path 
getRegionSplitEditsPath(final Entry logEntry, String fileNameBeingSplit,
-470  Configuration conf)
-471  throws IOException {
+463   * @param tmpDirName Name of the directory used to sideline old recovered edits files
+464   * @param conf
+465   * @return Path to file into which to 
dump split log edits.
+466   * @throws IOException
+467   */
+468  @SuppressWarnings("deprecation")
+469  @VisibleForTesting
+470  static Path 
getRegionSplitEditsPath(final Entry logEntry, String fileNameBeingSplit,
+471  String tmpDirName, Configuration 
conf) throws IOException {
 472FileSystem fs = 
FileSystem.get(conf);
 473Path rootDir = 
FSUtils.getRootDir(conf);
 474Path tableDir = 
FSUtils.getTableDir(rootDir, logEntry.getKey().getTableName());
@@ -491,7 +491,7 @@
 483  return null;
 484}
 485if (fs.exists(dir) && fs.isFile(dir)) {
-486  Path tmp = new Path("/tmp");
+486  Path tmp = new Path(tmpDirName);
 487  if (!fs.exists(tmp)) {
 488fs.mkdirs(tmp);
 489  }
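The hunk replaces the hardcoded "/tmp" sideline directory with a caller-supplied tmpDirName. A hedged sketch of the resulting call shape; the config key and default shown are assumptions of this sketch, not taken from the patch:

    // logEntry, fileName and conf are assumed in scope, as in the patch itself.
    String tmpDirName = conf.get("hbase.fs.tmp.dir", "/tmp"); // assumed key/default
    Path regionEdits = getRegionSplitEditsPath(logEntry, fileName, tmpDirName, conf);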
@@ -1520,411 +1520,413 @@
 1512 * @return a path with a writer for that path. Caller should close.
 1513 */
 1514WriterAndPath createWAP(byte[] 
region, Entry entry) throws IOException {
-1515  Path regionedits = 
getRegionSplitEditsPath(entry,
-1516  
fileBeingSplit.getPath().getName(), conf);
-1517  if (regionedits == null) {
-1518return null;
-1519  }
-1520  FileSystem rootFs = 
FileSystem.get(conf);
-1521  if (rootFs.exists(regionedits)) 
{
-1522LOG.warn("Found old edits file. 
It could be the "
-1523+ "result of a previous 
failed split attempt. Deleting " + regionedits + ", length="
-1524+ 
rootFs.getFileStatus(regionedits).getLen());
-1525if (!rootFs.delete(regionedits, 
false)) {
-1526  LOG.warn("Failed delete of old 
{}", regionedits);
-1527}
-1528  }
-1529  Writer w = 
createWriter(regionedits);
-1530  LOG.debug("Creating writer 
path={}", regionedits);
-1531  return new 
WriterAndPath(regionedits, w, entry.getKey().getSequenceId());
-1532}
-1533
-1534void filterCellByStore(Entry 
logEntry) {
-1535  Map<byte[], Long> maxSeqIdInStores =
-1536  
regionMaxSeqIdInStores.get(Bytes.toString(logEntry.getKey().getEncodedRegionName()));
-1537  if 
(MapUtils.isEmpty(maxSeqIdInStores)) {
-1538return;
-1539  }
-1540  // Create the array list for the 
cells that aren't filtered.
-1541  // We make the assumption that 
most cells will be kept.
-1542  ArrayList<Cell> keptCells = new ArrayList<>(logEntry.getEdit().getCells().size());
-1543  for (Cell cell : 
logEntry.getEdit().getCells()) {
-1544if 
(CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) {
-1545  keptCells.add(cell);
-1546} else {
-1547  byte[] family = 
CellUtil.cloneFamily(cell);
-1548  Long maxSeqId = 
maxSeqIdInStores.get(family);
-1549  // Do not skip cell even if 
maxSeqId is null. Maybe we are in a rolling upgrade,
-1550  // or the master was crashed 
before and we can not get the information.
-1551  if (maxSeqId == null || maxSeqId.longValue() < logEntry.getKey().getSequenceId()) {
-1552keptCells.add(cell);
-1553  }
-1554}
-1555  }
-1556
-1557  // Anything in the keptCells array 
list is still live.
-1558  // So rather than removing the 
cells from the array list
-1559  // which would be an O(n^2) 
operation, we just replace the list
-1560  
logEntry.getEdit().setCells(keptCells);
-1561}
-1562
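The comment in filterCellByStore above justifies rebuilding the cell list instead of removing from it. The same tradeoff in isolation, as a standalone sketch:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class KeepListDemo {
      public static void main(String[] args) {
        List<Integer> cells = new ArrayList<>(Arrays.asList(1, 2, 3, 4));
        // One O(n) pass that collects survivors, instead of O(n^2) in-place removal.
        List<Integer> kept = new ArrayList<>(cells.size());
        for (int c : cells) {
          if (c % 2 == 0) {
            kept.add(c);
          }
        }
        cells = kept; // swap in the filtered list, as setCells(keptCells) does above
        System.out.println(cells); // [2, 4]
      }
    }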
-1563@Override
-1564public void append(RegionEntryBuffer 
buffer) throws IOException {
-1565  appendBuffer(buffer, true);
-1566}
-1567
-1568WriterAndPath 
appendBuffer(RegionEntryBuffer buffer, boolean reusable) throws IOException{
-1569  ListEntry entries = 
buffer.entryBuffer;
-1570  if (entries.isEmpty()) {
-1571LOG.warn("got an empty buffer, 
skipping");
-1572return null;

[14/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

2018-08-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
index bd3c59e..21e240a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
@@ -33,62 +33,62 @@
 025import java.io.FileNotFoundException;
 026import java.io.FileOutputStream;
 027import java.io.IOException;
-028import java.io.ObjectInputStream;
-029import java.io.ObjectOutputStream;
-030import java.io.Serializable;
-031import java.nio.ByteBuffer;
-032import java.util.ArrayList;
-033import java.util.Comparator;
-034import java.util.HashSet;
-035import java.util.Iterator;
-036import java.util.List;
-037import java.util.Map;
-038import java.util.NavigableSet;
-039import java.util.PriorityQueue;
-040import java.util.Set;
-041import 
java.util.concurrent.ArrayBlockingQueue;
-042import 
java.util.concurrent.BlockingQueue;
-043import 
java.util.concurrent.ConcurrentHashMap;
-044import 
java.util.concurrent.ConcurrentMap;
-045import 
java.util.concurrent.ConcurrentSkipListSet;
-046import java.util.concurrent.Executors;
-047import 
java.util.concurrent.ScheduledExecutorService;
-048import java.util.concurrent.TimeUnit;
-049import 
java.util.concurrent.atomic.AtomicInteger;
-050import 
java.util.concurrent.atomic.AtomicLong;
-051import 
java.util.concurrent.atomic.LongAdder;
-052import java.util.concurrent.locks.Lock;
-053import 
java.util.concurrent.locks.ReentrantLock;
-054import 
java.util.concurrent.locks.ReentrantReadWriteLock;
-055import 
org.apache.hadoop.conf.Configuration;
-056import 
org.apache.hadoop.hbase.HBaseConfiguration;
-057import 
org.apache.hadoop.hbase.io.HeapSize;
-058import 
org.apache.hadoop.hbase.io.hfile.BlockCache;
-059import 
org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
-060import 
org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;
-061import 
org.apache.hadoop.hbase.io.hfile.BlockPriority;
-062import 
org.apache.hadoop.hbase.io.hfile.BlockType;
-063import 
org.apache.hadoop.hbase.io.hfile.CacheStats;
-064import 
org.apache.hadoop.hbase.io.hfile.Cacheable;
-065import 
org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
-066import 
org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;
-067import 
org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;
-068import 
org.apache.hadoop.hbase.io.hfile.CachedBlock;
-069import 
org.apache.hadoop.hbase.io.hfile.HFileBlock;
-070import 
org.apache.hadoop.hbase.nio.ByteBuff;
-071import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-072import 
org.apache.hadoop.hbase.util.HasThread;
-073import 
org.apache.hadoop.hbase.util.IdReadWriteLock;
-074import 
org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType;
-075import 
org.apache.hadoop.hbase.util.UnsafeAvailChecker;
-076import 
org.apache.hadoop.util.StringUtils;
-077import 
org.apache.yetus.audience.InterfaceAudience;
-078import org.slf4j.Logger;
-079import org.slf4j.LoggerFactory;
-080
-081import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-082import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-083import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+028import java.io.Serializable;
+029import java.nio.ByteBuffer;
+030import java.util.ArrayList;
+031import java.util.Comparator;
+032import java.util.HashSet;
+033import java.util.Iterator;
+034import java.util.List;
+035import java.util.Map;
+036import java.util.NavigableSet;
+037import java.util.PriorityQueue;
+038import java.util.Set;
+039import 
java.util.concurrent.ArrayBlockingQueue;
+040import 
java.util.concurrent.BlockingQueue;
+041import 
java.util.concurrent.ConcurrentHashMap;
+042import 
java.util.concurrent.ConcurrentMap;
+043import 
java.util.concurrent.ConcurrentSkipListSet;
+044import java.util.concurrent.Executors;
+045import 
java.util.concurrent.ScheduledExecutorService;
+046import java.util.concurrent.TimeUnit;
+047import 
java.util.concurrent.atomic.AtomicInteger;
+048import 
java.util.concurrent.atomic.AtomicLong;
+049import 
java.util.concurrent.atomic.LongAdder;
+050import java.util.concurrent.locks.Lock;
+051import 
java.util.concurrent.locks.ReentrantLock;
+052import 
java.util.concurrent.locks.ReentrantReadWriteLock;
+053import 
org.apache.hadoop.conf.Configuration;
+054import 
org.apache.hadoop.hbase.HBaseConfiguration;
+055import 
org.apache.hadoop.hbase.io.HeapSize;
+056import 
org.apache.hadoop.hbase.io.hfile.BlockCache;
+057import 
org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
+058import 
org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;
+059import 
org.apache.hadoop.hbase.io.hfile.BlockPriority;

[14/51] [partial] hbase-site git commit: Published site at ba5d1c1f28301adc99019d9d6c4a04fac98ae511.

2018-07-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/804782f0/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.CompletedProcedureRetainer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.CompletedProcedureRetainer.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.CompletedProcedureRetainer.html
index 7e90abb..eb6b145 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.CompletedProcedureRetainer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.CompletedProcedureRetainer.html
@@ -103,13 +103,13 @@
 
 
 
-private ConcurrentHashMap<Long, ProcedureExecutor.CompletedProcedureRetainer>
+private ConcurrentHashMap<Long, ProcedureExecutor.CompletedProcedureRetainer<TEnvironment>>
 ProcedureExecutor.completed
 Maps the procId returned by submitProcedure(), the Root-ProcID, to the Procedure.


-private Map<Long, ProcedureExecutor.CompletedProcedureRetainer>
+private Map<Long, ProcedureExecutor.CompletedProcedureRetainer<TEnvironment>>
 ProcedureExecutor.CompletedProcedureCleaner.completed
 
 
@@ -123,7 +123,7 @@
 
 CompletedProcedureCleaner(org.apache.hadoop.conf.Configuration conf,
  ProcedureStore store,
- Map<Long, ProcedureExecutor.CompletedProcedureRetainer> completedMap,
+ Map<Long, ProcedureExecutor.CompletedProcedureRetainer<TEnvironment>> completedMap,
  Map<NonceKey, Long> nonceKeysToProcIdsMap)
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/804782f0/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.html
index 80172d9..4348761 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureExecutor.html
@@ -188,12 +188,12 @@
 
 static byte[]
 ProcedureSyncWait.submitAndWaitProcedure(ProcedureExecutor<MasterProcedureEnv> procExec,
-  Procedure<?> proc)
+  Procedure<MasterProcedureEnv> proc)


 static Future<byte[]>
 ProcedureSyncWait.submitProcedure(ProcedureExecutor<MasterProcedureEnv> procExec,
-   Procedure<?> proc)
+   Procedure<MasterProcedureEnv> proc)
 
 
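With the tightened signatures, the procedure's environment type must now match the executor's. A hedged fragment (procExec is assumed in scope; InitMetaProcedure is just one Procedure<MasterProcedureEnv> implementation):

    // Compiles only because both generics now agree on MasterProcedureEnv.
    Procedure<MasterProcedureEnv> proc = new InitMetaProcedure();
    byte[] result = ProcedureSyncWait.submitAndWaitProcedure(procExec, proc);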
 static byte[]
@@ -234,7 +234,7 @@
 
 
 
-private ProcedureExecutor<?>
+private ProcedureExecutor<TEnvironment>
 TimeoutExecutorThread.executor
 
 
@@ -246,7 +246,7 @@
 
 
 
-TimeoutExecutorThread(ProcedureExecutor<?> executor,
+TimeoutExecutorThread(ProcedureExecutor<TEnvironment> executor,
  https://docs.oracle.com/javase/8/docs/api/java/lang/ThreadGroup.html?is-external=true;
 title="class or interface in 

[14/51] [partial] hbase-site git commit: Published site at b4759ce6e72f50ccd9d410bd5917dc5a515414f1.

2018-07-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/df8fd1d3/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RegionReplicaCandidateGenerator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RegionReplicaCandidateGenerator.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RegionReplicaCandidateGenerator.html
index 233dba3..91b9055 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RegionReplicaCandidateGenerator.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RegionReplicaCandidateGenerator.html
@@ -540,1205 +540,1204 @@
 532  sm.getRegionMetrics().forEach((byte[] regionName, RegionMetrics rm) -> {
 533Deque<BalancerRegionLoad> rLoads = oldLoads.get(Bytes.toString(regionName));
 534if (rLoads == null) {
-535  // There was nothing there
-536  rLoads = new ArrayDeque<>();
-537} else if (rLoads.size() >= numRegionLoadsToRemember) {
-538  rLoads.remove();
-539}
-540rLoads.add(new 
BalancerRegionLoad(rm));
-541
loads.put(Bytes.toString(regionName), rLoads);
-542  });
-543});
-544
-545for(CostFromRegionLoadFunction cost : 
regionLoadFunctions) {
-546  cost.setLoads(loads);
-547}
-548  }
-549
-550  protected void initCosts(Cluster 
cluster) {
-551for (CostFunction c:costFunctions) 
{
-552  c.init(cluster);
-553}
-554  }
-555
-556  protected void 
updateCostsWithAction(Cluster cluster, Action action) {
-557for (CostFunction c : costFunctions) 
{
-558  c.postAction(action);
-559}
-560  }
-561
-562  /**
-563   * Get the names of the cost 
functions
-564   */
-565  public String[] getCostFunctionNames() 
{
-566if (costFunctions == null) return 
null;
-567String[] ret = new 
String[costFunctions.length];
-568for (int i = 0; i < costFunctions.length; i++) {
-569  CostFunction c = 
costFunctions[i];
-570  ret[i] = 
c.getClass().getSimpleName();
-571}
-572
-573return ret;
-574  }
-575
-576  /**
-577   * This is the main cost function.  It 
will compute a cost associated with a proposed cluster
-578   * state.  All different costs will be 
combined with their multipliers to produce a double cost.
-579   *
-580   * @param cluster The state of the 
cluster
-581   * @param previousCost the previous 
cost. This is used as an early out.
-582   * @return a double of a cost 
associated with the proposed cluster state.  This cost is an
-583   * aggregate of all individual 
cost functions.
-584   */
-585  protected double computeCost(Cluster 
cluster, double previousCost) {
-586double total = 0;
-587
-588for (int i = 0; i < costFunctions.length; i++) {
-589  CostFunction c = 
costFunctions[i];
-590  this.tempFunctionCosts[i] = 0.0;
-591
-592  if (c.getMultiplier() <= 0) {
-593continue;
-594  }
-595
-596  Float multiplier = 
c.getMultiplier();
-597  Double cost = c.cost();
-598
-599  this.tempFunctionCosts[i] = 
multiplier*cost;
-600  total += 
this.tempFunctionCosts[i];
-601
-602  if (total > previousCost) {
-603break;
-604  }
-605}
-606
-607return total;
-608  }
-609
-610  /** Generates a candidate action to be 
applied to the cluster for cost function search */
-611  abstract static class 
CandidateGenerator {
-612abstract Cluster.Action 
generate(Cluster cluster);
-613
-614/**
-615 * From a list of regions pick a random one. Null can be returned which
-616 * {@link StochasticLoadBalancer#balanceCluster(Map)} recognizes as a signal to try a region move
-617 * rather than a swap.
-618 *
-619 * @param clusterThe state of 
the cluster
-620 * @param server index of the 
server
-621 * @param chanceOfNoSwap Chance that 
this will decide to try a move rather
-622 *   than a 
swap.
-623 * @return a random {@link 
RegionInfo} or null if an asymmetrical move is
-624 * suggested.
-625 */
-626protected int 
pickRandomRegion(Cluster cluster, int server, double chanceOfNoSwap) {
-627  // Check to see if this is just a 
move.
-628  if (cluster.regionsPerServer[server].length == 0 || RANDOM.nextFloat() < chanceOfNoSwap) {
-629// signal a move only.
-630return -1;
-631  }
-632  int rand = 
RANDOM.nextInt(cluster.regionsPerServer[server].length);
-633  return 
cluster.regionsPerServer[server][rand];
-634
-635}
-636protected int 
pickRandomServer(Cluster cluster) {
-637  if (cluster.numServers  1) {
-638return -1;
-639  }
-640
-641  return 
RANDOM.nextInt(cluster.numServers);
-642}
-643
-644protected int pickRandomRack(Cluster 
cluster) {
-645  if (cluster.numRacks 

[14/51] [partial] hbase-site git commit: Published site at e66a6603e36ecd67237ca16acd5e2de03f0d372d.

2018-07-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0c6f447e/apidocs/org/apache/hadoop/hbase/TableName.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/TableName.html 
b/apidocs/org/apache/hadoop/hbase/TableName.html
index 9efaa27..79df019 100644
--- a/apidocs/org/apache/hadoop/hbase/TableName.html
+++ b/apidocs/org/apache/hadoop/hbase/TableName.html
@@ -1,6 +1,6 @@
 http://www.w3.org/TR/html4/loose.dtd;>
 
-
+
 
 
 
@@ -19,45 +19,45 @@
 }
 //-->
 var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":10,"i19":10,"i20":10,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9};
-var tabs = {65535:["t0","所有方法"],1:["t1","静态方法"],2:["t2","实例方法"],8:["t4","具体方法"]};
+var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
 var activeTableTab = "activeTableTab";
 
 
-您的浏览器已禁用 JavaScript。
+JavaScript is disabled on your browser.
 
 
 
 
 
-跳过导航链接
+Skip navigation links
 
 
 
-
-概览
-程序包
-类
-使用
-树
-已过时
-索引
-帮助
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
 
 
 
 
-上一个类
-下一个类
+PrevClass
+NextClass
 
 
-框架
-无框架
+Frames
+NoFrames
 
 
-所有类
+AllClasses
 
 
 

[14/51] [partial] hbase-site git commit: Published site at e66a6603e36ecd67237ca16acd5e2de03f0d372d.

2018-07-19 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5427a45e/apidocs/org/apache/hadoop/hbase/TableInfoMissingException.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/TableInfoMissingException.html 
b/apidocs/org/apache/hadoop/hbase/TableInfoMissingException.html
index 31b337d..91c0977 100644
--- a/apidocs/org/apache/hadoop/hbase/TableInfoMissingException.html
+++ b/apidocs/org/apache/hadoop/hbase/TableInfoMissingException.html
@@ -1,6 +1,6 @@
 http://www.w3.org/TR/html4/loose.dtd;>
 
-
+
 
 
 
@@ -20,38 +20,38 @@
 //-->
 
 
-JavaScript is disabled on your browser.
+您的浏览器已禁用 JavaScript。
 
 
 
 
 
-Skip navigation links
+跳过导航链接
 
 
 
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
+
+概览
+程序包
+类
+使用
+树
+已过时
+索引
+帮助
 
 
 
 
-PrevClass
-NextClass
+上一个类
+下一个类
 
 
-Frames
-NoFrames
+框架
+无框架
 
 
-AllClasses
+所有类
 
 
 

[14/51] [partial] hbase-site git commit: Published site at 0f23784182ab88649de340d75804e0ff20dcd0fc.

2018-07-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bcb555af/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html
index 05e032c..40ef9f4 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html
@@ -25,767 +25,805 @@
 017 */
 018package 
org.apache.hadoop.hbase.io.asyncfs;
 019
-020import static 
org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleState.READER_IDLE;
-021import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY;
+020import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY;
+021import static 
org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleState.READER_IDLE;
 022
-023import 
org.apache.hbase.thirdparty.com.google.common.base.Charsets;
-024import 
org.apache.hbase.thirdparty.com.google.common.base.Throwables;
-025import 
org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
-026import 
org.apache.hbase.thirdparty.com.google.common.collect.Maps;
-027import 
com.google.protobuf.CodedOutputStream;
-028
-029import 
org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;
-030import 
org.apache.hbase.thirdparty.io.netty.buffer.ByteBufOutputStream;
-031import 
org.apache.hbase.thirdparty.io.netty.buffer.CompositeByteBuf;
-032import 
org.apache.hbase.thirdparty.io.netty.buffer.Unpooled;
-033import 
org.apache.hbase.thirdparty.io.netty.channel.Channel;
-034import 
org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler;
-035import 
org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext;
-036import 
org.apache.hbase.thirdparty.io.netty.channel.ChannelOutboundHandlerAdapter;
-037import 
org.apache.hbase.thirdparty.io.netty.channel.ChannelPipeline;
-038import 
org.apache.hbase.thirdparty.io.netty.channel.ChannelPromise;
-039import 
org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler;
-040import 
org.apache.hbase.thirdparty.io.netty.handler.codec.LengthFieldBasedFrameDecoder;
-041import 
org.apache.hbase.thirdparty.io.netty.handler.codec.MessageToByteEncoder;
-042import 
org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufDecoder;
-043import 
org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-044import 
org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateEvent;
-045import 
org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateHandler;
-046import 
org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise;
-047
-048import java.io.IOException;
-049import java.lang.reflect.Field;
-050import 
java.lang.reflect.InvocationTargetException;
-051import java.lang.reflect.Method;
-052import java.net.InetAddress;
-053import java.net.InetSocketAddress;
-054import java.nio.ByteBuffer;
-055import 
java.security.GeneralSecurityException;
-056import java.util.Arrays;
-057import java.util.Collections;
-058import java.util.List;
-059import java.util.Map;
-060import java.util.Set;
-061import java.util.concurrent.TimeUnit;
-062import 
java.util.concurrent.atomic.AtomicBoolean;
-063
-064import 
javax.security.auth.callback.Callback;
-065import 
javax.security.auth.callback.CallbackHandler;
-066import 
javax.security.auth.callback.NameCallback;
-067import 
javax.security.auth.callback.PasswordCallback;
-068import 
javax.security.auth.callback.UnsupportedCallbackException;
-069import 
javax.security.sasl.RealmCallback;
-070import 
javax.security.sasl.RealmChoiceCallback;
-071import javax.security.sasl.Sasl;
-072import javax.security.sasl.SaslClient;
-073import 
javax.security.sasl.SaslException;
-074
-075import 
org.apache.commons.codec.binary.Base64;
-076import 
org.apache.commons.lang3.StringUtils;
-077import 
org.apache.hadoop.conf.Configuration;
-078import 
org.apache.hadoop.crypto.CipherOption;
-079import 
org.apache.hadoop.crypto.CipherSuite;
-080import 
org.apache.hadoop.crypto.CryptoCodec;
-081import 
org.apache.hadoop.crypto.Decryptor;
-082import 
org.apache.hadoop.crypto.Encryptor;
-083import 
org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
-084import 
org.apache.hadoop.fs.FileEncryptionInfo;
-085import 
org.apache.yetus.audience.InterfaceAudience;
-086import org.slf4j.Logger;
-087import org.slf4j.LoggerFactory;
-088
-089import com.google.protobuf.ByteString;
-090import 
org.apache.hadoop.hdfs.DFSClient;
-091import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-092import 
org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
-093import 

[14/51] [partial] hbase-site git commit: Published site at 85b41f36e01214b6485c9352875c84ebf877dab3.

2018-06-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a5c66de0/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteColumnFamilyFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteColumnFamilyFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteColumnFamilyFuture.html
index c10cfbf..a3e2f4a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteColumnFamilyFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteColumnFamilyFuture.html
@@ -3371,7 +3371,7 @@
 3363private V result = null;
 3364
 3365private final HBaseAdmin admin;
-3366private final Long procId;
+3366protected final Long procId;
 3367
 3368public ProcedureFuture(final 
HBaseAdmin admin, final Long procId) {
 3369  this.admin = admin;
@@ -3653,653 +3653,651 @@
 3645 * @return a description of the 
operation
 3646 */
 3647protected String getDescription() 
{
-3648  return "Operation: " + 
getOperationType() + ", "
-3649  + "Table Name: " + 
tableName.getNameWithNamespaceInclAsString();
-3650
-3651}
-3652
-3653protected abstract class 
TableWaitForStateCallable implements WaitForStateCallable {
-3654  @Override
-3655  public void 
throwInterruptedException() throws InterruptedIOException {
-3656throw new 
InterruptedIOException("Interrupted while waiting for operation: "
-3657+ getOperationType() + " on 
table: " + tableName.getNameWithNamespaceInclAsString());
-3658  }
-3659
-3660  @Override
-3661  public void 
throwTimeoutException(long elapsedTime) throws TimeoutException {
-3662throw new TimeoutException("The 
operation: " + getOperationType() + " on table: " +
-3663tableName.getNameAsString() 
+ " has not completed after " + elapsedTime + "ms");
-3664  }
-3665}
-3666
-3667@Override
-3668protected V 
postOperationResult(final V result, final long deadlineTs)
-3669throws IOException, 
TimeoutException {
-3670  LOG.info(getDescription() + " 
completed");
-3671  return 
super.postOperationResult(result, deadlineTs);
-3672}
-3673
-3674@Override
-3675protected V 
postOperationFailure(final IOException exception, final long deadlineTs)
-3676throws IOException, 
TimeoutException {
-3677  LOG.info(getDescription() + " 
failed with " + exception.getMessage());
-3678  return 
super.postOperationFailure(exception, deadlineTs);
-3679}
-3680
-3681protected void 
waitForTableEnabled(final long deadlineTs)
-3682throws IOException, 
TimeoutException {
-3683  waitForState(deadlineTs, new 
TableWaitForStateCallable() {
-3684@Override
-3685public boolean checkState(int 
tries) throws IOException {
-3686  try {
-3687if 
(getAdmin().isTableAvailable(tableName)) {
-3688  return true;
-3689}
-3690  } catch 
(TableNotFoundException tnfe) {
-3691LOG.debug("Table " + 
tableName.getNameWithNamespaceInclAsString()
-3692+ " was not enabled, 
sleeping. tries=" + tries);
-3693  }
-3694  return false;
-3695}
-3696  });
-3697}
-3698
-3699protected void 
waitForTableDisabled(final long deadlineTs)
-3700throws IOException, 
TimeoutException {
-3701  waitForState(deadlineTs, new 
TableWaitForStateCallable() {
-3702@Override
-3703public boolean checkState(int 
tries) throws IOException {
-3704  return 
getAdmin().isTableDisabled(tableName);
-3705}
-3706  });
-3707}
-3708
-3709protected void 
waitTableNotFound(final long deadlineTs)
-3710throws IOException, 
TimeoutException {
-3711  waitForState(deadlineTs, new 
TableWaitForStateCallable() {
-3712@Override
-3713public boolean checkState(int 
tries) throws IOException {
-3714  return 
!getAdmin().tableExists(tableName);
-3715}
-3716  });
-3717}
-3718
-3719protected void 
waitForSchemaUpdate(final long deadlineTs)
-3720throws IOException, 
TimeoutException {
-3721  waitForState(deadlineTs, new 
TableWaitForStateCallable() {
-3722@Override
-3723public boolean checkState(int 
tries) throws IOException {
-3724  return 
getAdmin().getAlterStatus(tableName).getFirst() == 0;
-3725}
-3726  });
-3727}
-3728
-3729protected void 
waitForAllRegionsOnline(final long deadlineTs, final byte[][] splitKeys)
-3730throws IOException, 
TimeoutException {
-3731  final TableDescriptor desc = 
getTableDescriptor();
-3732  final AtomicInteger actualRegCount 
= new AtomicInteger(0);
-3733  final MetaTableAccessor.Visitor 
visitor = new MetaTableAccessor.Visitor() {
-3734@Override
-3735public boolean visit(Result 

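The waitFor*() helpers above all delegate to a poll-until-deadline loop. A minimal sketch of that shape; the retry interval and names are assumptions of this sketch:

    import java.util.concurrent.TimeoutException;

    final class PollUntilDemo {
      interface Check {
        boolean ok(int tries) throws Exception;
      }

      static void waitForState(long deadlineMillis, Check check) throws Exception {
        int tries = 0;
        while (System.currentTimeMillis() < deadlineMillis) {
          if (check.ok(tries++)) {
            return;
          }
          Thread.sleep(250); // fixed retry pause; the real helper derives its own
        }
        throw new TimeoutException("state not reached after " + tries + " tries");
      }
    }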
[14/51] [partial] hbase-site git commit: Published site at 6198e1fc7dfa85c3bc6b2855f9a5fb5f4b2354ff.

2018-06-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb5d2c62/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
 
b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
index 05e85cc..167a2dc 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
@@ -247,14 +247,14 @@ extends 
 
 Methods inherited from class org.apache.hadoop.hbase.master.HMaster
-abort, abortProcedure, addColumn, addReplicationPeer, balance, balance, balanceSwitch, canCreateBaseZNode, canUpdateTableDescriptor, checkIfShouldMoveSystemRegionAsync, checkInitialized, checkServiceStarted, checkTableModifiable, configureInfoServer, constructMaster, createMetaBootstrap, createNamespace, createRpcServices, createServerManager, createSystemTable, createTable, decommissionRegionServers, decorateMasterConfiguration, deleteColumn, deleteNamespace, deleteTable, disableReplicationPeer, disableTable, enableReplicationPeer, enableTable, getAssignmentManager, getAverageLoad, getCatalogJanitor, getClientIdAuditPrefix, getClusterMetrics, getClusterMetrics, getClusterMetricsWithoutCoprocessor, getClusterMetricsWithoutCoprocessor, getClusterSchema, getDumpServlet, getFavoredNodesManager, getHFileCleaner, getInitializedEvent, getLastMajorCompactionTimestamp, getLastMajorCompactionTimestampForRegion, getLoadBalancer, getLoadBalancerClassName, getLoadedCoprocessors, getLockManager, getLocks, getLogCleaner, getMasterActiveTime, getMasterCoprocessorHost, getMasterCoprocessors, getMasterFileSystem, getMasterFinishedInitializationTime, getMasterMetrics, getMasterProcedureExecutor, getMasterProcedureManagerHost, getMasterQuotaManager, getMasterRpcServices, getMasterStartTime, getMasterWalManager, getMergePlanCount, getMetaTableObserver, getMobCompactionState, getNamespace, getNamespaces, getNumWALFiles, getProcedures, getProcessName, getQuotaObserverChore, getRegionNormalizer, getRegionNormalizerTracker, getRegionServerFatalLogBuffer, getRegionServerInfoPort, getRegionServerVersion, getRemoteInetAddress, getReplicationLoad, getReplicationPeerConfig, getReplicationPeerManager, getServerManager, getServerName, getSnapshotManager, getSnapshotQuotaObserverChore, getSpaceQuotaSnapshotNotifier, getSplitOrMergeTracker, getSplitPlanCount, getTableDescriptors, getTableRegionForRow, getTableStateManager, getUseThisHostnameInstead, getWalProcedureStore, getZooKeeper, initClusterSchemaService, initializeZKBasedSystemTrackers, isActiveMaster, isBalancerOn, isCatalogJanitorEnabled, isCleanerChoreEnabled, isInitialized, isInMaintenanceMode, isNormalizerOn, isSplitOrMergeEnabled, listDecommissionedRegionServers, listReplicationPeers, listTableDescriptors, listTableDescriptorsByNamespace, listTableNames, listTableNamesByNamespace, login, main, mergeRegions, modifyColumn, modifyNamespace, modifyTable, move, normalizeRegions, recommissionRegionServer, registerService, remoteProcedureCompleted, remoteProcedureFailed, removeReplicationPeer, reportMobCompactionEnd, reportMobCompactionStart, requestMobCompaction, restoreSnapshot, setCatalogJanitorEnabled, setInitialized, shutdown, splitRegion, stop, stopMaster, stopServiceThreads, truncateTable, updateConfigurationForSpaceQuotaObserver, updateReplicationPeerConfig, waitForMasterActive
+abort, abortProcedure, addColumn, addReplicationPeer, balance, balance, balanceSwitch, canCreateBaseZNode, canUpdateTableDescriptor, checkIfShouldMoveSystemRegionAsync, checkInitialized, checkServiceStarted, checkTableModifiable, configureInfoServer, constructMaster, createMetaBootstrap, createNamespace, createRpcServices, createServerManager, createSystemTable, createTable, decommissionRegionServers, decorateMasterConfiguration, deleteColumn, deleteNamespace, deleteTable, disableReplicationPeer, disableTable, enableReplicationPeer, enableTable, getAssignmentManager, getAverageLoad, getCatalogJanitor, getClientIdAuditPrefix, getClusterMetrics, getClusterMetrics, getClusterMetricsWithoutCoprocessor, getClusterMetricsWithoutCoprocessor, getClusterSchema, getDumpServlet, getFavoredNodesManager, getHFileCleaner, getInitializedEvent, getLastMajorCompactionTimestamp, getLastMajorCompactionTimestampForRegion, getLoadBalancer, getLoadBalancerClassName, getLoadedCoprocessors, getLockManager,

[14/51] [partial] hbase-site git commit: Published site at 14087cc919da9f2e0b1a68f701f6365ad9d1d71f.

2018-06-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55ce8d97/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html
index 4ba1d04..48e3274 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html
@@ -489,6 +489,14 @@ extends 
 
 static String
+CP_REQUEST_COUNT
+
+
+static String
+CP_REQUEST_COUNT_DESC
+
+
+static String
 DATA_SIZE_WITHOUT_WAL
 
 
@@ -1826,13 +1834,39 @@ extends 
 
 
+
+CP_REQUEST_COUNT
+static final String CP_REQUEST_COUNT
+
+See Also:
+Constant Field Values
+
+
+CP_REQUEST_COUNT_DESC
+static final String CP_REQUEST_COUNT_DESC
+
+See Also:
+Constant Field Values
+
 
 
 
 
 
 FILTERED_READ_REQUEST_COUNT
-static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String FILTERED_READ_REQUEST_COUNT
+static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String FILTERED_READ_REQUEST_COUNT
 
 See Also:
 Constant
 Field Values
@@ -1845,7 +1879,7 @@ extends 
 
 FILTERED_READ_REQUEST_COUNT_DESC
-static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String FILTERED_READ_REQUEST_COUNT_DESC
+static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String FILTERED_READ_REQUEST_COUNT_DESC
 
 See Also:
 Constant
 Field Values
@@ -1858,7 +1892,7 @@ extends 
 
 WRITE_REQUEST_COUNT
-static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String WRITE_REQUEST_COUNT
+static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String WRITE_REQUEST_COUNT
 
 See Also:
 Constant
 Field Values
@@ -1871,7 +1905,7 @@ extends 
 
 WRITE_REQUEST_COUNT_DESC
-static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String WRITE_REQUEST_COUNT_DESC
+static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String WRITE_REQUEST_COUNT_DESC
 
 See Also:
 Constant
 Field Values
@@ -1884,7 +1918,7 @@ extends 
 
 CHECK_MUTATE_FAILED_COUNT
-static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CHECK_MUTATE_FAILED_COUNT
+static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CHECK_MUTATE_FAILED_COUNT
 
 See Also:
 Constant
 Field Values
@@ -1897,7 +1931,7 @@ extends 
 
 CHECK_MUTATE_FAILED_COUNT_DESC
-static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CHECK_MUTATE_FAILED_COUNT_DESC
+static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CHECK_MUTATE_FAILED_COUNT_DESC
 
 See Also:
 Constant
 Field Values
@@ -1910,7 +1944,7 @@ extends 
 
 CHECK_MUTATE_PASSED_COUNT
-static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CHECK_MUTATE_PASSED_COUNT
+static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CHECK_MUTATE_PASSED_COUNT
 
 See Also:
 Constant
 Field Values
@@ -1923,7 +1957,7 @@ extends 
 
 CHECK_MUTATE_PASSED_COUNT_DESC
-static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CHECK_MUTATE_PASSED_COUNT_DESC
+static 

[14/51] [partial] hbase-site git commit: Published site at 72784c2d836a4b977667449d3adec5e8d15453f5.

2018-06-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2b11656f/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ThrowableAbortable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ThrowableAbortable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ThrowableAbortable.html
index b6e7636..592c2cc 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ThrowableAbortable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ThrowableAbortable.html
@@ -356,3901 +356,3924 @@
 348  public Future<Void> modifyTableAsync(TableDescriptor td) throws IOException {
 349ModifyTableResponse response = 
executeCallable(
 350  new 
MasterCallableModifyTableResponse(getConnection(), 
getRpcControllerFactory()) {
-351@Override
-352protected ModifyTableResponse 
rpcCall() throws Exception {
-353  
setPriority(td.getTableName());
-354  ModifyTableRequest request = 
RequestConverter.buildModifyTableRequest(
-355td.getTableName(), td, 
ng.getNonceGroup(), ng.newNonce());
-356  return 
master.modifyTable(getRpcController(), request);
-357}
-358  });
-359return new ModifyTableFuture(this, 
td.getTableName(), response);
-360  }
-361
-362  @Override
-363  public List<TableDescriptor> listTableDescriptorsByNamespace(byte[] name) throws IOException {
-364return executeCallable(new MasterCallable<List<TableDescriptor>>(getConnection(),
-365getRpcControllerFactory()) {
-366  @Override
-367  protected List<TableDescriptor> rpcCall() throws Exception {
-368return 
master.listTableDescriptorsByNamespace(getRpcController(),
-369
ListTableDescriptorsByNamespaceRequest.newBuilder()
-370  
.setNamespaceName(Bytes.toString(name)).build())
-371.getTableSchemaList()
-372.stream()
-373
.map(ProtobufUtil::toTableDescriptor)
-374
.collect(Collectors.toList());
-375  }
-376});
-377  }
-378
-379  @Override
-380  public ListTableDescriptor 
listTableDescriptors(ListTableName tableNames) throws IOException {
-381return executeCallable(new 
MasterCallableListTableDescriptor(getConnection(),
-382getRpcControllerFactory()) {
-383  @Override
-384  protected 
ListTableDescriptor rpcCall() throws Exception {
-385GetTableDescriptorsRequest req 
=
-386
RequestConverter.buildGetTableDescriptorsRequest(tableNames);
-387  return 
ProtobufUtil.toTableDescriptorList(master.getTableDescriptors(getRpcController(),
-388  req));
-389  }
-390});
-391  }
-392
-393  @Override
-394  public List<RegionInfo> getRegions(final ServerName sn) throws IOException {
-395AdminService.BlockingInterface admin 
= this.connection.getAdmin(sn);
-396// TODO: There is no timeout on this 
controller. Set one!
-397HBaseRpcController controller = 
rpcControllerFactory.newController();
-398return 
ProtobufUtil.getOnlineRegions(controller, admin);
-399  }
-400
-401  @Override
-402  public List<RegionInfo> getRegions(TableName tableName) throws IOException {
-403if 
(TableName.isMetaTableName(tableName)) {
-404  return 
Arrays.asList(RegionInfoBuilder.FIRST_META_REGIONINFO);
-405} else {
-406  return 
MetaTableAccessor.getTableRegions(connection, tableName, true);
-407}
-408  }
-409
-410  private static class AbortProcedureFuture extends ProcedureFuture<Boolean> {
-411private boolean isAbortInProgress;
-412
-413public AbortProcedureFuture(
-414final HBaseAdmin admin,
-415final Long procId,
-416final Boolean abortProcResponse) 
{
-417  super(admin, procId);
-418  this.isAbortInProgress = 
abortProcResponse;
-419}
-420
-421@Override
-422public Boolean get(long timeout, 
TimeUnit unit)
-423throws InterruptedException, 
ExecutionException, TimeoutException {
-424  if (!this.isAbortInProgress) {
-425return false;
-426  }
-427  super.get(timeout, unit);
-428  return true;
-429}
-430  }
-431
-432  /** @return Connection used by this 
object. */
-433  @Override
-434  public Connection getConnection() {
-435return connection;
-436  }
-437
-438  @Override
-439  public boolean tableExists(final 
TableName tableName) throws IOException {
-440return executeCallable(new 
RpcRetryingCallableBoolean() {
-441  @Override
-442  protected Boolean rpcCall(int 
callTimeout) throws Exception {
-443return 
MetaTableAccessor.tableExists(connection, tableName);
-444  }
-445});
-446  }
-447
-448  @Override
-449  public HTableDescriptor[] listTables() 
throws IOException {
-450return listTables((Pattern)null, 
false);
-451  }
-452
-453  @Override
-454  public HTableDescriptor[] 
listTables(Pattern pattern) throws IOException {
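
Read as one unit, the hunk above is the HBaseAdmin RPC template: every admin operation wraps a single protobuf round-trip in a MasterCallable and hands it to executeCallable(), which centralizes retries, priorities, and controller setup. A minimal standalone sketch of that callable-plus-retry shape, assuming nothing beyond the JDK (class and method names here are illustrative, not HBase API):

import java.io.IOException;

// Illustrative model of the executeCallable(new MasterCallable<V>() { rpcCall(); })
// idiom above: subclasses supply one RPC; the template supplies the retry loop.
abstract class RetryingCall<V> {
  protected abstract V rpcCall() throws Exception; // one master round-trip

  V execute(int maxAttempts) throws IOException {
    Exception last = null;
    for (int attempt = 1; attempt <= maxAttempts; attempt++) {
      try {
        return rpcCall();                 // success ends the loop immediately
      } catch (Exception e) {
        last = e;                         // remember the failure and retry
      }
    }
    throw new IOException("All " + maxAttempts + " attempts failed", last);
  }
}

A caller would subclass it inline, exactly as the anonymous MasterCallable subclasses above do.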

[14/51] [partial] hbase-site git commit: Published site at 9101fc246f86445006bfbcdfda5cc495016dc280.

2018-06-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/65565d77/devapidocs/src-html/org/apache/hadoop/hbase/master/CatalogJanitor.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/CatalogJanitor.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/CatalogJanitor.html
index 4229646..1814633 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/CatalogJanitor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/CatalogJanitor.html
@@ -119,358 +119,355 @@
 111  protected void chore() {
 112    try {
 113      AssignmentManager am = this.services.getAssignmentManager();
-114      if (this.enabled.get()
-115          && !this.services.isInMaintenanceMode()
-116          && am != null
-117          && am.isFailoverCleanupDone()
-118          && !am.hasRegionsInTransition()) {
-119        scan();
-120      } else {
-121        LOG.warn("CatalogJanitor is disabled! Enabled=" + this.enabled.get() +
-122            ", maintenanceMode=" + this.services.isInMaintenanceMode() +
-123            ", am=" + am + ", failoverCleanupDone=" + (am != null && am.isFailoverCleanupDone()) +
-124            ", hasRIT=" + (am != null && am.hasRegionsInTransition()));
-125      }
-126    } catch (IOException e) {
-127      LOG.warn("Failed scan of catalog table", e);
-128    }
-129  }
-130
-131  /**
-132   * Scans hbase:meta and returns a number of scanned rows, and a map of merged
-133   * regions, and an ordered map of split parents.
-134   * @return triple of scanned rows, map of merged regions and map of split
-135   * parent regioninfos
-136   * @throws IOException
-137   */
-138  Triple<Integer, Map<RegionInfo, Result>, Map<RegionInfo, Result>>
-139    getMergedRegionsAndSplitParents() throws IOException {
-140    return getMergedRegionsAndSplitParents(null);
-141  }
-142
-143  /**
-144   * Scans hbase:meta and returns a number of scanned rows, and a map of merged
-145   * regions, and an ordered map of split parents. if the given table name is
-146   * null, return merged regions and split parents of all tables, else only the
-147   * specified table
-148   * @param tableName null represents all tables
-149   * @return triple of scanned rows, and map of merged regions, and map of split
-150   * parent regioninfos
-151   * @throws IOException
-152   */
-153  Triple<Integer, Map<RegionInfo, Result>, Map<RegionInfo, Result>>
-154    getMergedRegionsAndSplitParents(final TableName tableName) throws IOException {
-155    final boolean isTableSpecified = (tableName != null);
-156    // TODO: Only works with single hbase:meta region currently.  Fix.
-157    final AtomicInteger count = new AtomicInteger(0);
-158    // Keep Map of found split parents.  There are candidates for cleanup.
-159    // Use a comparator that has split parents come before its daughters.
-160    final Map<RegionInfo, Result> splitParents = new TreeMap<>(new SplitParentFirstComparator());
-161    final Map<RegionInfo, Result> mergedRegions = new TreeMap<>(RegionInfo.COMPARATOR);
-162    // This visitor collects split parents and counts rows in the hbase:meta table
-163
-164    MetaTableAccessor.Visitor visitor = new MetaTableAccessor.Visitor() {
-165      @Override
-166      public boolean visit(Result r) throws IOException {
-167        if (r == null || r.isEmpty()) return true;
-168        count.incrementAndGet();
-169        RegionInfo info = MetaTableAccessor.getRegionInfo(r);
-170        if (info == null) return true; // Keep scanning
-171        if (isTableSpecified
-172            && info.getTable().compareTo(tableName) > 0) {
-173          // Another table, stop scanning
-174          return false;
-175        }
-176        if (LOG.isTraceEnabled()) LOG.trace("" + info + " IS-SPLIT_PARENT=" + info.isSplitParent());
-177        if (info.isSplitParent()) splitParents.put(info, r);
-178        if (r.getValue(HConstants.CATALOG_FAMILY, HConstants.MERGEA_QUALIFIER) != null) {
-179          mergedRegions.put(info, r);
-180        }
-181        // Returning true means "keep scanning"
-182        return true;
-183      }
-184    };
-185
-186    // Run full scan of hbase:meta catalog table passing in our custom visitor with
-187    // the start row
-188    MetaTableAccessor.scanMetaForTableRegions(this.connection, visitor, tableName);
+114      if (this.enabled.get() && !this.services.isInMaintenanceMode() && am != null &&
+115          am.isMetaLoaded() && !am.hasRegionsInTransition()) {
+116        scan();
+117      } else {
+118        LOG.warn("CatalogJanitor is disabled! Enabled=" + this.enabled.get() +
+119          ", maintenanceMode=" + this.services.isInMaintenanceMode() + ", am=" + am +
+120          ", metaLoaded=" + (am != null && am.isMetaLoaded()) + ", hasRIT=" +
+121          (am != null && am.hasRegionsInTransition()));
+122      }
+123    } catch (IOException e) {
+124      LOG.warn("Failed scan of catalog
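
The janitor scan above is driven by a MetaTableAccessor.Visitor whose boolean return value is the only loop control: true means keep scanning, false stops at the current row. The same contract reduced to a self-contained sketch (the visitor interface here is a simplified stand-in for the HBase one):

import java.io.IOException;
import java.util.List;

// Reduced model of the visitor-driven scan in getMergedRegionsAndSplitParents().
interface RowVisitor<R> {
  boolean visit(R row) throws IOException; // false = stop scanning
}

final class VisitorScan {
  static <R> int scan(List<R> rows, RowVisitor<R> visitor) throws IOException {
    int visited = 0;
    for (R row : rows) {
      visited++;
      if (!visitor.visit(row)) {
        break;                             // visitor asked to stop early
      }
    }
    return visited;                        // analogous to the AtomicInteger count above
  }
}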

[14/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.BaosAndCompressor.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.BaosAndCompressor.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.BaosAndCompressor.html
index 83c17c0..9df0225 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.BaosAndCompressor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.BaosAndCompressor.html
@@ -54,323 +54,362 @@
 046import org.apache.hadoop.io.IOUtils;
 047
 048import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
-049
+049import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
 050
-051/**
-052 * Compression in this class is lifted off Compressor/KeyValueCompression.
-053 * This is a pure coincidence... they are independent and don't have to be compatible.
-054 *
-055 * This codec is used at server side for writing cells to WAL as well as for sending edits
-056 * as part of the distributed splitting process.
-057 */
-058@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
-059  HBaseInterfaceAudience.PHOENIX, HBaseInterfaceAudience.CONFIG})
-060public class WALCellCodec implements Codec {
-061  /** Configuration key for the class to use when encoding cells in the WAL */
-062  public static final String WAL_CELL_CODEC_CLASS_KEY = "hbase.regionserver.wal.codec";
-063
-064  protected final CompressionContext compression;
-065  protected final ByteStringUncompressor statelessUncompressor = new ByteStringUncompressor() {
-066    @Override
-067    public byte[] uncompress(ByteString data, Dictionary dict) throws IOException {
-068      return WALCellCodec.uncompressByteString(data, dict);
-069    }
-070  };
-071
-072  /**
-073   * <b>All subclasses must implement a no argument constructor</b>
-074   */
-075  public WALCellCodec() {
-076    this.compression = null;
-077  }
-078
-079  /**
-080   * Default constructor - <b>all subclasses must implement a constructor with this signature</b>
-081   * if they are to be dynamically loaded from the {@link Configuration}.
-082   * @param conf configuration to configure <tt>this</tt>
-083   * @param compression compression the codec should support, can be <tt>null</tt> to indicate no
-084   *          compression
-085   */
-086  public WALCellCodec(Configuration conf, CompressionContext compression) {
-087    this.compression = compression;
-088  }
-089
-090  public static String getWALCellCodecClass(Configuration conf) {
-091    return conf.get(WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
-092  }
-093
-094  /**
-095   * Create and setup a {@link WALCellCodec} from the {@code cellCodecClsName} and
-096   * CompressionContext, if {@code cellCodecClsName} is specified.
-097   * Otherwise Cell Codec classname is read from {@link Configuration}.
-098   * Fully prepares the codec for use.
-099   * @param conf {@link Configuration} to read for the user-specified codec. If none is specified,
-100   *          uses a {@link WALCellCodec}.
-101   * @param cellCodecClsName name of codec
-102   * @param compression compression the codec should use
-103   * @return a {@link WALCellCodec} ready for use.
-104   * @throws UnsupportedOperationException if the codec cannot be instantiated
-105   */
-106
-107  public static WALCellCodec create(Configuration conf, String cellCodecClsName,
-108      CompressionContext compression) throws UnsupportedOperationException {
-109    if (cellCodecClsName == null) {
-110      cellCodecClsName = getWALCellCodecClass(conf);
-111    }
-112    return ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-113        { Configuration.class, CompressionContext.class }, new Object[] { conf, compression });
-114  }
-115
-116  /**
-117   * Create and setup a {@link WALCellCodec} from the
-118   * CompressionContext.
-119   * Cell Codec classname is read from {@link Configuration}.
-120   * Fully prepares the codec for use.
-121   * @param conf {@link Configuration} to read for the user-specified codec. If none is specified,
-122   *          uses a {@link WALCellCodec}.
-123   * @param compression compression the codec should use
-124   * @return a {@link WALCellCodec} ready for use.
-125   * @throws UnsupportedOperationException if the codec cannot be instantiated
-126   */
-127  public static WALCellCodec create(Configuration conf,
-128      CompressionContext compression) throws UnsupportedOperationException {
-129    String cellCodecClsName = getWALCellCodecClass(conf);
-130    return ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-131        { Configuration.class, CompressionContext.class }, new Object[] { conf, compression });
-132  }
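
Both create() overloads above end in the same reflective call: the codec class is looked up by name and constructed through a (Configuration, CompressionContext) constructor, which is why the javadoc insists subclasses keep that exact signature. A hedged sketch of that two-argument reflective construction using plain JDK reflection (not the HBase ReflectionUtils helper):

// Sketch of dynamic loading with a mandatory two-argument constructor, mirroring
// the contract documented above. Plain JDK reflection; names are illustrative.
final class CodecLoader {
  @SuppressWarnings("unchecked")
  static <T> T instantiate(String className, Class<?>[] ctorTypes, Object[] ctorArgs) {
    try {
      return (T) Class.forName(className)
          .getDeclaredConstructor(ctorTypes)   // fails fast if the signature is missing
          .newInstance(ctorArgs);
    } catch (ReflectiveOperationException e) {
      throw new UnsupportedOperationException("Cannot load codec " + className, e);
    }
  }
}

A subclass without the expected constructor surfaces immediately as a NoSuchMethodException, wrapped in the UnsupportedOperationException the javadoc advertises.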

[14/51] [partial] hbase-site git commit: Published site at 7d3750bd9fc9747623549c242cc4171e224b3eaf.

2018-06-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3469cbc0/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.ServerState.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.ServerState.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.ServerState.html
index 5420d82..6ea3672 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.ServerState.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.ServerState.html
@@ -316,687 +316,728 @@
 308    }
 309  }
 310
-311  public enum ServerState { ONLINE, SPLITTING, OFFLINE }
-312  public static class ServerStateNode implements Comparable<ServerStateNode> {
-313    private final ServerReportEvent reportEvent;
-314
-315    private final Set<RegionStateNode> regions;
-316    private final ServerName serverName;
-317
-318    private volatile ServerState state = ServerState.ONLINE;
-319    private volatile int versionNumber = 0;
-320
-321    public ServerStateNode(final ServerName serverName) {
-322      this.serverName = serverName;
-323      this.regions = ConcurrentHashMap.newKeySet();
-324      this.reportEvent = new ServerReportEvent(serverName);
-325    }
-326
-327    public ServerName getServerName() {
-328      return serverName;
-329    }
+311  /**
+312   * Server State.
+313   */
+314  public enum ServerState {
+315    /**
+316     * Initial state. Available.
+317     */
+318    ONLINE,
+319
+320    /**
+321     * Server expired/crashed. Currently undergoing WAL splitting.
+322     */
+323    SPLITTING,
+324
+325    /**
+326     * WAL splitting done.
+327     */
+328    OFFLINE
+329  }
 330
-331    public ServerState getState() {
-332      return state;
-333    }
-334
-335    public int getVersionNumber() {
-336      return versionNumber;
-337    }
-338
-339    public ProcedureEvent<?> getReportEvent() {
-340      return reportEvent;
-341    }
+331  /**
+332   * State of Server; list of hosted regions, etc.
+333   */
+334  public static class ServerStateNode implements Comparable<ServerStateNode> {
+335    private final ServerReportEvent reportEvent;
+336
+337    private final Set<RegionStateNode> regions;
+338    private final ServerName serverName;
+339
+340    private volatile ServerState state = ServerState.ONLINE;
+341    private volatile int versionNumber = 0;
 342
-343    public boolean isInState(final ServerState... expected) {
-344      boolean expectedState = false;
-345      if (expected != null) {
-346        for (int i = 0; i < expected.length; ++i) {
-347          expectedState |= (state == expected[i]);
-348        }
-349      }
-350      return expectedState;
+343    public ServerStateNode(final ServerName serverName) {
+344      this.serverName = serverName;
+345      this.regions = ConcurrentHashMap.newKeySet();
+346      this.reportEvent = new ServerReportEvent(serverName);
+347    }
+348
+349    public ServerName getServerName() {
+350      return serverName;
 351    }
 352
-353    public void setState(final ServerState state) {
-354      this.state = state;
+353    public ServerState getState() {
+354      return state;
 355    }
 356
-357    public void setVersionNumber(final int versionNumber) {
-358      this.versionNumber = versionNumber;
+357    public int getVersionNumber() {
+358      return versionNumber;
 359    }
 360
-361    public Set<RegionStateNode> getRegions() {
-362      return regions;
+361    public ProcedureEvent<?> getReportEvent() {
+362      return reportEvent;
 363    }
 364
-365    public int getRegionCount() {
-366      return regions.size();
+365    public boolean isOffline() {
+366      return this.state.equals(ServerState.OFFLINE);
 367    }
 368
-369    public ArrayList<RegionInfo> getRegionInfoList() {
-370      ArrayList<RegionInfo> hris = new ArrayList<RegionInfo>(regions.size());
-371      for (RegionStateNode region: regions) {
-372        hris.add(region.getRegionInfo());
-373      }
-374      return hris;
-375    }
-376
-377    public void addRegion(final RegionStateNode regionNode) {
-378      this.regions.add(regionNode);
-379    }
-380
-381    public void removeRegion(final RegionStateNode regionNode) {
-382      this.regions.remove(regionNode);
-383    }
-384
-385    @Override
-386    public int compareTo(final ServerStateNode other) {
-387      return getServerName().compareTo(other.getServerName());
-388    }
-389
-390    @Override
-391    public int hashCode() {
-392      return getServerName().hashCode();
+369    public boolean isInState(final ServerState... expected) {
+370      boolean expectedState = false;
+371      if (expected != null) {
+372        for (int i = 0; i < expected.length; ++i) {
+373          expectedState |= (state == expected[i]);
+374        }
+375      }
+376      return expectedState;
+377    }
+378
+379
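
The rewritten ServerStateNode keeps the varargs isInState() check; it is simply an OR-fold across the expected states, with a null guard so a call with no arguments returns false. A standalone illustration:

// Standalone illustration of the isInState(ServerState...) idiom from the hunk above.
enum State { ONLINE, SPLITTING, OFFLINE }

final class StateCheck {
  static boolean isInState(State actual, State... expected) {
    boolean match = false;
    if (expected != null) {
      for (State s : expected) {
        match |= (actual == s);   // any one expected state is enough
      }
    }
    return match;
  }

  public static void main(String[] args) {
    System.out.println(isInState(State.SPLITTING, State.SPLITTING, State.OFFLINE)); // true
    System.out.println(isInState(State.ONLINE));                                    // false
  }
}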

[14/51] [partial] hbase-site git commit: Published site at 997747076d8ec0b4346d7cb99c4b0667a7c14905.

2018-05-30 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4df09ed9/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFlushContext.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFlushContext.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFlushContext.html
index acfc040..47f59ac 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFlushContext.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFlushContext.html
@@ -65,7 +65,7 @@
 057   *
 058   * A very short operation
 059   *
-060   * @return
+060   * @return whether compaction is required
 061   * @throws IOException
 062   */
 063  boolean commit(MonitoredTask status) throws IOException;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4df09ed9/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/querymatcher/ColumnTracker.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/querymatcher/ColumnTracker.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/querymatcher/ColumnTracker.html
index 31ba835..04d65a9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/querymatcher/ColumnTracker.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/querymatcher/ColumnTracker.html
@@ -68,7 +68,7 @@
 060   * method based on the return type (INCLUDE) of this method. The values that can be returned by
 061   * this method are {@link MatchCode#INCLUDE}, {@link MatchCode#SEEK_NEXT_COL} and
 062   * {@link MatchCode#SEEK_NEXT_ROW}.
-063   * @param cell
+063   * @param cell a cell with the column to match against
 064   * @param type The type of the Cell
 065   * @return The match code instance.
 066   * @throws IOException in case there is an internal consistency problem caused by a data
@@ -85,7 +85,7 @@
 077   * Implementations which include all the columns could just return {@link MatchCode#INCLUDE} in
 078   * the {@link #checkColumn(Cell, byte)} method and perform all the operations in this
 079   * checkVersions method.
-080   * @param cell
+080   * @param cell a cell with the column to match against
 081   * @param timestamp The timestamp of the cell.
 082   * @param type the type of the key value (Put/Delete)
 083   * @param ignoreCount indicates if the KV needs to be excluded while counting (used during

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4df09ed9/devapidocs/src-html/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.PeerRegionServerListener.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.PeerRegionServerListener.html b/devapidocs/src-html/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.PeerRegionServerListener.html
index 098dc5e..a71f43d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.PeerRegionServerListener.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.PeerRegionServerListener.html
@@ -56,203 +56,190 @@
 048
 049  private static final Logger LOG = LoggerFactory.getLogger(HBaseReplicationEndpoint.class);
 050
-051  private Object zkwLock = new Object();
-052  private ZKWatcher zkw = null;
-053
-054  private List<ServerName> regionServers = new ArrayList<>(0);
-055  private long lastRegionServerUpdate;
-056
-057  protected void disconnect() {
-058    synchronized (zkwLock) {
-059      if (zkw != null) {
-060        zkw.close();
-061      }
-062    }
-063  }
-064
-065  /**
-066   * A private method used to re-establish a zookeeper session with a peer cluster.
-067   * @param ke
-068   */
-069  protected void reconnect(KeeperException ke) {
-070    if (ke instanceof ConnectionLossException || ke instanceof SessionExpiredException
-071        || ke instanceof AuthFailedException) {
-072      String clusterKey = ctx.getPeerConfig().getClusterKey();
-073      LOG.warn("Lost the ZooKeeper connection for peer " + clusterKey, ke);
-074      try {
-075        reloadZkWatcher();
-076      } catch (IOException io) {
-077        LOG.warn("Creation of ZookeeperWatcher failed for peer " + clusterKey, io);
-078      }
-079    }
-080  }
-081
-082  @Override
-083  public void start() {
-084    startAsync();
-085  }
-086
-087  @Override
-088  public void stop() {
-089    stopAsync();
-090  }
-091
-092  @Override
-093  protected void doStart() {
-094    try {
-095      reloadZkWatcher();
-096      notifyStarted();
-097    } catch (IOException e) {
-098      notifyFailed(e);
-099    }
-100  }
-101
-102  @Override
-103  protected void doStop() {
-104    disconnect();
-105    notifyStopped();
-106  }
-107
-108  @Override
-109  // Synchronize peer cluster connection attempts to
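
reconnect(KeeperException) above rebuilds the peer watcher only for session-fatal errors (connection loss, session expiry, auth failure) and deliberately ignores everything else. A compact sketch of that classify-then-rebuild pattern; rebuild() is a hypothetical hook standing in for reloadZkWatcher():

import java.io.IOException;
import org.apache.zookeeper.KeeperException;

// Classify-then-rebuild, as in reconnect(KeeperException) above.
// rebuild() is a hypothetical stand-in for reloadZkWatcher().
abstract class ZkSessionGuard {
  protected abstract void rebuild() throws IOException;

  void onZkError(KeeperException ke) {
    boolean sessionFatal = ke instanceof KeeperException.ConnectionLossException
        || ke instanceof KeeperException.SessionExpiredException
        || ke instanceof KeeperException.AuthFailedException;
    if (!sessionFatal) {
      return;              // transient: keep the current session
    }
    try {
      rebuild();           // tear down and recreate the watcher
    } catch (IOException io) {
      // swallowed, as in the original: the next error triggers another attempt
    }
  }
}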

[14/51] [partial] hbase-site git commit: Published site at f3d1c021de2264301f68eadb9ef126ff83d7ef53.

2018-05-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/883dde2f/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.html b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.html
index e768c9f..5aa1703 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.html
@@ -208,256 +208,273 @@
 200      for (RegionInfo region : regions.keySet()) {
 201        if (!misplacedRegions.contains(region)) {
 202          String groupName = rsGroupInfoManager.getRSGroupOfTable(region.getTable());
-203          groupToRegion.put(groupName, region);
-204        }
-205      }
-206      // Now the "groupToRegion" map has only the regions which have correct
-207      // assignments.
-208      for (String key : groupToRegion.keySet()) {
-209        Map<RegionInfo, ServerName> currentAssignmentMap = new TreeMap<RegionInfo, ServerName>();
-210        List<RegionInfo> regionList = groupToRegion.get(key);
-211        RSGroupInfo info = rsGroupInfoManager.getRSGroup(key);
-212        List<ServerName> candidateList = filterOfflineServers(info, servers);
-213        for (RegionInfo region : regionList) {
-214          currentAssignmentMap.put(region, regions.get(region));
-215        }
-216        if (candidateList.size() > 0) {
-217          assignments.putAll(this.internalBalancer.retainAssignment(
-218              currentAssignmentMap, candidateList));
+203          if (groupName == null) {
+204            LOG.info("Group not found for table " + region.getTable() + ", using default");
+205            groupName = RSGroupInfo.DEFAULT_GROUP;
+206          }
+207          groupToRegion.put(groupName, region);
+208        }
+209      }
+210      // Now the "groupToRegion" map has only the regions which have correct
+211      // assignments.
+212      for (String key : groupToRegion.keySet()) {
+213        Map<RegionInfo, ServerName> currentAssignmentMap = new TreeMap<RegionInfo, ServerName>();
+214        List<RegionInfo> regionList = groupToRegion.get(key);
+215        RSGroupInfo info = rsGroupInfoManager.getRSGroup(key);
+216        List<ServerName> candidateList = filterOfflineServers(info, servers);
+217        for (RegionInfo region : regionList) {
+218          currentAssignmentMap.put(region, regions.get(region));
 219        }
-220      }
-221
-222      for (RegionInfo region : misplacedRegions) {
-223        String groupName = rsGroupInfoManager.getRSGroupOfTable(region.getTable());
-224        RSGroupInfo info = rsGroupInfoManager.getRSGroup(groupName);
-225        List<ServerName> candidateList = filterOfflineServers(info, servers);
-226        ServerName server = this.internalBalancer.randomAssignment(region,
-227            candidateList);
-228        if (server != null) {
-229          if (!assignments.containsKey(server)) {
-230            assignments.put(server, new ArrayList<>());
-231          }
-232          assignments.get(server).add(region);
-233        } else {
-234          //if not server is available assign to bogus so it ends up in RIT
-235          if (!assignments.containsKey(LoadBalancer.BOGUS_SERVER_NAME)) {
-236            assignments.put(LoadBalancer.BOGUS_SERVER_NAME, new ArrayList<>());
-237          }
-238          assignments.get(LoadBalancer.BOGUS_SERVER_NAME).add(region);
-239        }
-240      }
-241      return assignments;
-242    } catch (IOException e) {
-243      throw new HBaseIOException("Failed to do online retain assignment", e);
-244    }
-245  }
-246
-247  @Override
-248  public ServerName randomAssignment(RegionInfo region,
-249      List<ServerName> servers) throws HBaseIOException {
-250    ListMultimap<String, RegionInfo> regionMap = LinkedListMultimap.create();
-251    ListMultimap<String, ServerName> serverMap = LinkedListMultimap.create();
-252    generateGroupMaps(Lists.newArrayList(region), servers, regionMap, serverMap);
-253    List<ServerName> filteredServers = serverMap.get(regionMap.keySet().iterator().next());
-254    return this.internalBalancer.randomAssignment(region, filteredServers);
-255  }
-256
-257  private void generateGroupMaps(
-258    List<RegionInfo> regions,
-259    List<ServerName> servers,
-260    ListMultimap<String, RegionInfo> regionMap,
-261    ListMultimap<String, ServerName> serverMap) throws HBaseIOException {
-262    try {
-263      for (RegionInfo region : regions) {
-264        String groupName = rsGroupInfoManager.getRSGroupOfTable(region.getTable());
-265        if (groupName == null) {
-266          LOG.warn("Group for table " + region.getTable() + " is null");
-267        }
-268        regionMap.put(groupName, region);
-269      }
-270      for (String groupKey : regionMap.keySet()) {
-271
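
The fix in this hunk is narrow: when rsGroupInfoManager resolves a table to no group, the balancer now falls back to RSGroupInfo.DEFAULT_GROUP instead of keying its maps on null. The guard in isolation, with a plain Map standing in for the Guava multimap:

import java.util.HashMap;
import java.util.Map;

// The null-group fallback introduced above, in isolation. DEFAULT_GROUP stands in
// for RSGroupInfo.DEFAULT_GROUP; tableToGroup is an illustrative resolver table.
final class GroupResolver {
  static final String DEFAULT_GROUP = "default";
  private final Map<String, String> tableToGroup = new HashMap<>();

  String resolve(String tableName) {
    String group = tableToGroup.get(tableName); // may be null for unmapped tables
    if (group == null) {
      group = DEFAULT_GROUP;                    // never key assignment maps on null
    }
    return group;
  }
}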

[14/51] [partial] hbase-site git commit: Published site at cf529f18a9959589fa635f78df4840472526ea2c.

2018-05-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7bcc960d/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AppendTest.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AppendTest.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AppendTest.html
index 3f8844b..cdb9398 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AppendTest.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AppendTest.html
@@ -140,2712 +140,2713 @@
 132public class PerformanceEvaluation extends Configured implements Tool {
 133  static final String RANDOM_SEEK_SCAN = "randomSeekScan";
 134  static final String RANDOM_READ = "randomRead";
-135  private static final Logger LOG = LoggerFactory.getLogger(PerformanceEvaluation.class.getName());
-136  private static final ObjectMapper MAPPER = new ObjectMapper();
-137  static {
-138    MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
-139  }
-140
-141  public static final String TABLE_NAME = "TestTable";
-142  public static final String FAMILY_NAME_BASE = "info";
-143  public static final byte[] FAMILY_ZERO = Bytes.toBytes("info0");
-144  public static final byte[] COLUMN_ZERO = Bytes.toBytes("" + 0);
-145  public static final int DEFAULT_VALUE_LENGTH = 1000;
-146  public static final int ROW_LENGTH = 26;
-147
-148  private static final int ONE_GB = 1024 * 1024 * 1000;
-149  private static final int DEFAULT_ROWS_PER_GB = ONE_GB / DEFAULT_VALUE_LENGTH;
-150  // TODO : should we make this configurable
-151  private static final int TAG_LENGTH = 256;
-152  private static final DecimalFormat FMT = new DecimalFormat("0.##");
-153  private static final MathContext CXT = MathContext.DECIMAL64;
-154  private static final BigDecimal MS_PER_SEC = BigDecimal.valueOf(1000);
-155  private static final BigDecimal BYTES_PER_MB = BigDecimal.valueOf(1024 * 1024);
-156  private static final TestOptions DEFAULT_OPTS = new TestOptions();
-157
-158  private static Map<String, CmdDescriptor> COMMANDS = new TreeMap<>();
-159  private static final Path PERF_EVAL_DIR = new Path("performance_evaluation");
-160
-161  static {
-162    addCommandDescriptor(AsyncRandomReadTest.class, "asyncRandomRead",
-163        "Run async random read test");
-164    addCommandDescriptor(AsyncRandomWriteTest.class, "asyncRandomWrite",
-165        "Run async random write test");
-166    addCommandDescriptor(AsyncSequentialReadTest.class, "asyncSequentialRead",
-167        "Run async sequential read test");
-168    addCommandDescriptor(AsyncSequentialWriteTest.class, "asyncSequentialWrite",
-169        "Run async sequential write test");
-170    addCommandDescriptor(AsyncScanTest.class, "asyncScan",
-171        "Run async scan test (read every row)");
-172    addCommandDescriptor(RandomReadTest.class, RANDOM_READ,
-173      "Run random read test");
-174    addCommandDescriptor(RandomSeekScanTest.class, RANDOM_SEEK_SCAN,
-175      "Run random seek and scan 100 test");
-176    addCommandDescriptor(RandomScanWithRange10Test.class, "scanRange10",
-177      "Run random seek scan with both start and stop row (max 10 rows)");
-178    addCommandDescriptor(RandomScanWithRange100Test.class, "scanRange100",
-179      "Run random seek scan with both start and stop row (max 100 rows)");
-180    addCommandDescriptor(RandomScanWithRange1000Test.class, "scanRange1000",
-181      "Run random seek scan with both start and stop row (max 1000 rows)");
-182    addCommandDescriptor(RandomScanWithRange1Test.class, "scanRange1",
-183      "Run random seek scan with both start and stop row (max 1 rows)");
-184    addCommandDescriptor(RandomWriteTest.class, "randomWrite",
-185      "Run random write test");
-186    addCommandDescriptor(SequentialReadTest.class, "sequentialRead",
-187      "Run sequential read test");
-188    addCommandDescriptor(SequentialWriteTest.class, "sequentialWrite",
-189      "Run sequential write test");
-190    addCommandDescriptor(ScanTest.class, "scan",
-191      "Run scan test (read every row)");
-192    addCommandDescriptor(FilteredScanTest.class, "filterScan",
-193      "Run scan test using a filter to find a specific row based on it's value " +
-194      "(make sure to use --rows=20)");
-195    addCommandDescriptor(IncrementTest.class, "increment",
-196      "Increment on each row; clients overlap on keyspace so some concurrent operations");
-197    addCommandDescriptor(AppendTest.class, "append",
-198      "Append on each row; clients overlap on keyspace so some concurrent operations");
-199    addCommandDescriptor(CheckAndMutateTest.class, "checkAndMutate",
-200      "CheckAndMutate on each row; clients overlap on keyspace so some concurrent operations");
-201    addCommandDescriptor(CheckAndPutTest.class,
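
The static block above is a plain name-to-descriptor registry: each test class is registered under a command name with a help string, and the CLI later resolves the name back to the class. The registry shape on its own, under no HBase assumptions:

import java.util.Map;
import java.util.TreeMap;

// The addCommandDescriptor(...) registry idiom in isolation; TreeMaps keep
// command names sorted for help output.
final class CommandRegistry {
  private final Map<String, Class<?>> commands = new TreeMap<>();
  private final Map<String, String> help = new TreeMap<>();

  void register(Class<?> impl, String name, String description) {
    commands.put(name, impl);
    help.put(name, description);
  }

  Class<?> resolve(String name) {
    Class<?> impl = commands.get(name);
    if (impl == null) {
      throw new IllegalArgumentException("Unknown command: " + name);
    }
    return impl;
  }

  void printHelp() {
    help.forEach((name, desc) -> System.out.println(name + " : " + desc));
  }
}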

[14/51] [partial] hbase-site git commit: Published site at 021f66d11d2cbb7308308093e29e69d6e7661ee9.

2018-05-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/92a26cfb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
index 4a879bb..7d27402 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
@@ -300,7 +300,7 @@
 292  private Map<String, com.google.protobuf.Service> coprocessorServiceHandlers = Maps.newHashMap();
 293
 294  // Track data size in all memstores
-295  private final MemStoreSizing memStoreSize = new MemStoreSizing();
+295  private final MemStoreSizing memStoreSizing = new ThreadSafeMemStoreSizing();
 296  private final RegionServicesForStores regionServicesForStores = new RegionServicesForStores(this);
 297
 298  // Debug possible data loss due to WAL off
@@ -1218,7389 +1218,7399 @@
 1210   * Increase the size of mem store in this region and the size of global mem
 1211   * store
 1212   */
-1213  public void incMemStoreSize(MemStoreSize memStoreSize) {
-1214    if (this.rsAccounting != null) {
-1215      rsAccounting.incGlobalMemStoreSize(memStoreSize);
-1216    }
-1217    long dataSize;
-1218    synchronized (this.memStoreSize) {
-1219      this.memStoreSize.incMemStoreSize(memStoreSize);
-1220      dataSize = this.memStoreSize.getDataSize();
-1221    }
-1222    checkNegativeMemStoreDataSize(dataSize, memStoreSize.getDataSize());
-1223  }
-1224
-1225  public void decrMemStoreSize(MemStoreSize memStoreSize) {
-1226    if (this.rsAccounting != null) {
-1227      rsAccounting.decGlobalMemStoreSize(memStoreSize);
-1228    }
-1229    long size;
-1230    synchronized (this.memStoreSize) {
-1231      this.memStoreSize.decMemStoreSize(memStoreSize);
-1232      size = this.memStoreSize.getDataSize();
+1213  void incMemStoreSize(MemStoreSize mss) {
+1214    incMemStoreSize(mss.getDataSize(), mss.getHeapSize(), mss.getOffHeapSize());
+1215  }
+1216
+1217  void incMemStoreSize(long dataSizeDelta, long heapSizeDelta, long offHeapSizeDelta) {
+1218    if (this.rsAccounting != null) {
+1219      rsAccounting.incGlobalMemStoreSize(dataSizeDelta, heapSizeDelta, offHeapSizeDelta);
+1220    }
+1221    long dataSize =
+1222        this.memStoreSizing.incMemStoreSize(dataSizeDelta, heapSizeDelta, offHeapSizeDelta);
+1223    checkNegativeMemStoreDataSize(dataSize, dataSizeDelta);
+1224  }
+1225
+1226  void decrMemStoreSize(MemStoreSize mss) {
+1227    decrMemStoreSize(mss.getDataSize(), mss.getHeapSize(), mss.getOffHeapSize());
+1228  }
+1229
+1230  void decrMemStoreSize(long dataSizeDelta, long heapSizeDelta, long offHeapSizeDelta) {
+1231    if (this.rsAccounting != null) {
+1232      rsAccounting.decGlobalMemStoreSize(dataSizeDelta, heapSizeDelta, offHeapSizeDelta);
 1233    }
-1234    checkNegativeMemStoreDataSize(size, memStoreSize.getDataSize());
-1235  }
-1236
-1237  private void checkNegativeMemStoreDataSize(long memStoreDataSize, long delta) {
-1238    // This is extremely bad if we make memStoreSize negative. Log as much info on the offending
-1239    // caller as possible. (memStoreSize might be a negative value already -- freeing memory)
-1240    if (memStoreDataSize < 0) {
-1241      LOG.error("Asked to modify this region's (" + this.toString()
-1242          + ") memStoreSize to a negative value which is incorrect. Current memStoreSize="
-1243          + (memStoreDataSize - delta) + ", delta=" + delta, new Exception());
-1244    }
-1245  }
-1246
-1247  @Override
-1248  public RegionInfo getRegionInfo() {
-1249    return this.fs.getRegionInfo();
-1250  }
-1251
-1252  /**
-1253   * @return Instance of {@link RegionServerServices} used by this HRegion.
-1254   * Can be null.
-1255   */
-1256  RegionServerServices getRegionServerServices() {
-1257    return this.rsServices;
-1258  }
-1259
-1260  @Override
-1261  public long getReadRequestsCount() {
-1262    return readRequestsCount.sum();
-1263  }
-1264
-1265  @Override
-1266  public long getFilteredReadRequestsCount() {
-1267    return filteredReadRequestsCount.sum();
-1268  }
-1269
-1270  @Override
-1271  public long getWriteRequestsCount() {
-1272    return writeRequestsCount.sum();
-1273  }
-1274
-1275  @Override
-1276  public long getMemStoreDataSize() {
-1277    return memStoreSize.getDataSize();
-1278  }
-1279
-1280  @Override
-1281  public long getMemStoreHeapSize() {
-1282    return memStoreSize.getHeapSize();
-1283  }
-1284
-1285  @Override
-1286  public long getMemStoreOffHeapSize() {
-1287    return memStoreSize.getOffHeapSize();
-1288  }
-1289
-1290  /** @return store services for this region, to access services required by store level needs */
-1291  public RegionServicesForStores
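
The substance of this hunk is concurrency, not naming: the old code had to synchronize on a plain MemStoreSizing and read the data size back inside the lock, while the new ThreadSafeMemStoreSizing returns the post-increment data size from the update itself, so the negative-size check needs no external lock. A minimal model of such an accumulator on AtomicLong (field names illustrative):

import java.util.concurrent.atomic.AtomicLong;

// Minimal model of a ThreadSafeMemStoreSizing-style accumulator: the increment
// returns the new data size atomically, so callers need no synchronized block.
final class SizingSketch {
  private final AtomicLong dataSize = new AtomicLong();
  private final AtomicLong heapSize = new AtomicLong();
  private final AtomicLong offHeapSize = new AtomicLong();

  long incMemStoreSize(long dataDelta, long heapDelta, long offHeapDelta) {
    heapSize.addAndGet(heapDelta);
    offHeapSize.addAndGet(offHeapDelta);
    return dataSize.addAndGet(dataDelta);  // new value feeds the negative-size check
  }

  long decMemStoreSize(long dataDelta, long heapDelta, long offHeapDelta) {
    return incMemStoreSize(-dataDelta, -heapDelta, -offHeapDelta);
  }
}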

[14/51] [partial] hbase-site git commit: Published site at acd0d1e446c164d9c54bfb461b2d449c8d717c07.

2018-05-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f2065178/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomReadTest.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomReadTest.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomReadTest.html
index 2510283..418c60c 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomReadTest.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomReadTest.html
@@ -77,77 +77,77 @@
 069import org.apache.hadoop.hbase.client.RowMutations;
 070import org.apache.hadoop.hbase.client.Scan;
 071import org.apache.hadoop.hbase.client.Table;
-072import org.apache.hadoop.hbase.filter.BinaryComparator;
-073import org.apache.hadoop.hbase.filter.Filter;
-074import org.apache.hadoop.hbase.filter.FilterAllFilter;
-075import org.apache.hadoop.hbase.filter.FilterList;
-076import org.apache.hadoop.hbase.filter.PageFilter;
-077import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
-078import org.apache.hadoop.hbase.filter.WhileMatchFilter;
-079import org.apache.hadoop.hbase.io.compress.Compression;
-080import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-081import org.apache.hadoop.hbase.io.hfile.RandomDistribution;
-082import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
-083import org.apache.hadoop.hbase.regionserver.BloomType;
-084import org.apache.hadoop.hbase.regionserver.CompactingMemStore;
-085import org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
-086import org.apache.hadoop.hbase.trace.SpanReceiverHost;
-087import org.apache.hadoop.hbase.trace.TraceUtil;
-088import org.apache.hadoop.hbase.util.ByteArrayHashKey;
-089import org.apache.hadoop.hbase.util.Bytes;
-090import org.apache.hadoop.hbase.util.Hash;
-091import org.apache.hadoop.hbase.util.MurmurHash;
-092import org.apache.hadoop.hbase.util.Pair;
-093import org.apache.hadoop.hbase.util.YammerHistogramUtils;
-094import org.apache.hadoop.io.LongWritable;
-095import org.apache.hadoop.io.Text;
-096import org.apache.hadoop.mapreduce.Job;
-097import org.apache.hadoop.mapreduce.Mapper;
-098import org.apache.hadoop.mapreduce.lib.input.NLineInputFormat;
-099import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-100import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
-101import org.apache.hadoop.util.Tool;
-102import org.apache.hadoop.util.ToolRunner;
-103import org.apache.htrace.core.ProbabilitySampler;
-104import org.apache.htrace.core.Sampler;
-105import org.apache.htrace.core.TraceScope;
-106import org.apache.yetus.audience.InterfaceAudience;
-107import org.slf4j.Logger;
-108import org.slf4j.LoggerFactory;
-109import org.apache.hbase.thirdparty.com.google.common.base.MoreObjects;
-110import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-111
-112/**
-113 * Script used evaluating HBase performance and scalability.  Runs a HBase
-114 * client that steps through one of a set of hardcoded tests or 'experiments'
-115 * (e.g. a random reads test, a random writes test, etc.). Pass on the
-116 * command-line which test to run and how many clients are participating in
-117 * this experiment. Run {@code PerformanceEvaluation --help} to obtain usage.
-118 *
-119 * <p>This class sets up and runs the evaluation programs described in
-120 * Section 7, <i>Performance Evaluation</i>, of the <a
-121 * href="http://labs.google.com/papers/bigtable.html">Bigtable</a>
-122 * paper, pages 8-10.
-123 *
-124 * <p>By default, runs as a mapreduce job where each mapper runs a single test
-125 * client. Can also run as a non-mapreduce, multithreaded application by
-126 * specifying {@code --nomapred}. Each client does about 1GB of data, unless
-127 * specified otherwise.
-128 */
-129@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-130public class PerformanceEvaluation extends Configured implements Tool {
-131  static final String RANDOM_SEEK_SCAN = "randomSeekScan";
-132  static final String RANDOM_READ = "randomRead";
-133  private static final Logger LOG = LoggerFactory.getLogger(PerformanceEvaluation.class.getName());
-134  private static final ObjectMapper MAPPER = new ObjectMapper();
-135  static {
-136    MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
-137  }
-138
-139  public static final String TABLE_NAME = "TestTable";
-140  public static final byte[] FAMILY_NAME = Bytes.toBytes("info");
-141  public static final byte[] COLUMN_ZERO = Bytes.toBytes("" + 0);
-142  public static final byte[] QUALIFIER_NAME = COLUMN_ZERO;
+072import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+073import org.apache.hadoop.hbase.filter.BinaryComparator;
+074import org.apache.hadoop.hbase.filter.Filter;
+075import

[14/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/downloads.html
--
diff --git a/downloads.html b/downloads.html
index 099207f..7fad404 100644
--- a/downloads.html
+++ b/downloads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Apache HBase Downloads
 
@@ -366,7 +366,7 @@ under the License. -->
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
 All rights reserved.
 
-  Last Published: 2018-05-03
+  Last Published: 2018-05-04
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/export_control.html
--
diff --git a/export_control.html b/export_control.html
index 762a633..e0a5a62 100644
--- a/export_control.html
+++ b/export_control.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Export Control
 
@@ -331,7 +331,7 @@ for more details.
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
 All rights reserved.
 
-  Last Published: 2018-05-03
+  Last Published: 2018-05-04
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/index.html
--
diff --git a/index.html b/index.html
index a2cff89..9fcef97 100644
--- a/index.html
+++ b/index.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Apache HBase™ Home
 
@@ -409,7 +409,7 @@ Apache HBase is an open-source, distributed, versioned, non-relational database
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
 All rights reserved.
 
-  Last Published: 2018-05-03
+  Last Published: 2018-05-04
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/integration.html
--
diff --git a/integration.html b/integration.html
index 595dcba..f53166f 100644
--- a/integration.html
+++ b/integration.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – CI Management
 
@@ -291,7 +291,7 @@
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
 All rights reserved.
 
-  Last Published: 2018-05-03
+  Last Published: 2018-05-04
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/issue-tracking.html
--
diff --git a/issue-tracking.html b/issue-tracking.html
index 13aacbb..cf58b27 100644
--- a/issue-tracking.html
+++ b/issue-tracking.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Issue Management
 
@@ -288,7 +288,7 @@
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
 All rights reserved.
 
-  Last Published: 2018-05-03
+  Last Published: 2018-05-04
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/license.html
--
diff --git a/license.html b/license.html
index 2f9f8fa..435b1e2 100644
--- a/license.html
+++ b/license.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Licenses
 
@@ -491,7 +491,7 @@
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
 All rights reserved.
 
-  Last Published: 2018-05-03
+  Last Published: 2018-05-04
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/mail-lists.html
--
diff --git a/mail-lists.html b/mail-lists.html
index f41f35f..1133e92 100644
--- a/mail-lists.html
+++ b/mail-lists.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Mailing Lists
 
@@ -341,7 +341,7 @@
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
 All rights reserved.
 
-  Last Published: 2018-05-03
+  Last Published: 2018-05-04
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/metrics.html
--
diff --git a/metrics.html b/metrics.html
index fa63090..75f8027 100644
--- a/metrics.html
+++ b/metrics.html

[14/51] [partial] hbase-site git commit: Published site at 2912c953551bedbfbf30c32c156ed7bb187d54c3.

2018-04-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d220bc5e/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
index 8302e28..c370eb9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
@@ -2113,3031 +2113,3033 @@
 2105            errors.reportError(ERROR_CODE.ORPHAN_TABLE_STATE,
 2106                tableName + " unable to delete dangling table state " + tableState);
 2107          }
-2108        } else {
-2109          errors.reportError(ERROR_CODE.ORPHAN_TABLE_STATE,
-2110              tableName + " has dangling table state " + tableState);
-2111        }
-2112      }
-2113    }
-2114    // check that all tables have states
-2115    for (TableName tableName : tablesInfo.keySet()) {
-2116      if (isTableIncluded(tableName) && !tableStates.containsKey(tableName)) {
-2117        if (fixMeta) {
-2118          MetaTableAccessor.updateTableState(connection, tableName, TableState.State.ENABLED);
-2119          TableState newState = MetaTableAccessor.getTableState(connection, tableName);
-2120          if (newState == null) {
-2121            errors.reportError(ERROR_CODE.NO_TABLE_STATE,
-2122                "Unable to change state for table " + tableName + " in meta ");
-2123          }
-2124        } else {
-2125          errors.reportError(ERROR_CODE.NO_TABLE_STATE,
-2126              tableName + " has no state in meta ");
-2127        }
-2128      }
-2129    }
-2130  }
-2131
-2132  private void preCheckPermission() throws IOException, AccessDeniedException {
-2133    if (shouldIgnorePreCheckPermission()) {
-2134      return;
-2135    }
-2136
-2137    Path hbaseDir = FSUtils.getRootDir(getConf());
-2138    FileSystem fs = hbaseDir.getFileSystem(getConf());
-2139    UserProvider userProvider = UserProvider.instantiate(getConf());
-2140    UserGroupInformation ugi = userProvider.getCurrent().getUGI();
-2141    FileStatus[] files = fs.listStatus(hbaseDir);
-2142    for (FileStatus file : files) {
-2143      try {
-2144        FSUtils.checkAccess(ugi, file, FsAction.WRITE);
-2145      } catch (AccessDeniedException ace) {
-2146        LOG.warn("Got AccessDeniedException when preCheckPermission ", ace);
-2147        errors.reportError(ERROR_CODE.WRONG_USAGE, "Current user " + ugi.getUserName()
-2148          + " does not have write perms to " + file.getPath()
-2149          + ". Please rerun hbck as hdfs user " + file.getOwner());
-2150        throw ace;
-2151      }
-2152    }
-2153  }
-2154
-2155  /**
-2156   * Deletes region from meta table
-2157   */
-2158  private void deleteMetaRegion(HbckInfo hi) throws IOException {
-2159    deleteMetaRegion(hi.metaEntry.getRegionName());
-2160  }
-2161
-2162  /**
-2163   * Deletes region from meta table
-2164   */
-2165  private void deleteMetaRegion(byte[] metaKey) throws IOException {
-2166    Delete d = new Delete(metaKey);
-2167    meta.delete(d);
-2168    LOG.info("Deleted " + Bytes.toString(metaKey) + " from META" );
-2169  }
-2170
-2171  /**
-2172   * Reset the split parent region info in meta table
-2173   */
-2174  private void resetSplitParent(HbckInfo hi) throws IOException {
-2175    RowMutations mutations = new RowMutations(hi.metaEntry.getRegionName());
-2176    Delete d = new Delete(hi.metaEntry.getRegionName());
-2177    d.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITA_QUALIFIER);
-2178    d.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITB_QUALIFIER);
-2179    mutations.add(d);
-2180
-2181    RegionInfo hri = RegionInfoBuilder.newBuilder(hi.metaEntry)
-2182        .setOffline(false)
-2183        .setSplit(false)
-2184        .build();
-2185    Put p = MetaTableAccessor.makePutFromRegionInfo(hri, EnvironmentEdgeManager.currentTime());
-2186    mutations.add(p);
-2187
-2188    meta.mutateRow(mutations);
-2189    LOG.info("Reset split parent " + hi.metaEntry.getRegionNameAsString() + " in META" );
-2190  }
-2191
-2192  /**
-2193   * This backwards-compatibility wrapper for permanently offlining a region
-2194   * that should not be alive.  If the region server does not support the
-2195   * "offline" method, it will use the closest unassign method instead.  This
-2196   * will basically work until one attempts to disable or delete the affected
-2197   * table.  The problem has to do with in-memory only master state, so
-2198   * restarting the HMaster or failing over to another should fix this.
-2199   */
-2200  private void offline(byte[] regionName) throws IOException {
-2201    String regionString = Bytes.toStringBinary(regionName);
-2202    if (!rsSupportsOffline) {
-2203      LOG.warn("Using unassign region " + regionString
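
resetSplitParent() above is a textbook RowMutations use: one Delete (dropping the SPLITA/SPLITB columns) and one Put (rewriting the region info) are applied atomically to a single meta row. The same client-API pattern against an arbitrary table; family, qualifiers, and value here are placeholders:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

// Atomic delete-two-columns-and-rewrite-one on a single row, as resetSplitParent()
// does against hbase:meta. Family/qualifier/value names are placeholders.
final class RowMutationsSketch {
  static void clearAndRewrite(Table table, byte[] row) throws IOException {
    byte[] family = Bytes.toBytes("info");
    RowMutations mutations = new RowMutations(row);
    Delete d = new Delete(row);
    d.addColumn(family, Bytes.toBytes("splitA"));
    d.addColumn(family, Bytes.toBytes("splitB"));
    mutations.add(d);
    Put p = new Put(row);
    p.addColumn(family, Bytes.toBytes("regioninfo"), Bytes.toBytes("rewritten"));
    mutations.add(p);
    table.mutateRow(mutations);   // all changes commit as one row transaction
  }
}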

[14/51] [partial] hbase-site git commit: Published site at 2a2258656b2fcd92b967131b6c1f037363553bc4.

2018-03-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e0fb1fde/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.KeyValueStatsCollector.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.KeyValueStatsCollector.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.KeyValueStatsCollector.html
index 50caf18..61bf913 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.KeyValueStatsCollector.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.KeyValueStatsCollector.html
@@ -45,773 +45,774 @@
 037import java.util.TimeZone;
 038import java.util.concurrent.TimeUnit;
 039
-040import org.apache.commons.cli.CommandLine;
-041import org.apache.commons.cli.CommandLineParser;
-042import org.apache.commons.cli.HelpFormatter;
-043import org.apache.commons.cli.Option;
-044import org.apache.commons.cli.OptionGroup;
-045import org.apache.commons.cli.Options;
-046import org.apache.commons.cli.ParseException;
-047import org.apache.commons.cli.PosixParser;
-048import org.apache.commons.lang3.StringUtils;
-049import org.apache.hadoop.conf.Configuration;
-050import org.apache.hadoop.conf.Configured;
-051import org.apache.hadoop.fs.FileSystem;
-052import org.apache.hadoop.fs.Path;
-053import org.apache.hadoop.hbase.Cell;
-054import org.apache.hadoop.hbase.CellComparator;
-055import org.apache.hadoop.hbase.CellUtil;
-056import org.apache.hadoop.hbase.HBaseConfiguration;
-057import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-058import org.apache.hadoop.hbase.HConstants;
-059import org.apache.hadoop.hbase.HRegionInfo;
-060import org.apache.hadoop.hbase.KeyValue;
-061import org.apache.hadoop.hbase.KeyValueUtil;
-062import org.apache.hadoop.hbase.PrivateCellUtil;
-063import org.apache.hadoop.hbase.TableName;
-064import org.apache.hadoop.hbase.Tag;
-065import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-066import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
-067import org.apache.hadoop.hbase.mob.MobUtils;
-068import org.apache.hadoop.hbase.regionserver.HStoreFile;
-069import org.apache.hadoop.hbase.regionserver.TimeRangeTracker;
-070import org.apache.hadoop.hbase.util.BloomFilter;
-071import org.apache.hadoop.hbase.util.BloomFilterFactory;
-072import org.apache.hadoop.hbase.util.BloomFilterUtil;
-073import org.apache.hadoop.hbase.util.Bytes;
-074import org.apache.hadoop.hbase.util.FSUtils;
-075import org.apache.hadoop.hbase.util.HFileArchiveUtil;
-076import org.apache.hadoop.util.Tool;
-077import org.apache.hadoop.util.ToolRunner;
-078import org.apache.yetus.audience.InterfaceAudience;
-079import org.apache.yetus.audience.InterfaceStability;
-080import org.slf4j.Logger;
-081import org.slf4j.LoggerFactory;
-082
-083import com.codahale.metrics.ConsoleReporter;
-084import com.codahale.metrics.Counter;
-085import com.codahale.metrics.Gauge;
-086import com.codahale.metrics.Histogram;
-087import com.codahale.metrics.Meter;
-088import com.codahale.metrics.MetricFilter;
-089import com.codahale.metrics.MetricRegistry;
-090import com.codahale.metrics.ScheduledReporter;
-091import com.codahale.metrics.Snapshot;
-092import com.codahale.metrics.Timer;
-093
-094/**
-095 * Implements pretty-printing functionality for {@link HFile}s.
-096 */
-097@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-098@InterfaceStability.Evolving
-099public class HFilePrettyPrinter extends Configured implements Tool {
-100
-101  private static final Logger LOG = LoggerFactory.getLogger(HFilePrettyPrinter.class);
-102
-103  private Options options = new Options();
-104
-105  private boolean verbose;
-106  private boolean printValue;
-107  private boolean printKey;
-108  private boolean shouldPrintMeta;
-109  private boolean printBlockIndex;
-110  private boolean printBlockHeaders;
-111  private boolean printStats;
-112  private boolean checkRow;
-113  private boolean checkFamily;
-114  private boolean isSeekToRow = false;
-115  private boolean checkMobIntegrity = false;
-116  private Map<String, List<Path>> mobFileLocations;
-117  private static final int FOUND_MOB_FILES_CACHE_CAPACITY = 50;
-118  private static final int MISSING_MOB_FILES_CACHE_CAPACITY = 20;
-119  private PrintStream out = System.out;
-120  private PrintStream err = System.err;
-121
-122  /**
-123   * The row which the user wants to specify and print all the KeyValues for.
-124   */
-125  private byte[] row = null;
-126
-127  private List<Path> files = new ArrayList<>();
-128  private int count;
-129
-130  private static final String FOUR_SPACES = "    ";
-131
-132  public HFilePrettyPrinter() {
-133    super();
-134    init();
-135  }
-136
-137  public HFilePrettyPrinter(Configuration conf) {
-138    super(conf);
-139    init();
-140  }
-141
-142  private void init() {
-143    options.addOption("v",
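
init() above assembles a commons-cli Options set one addOption(...) at a time and parses argv against it later. The declare-parse-query flow in miniature (flag names illustrative; DefaultParser is the non-deprecated replacement for the PosixParser imported above):

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

// Declare flags, parse argv, query results: the same flow init() sets up.
final class CliSketch {
  public static void main(String[] args) throws ParseException {
    Options options = new Options();
    options.addOption("v", "verbose", false, "verbose output");
    options.addOption("f", "file", true, "file to scan (full path)");
    CommandLine cmd = new DefaultParser().parse(options, args);
    boolean verbose = cmd.hasOption("v");
    String file = cmd.getOptionValue("f");  // null when the flag is absent
    System.out.println("verbose=" + verbose + ", file=" + file);
  }
}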

[14/51] [partial] hbase-site git commit: Published site at e468b4022f76688851b3e0c34722f01a56bd624f.

2018-03-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16541468/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index c36dd6e..314ab37 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -239,15 +239,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ServerName
-ServerMetricsBuilder.serverName
+HRegionLocation.serverName
 
 
 private ServerName
-ServerMetricsBuilder.ServerMetricsImpl.serverName
+ServerMetricsBuilder.serverName
 
 
 private ServerName
-HRegionLocation.serverName
+ServerMetricsBuilder.ServerMetricsImpl.serverName
 
 
 
@@ -310,33 +310,33 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ServerName
-ClusterStatus.getMasterName()
-Deprecated.
-
-
-
-ServerName
 ClusterMetrics.getMasterName()
 Returns detailed information about the current master ServerName.
 
 
-
+
 ServerName
-ServerLoad.getServerName()
+ClusterStatus.getMasterName()
 Deprecated.
 
 
+
+ServerName
+HRegionLocation.getServerName()
+
 
 ServerName
-ServerMetrics.getServerName()
+ServerMetricsBuilder.ServerMetricsImpl.getServerName()
 
 
 ServerName
-ServerMetricsBuilder.ServerMetricsImpl.getServerName()
+ServerMetrics.getServerName()
 
 
 ServerName
-HRegionLocation.getServerName()
+ServerLoad.getServerName()
+Deprecated.
+
 
 
 ServerName
@@ -409,13 +409,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterStatus.getBackupMasterNames()
-Deprecated.
-
+ClusterMetrics.getBackupMasterNames()
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterMetrics.getBackupMasterNames()
+ClusterStatus.getBackupMasterNames()
+Deprecated.
+
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
@@ -432,13 +432,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterStatus.getDeadServerNames()
-Deprecated.
-
+ClusterMetrics.getDeadServerNames()
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterMetrics.getDeadServerNames()
+ClusterStatus.getDeadServerNames()
+Deprecated.
+
 
 
 private https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerLoad
@@ -452,13 +452,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerMetrics
-ClusterStatus.getLiveServerMetrics()
-Deprecated.
-
+ClusterMetrics.getLiveServerMetrics()
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerMetrics
-ClusterMetrics.getLiveServerMetrics()
+ClusterStatus.getLiveServerMetrics()
+Deprecated.
+
 
 
 static PairRegionInfo,ServerName
@@ -865,11 +865,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ServerName
-AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.serverName
+AsyncServerRequestRpcRetryingCaller.serverName
 
 
 private ServerName
-AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder.serverName
+ConnectionUtils.ShortCircuitingClusterConnection.serverName
 
 
 private ServerName
@@ -877,11 +877,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ServerName
-AsyncServerRequestRpcRetryingCaller.serverName
+AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.serverName
 
 
 private ServerName
-ConnectionUtils.ShortCircuitingClusterConnection.serverName
+AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder.serverName
 
 
 
@@ -967,29 +967,29 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-CompletableFuture<List<ServerName>>

[14/51] [partial] hbase-site git commit: Published site at 64061f896fe21512504e3886a400759e88b519da.

2018-03-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f1ebf5b6/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
index d3eae32..3332142 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
@@ -140,46 +140,50 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-org.apache.hadoop.hbase.master.snapshot
+org.apache.hadoop.hbase.master.replication
 
 
 
-org.apache.hadoop.hbase.mob
+org.apache.hadoop.hbase.master.snapshot
 
 
 
-org.apache.hadoop.hbase.regionserver
+org.apache.hadoop.hbase.mob
 
 
 
-org.apache.hadoop.hbase.regionserver.handler
+org.apache.hadoop.hbase.regionserver
 
 
 
-org.apache.hadoop.hbase.rest.client
+org.apache.hadoop.hbase.regionserver.handler
 
 
 
-org.apache.hadoop.hbase.rsgroup
+org.apache.hadoop.hbase.rest.client
 
 
 
-org.apache.hadoop.hbase.security.access
+org.apache.hadoop.hbase.rsgroup
 
 
 
-org.apache.hadoop.hbase.security.visibility
+org.apache.hadoop.hbase.security.access
 
 
 
-org.apache.hadoop.hbase.snapshot
+org.apache.hadoop.hbase.security.visibility
 
 
 
-org.apache.hadoop.hbase.tmpl.master
+org.apache.hadoop.hbase.snapshot
 
 
 
+org.apache.hadoop.hbase.tmpl.master
+
+
+
 org.apache.hadoop.hbase.util
 
 
@@ -443,14 +447,14 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 TableDescriptor
-HTable.getDescriptor()
-
-
-TableDescriptor
 Table.getDescriptor()
 Gets the table 
descriptor for this table.
 
 
+
+TableDescriptor
+HTable.getDescriptor()
+
 
 TableDescriptor
 Admin.getDescriptor(TableName tableName)
@@ -503,51 +507,51 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 CompletableFuture<TableDescriptor>
-AsyncAdmin.getDescriptor(TableName tableName)
-Method for getting the tableDescriptor
-
+AsyncHBaseAdmin.getDescriptor(TableName tableName)
 
 
 CompletableFuture<TableDescriptor>
-RawAsyncHBaseAdmin.getDescriptor(TableName tableName)
+AsyncAdmin.getDescriptor(TableName tableName)
+Method for getting the tableDescriptor
+
 
 
 CompletableFuture<TableDescriptor>
-AsyncHBaseAdmin.getDescriptor(TableName tableName)
+RawAsyncHBaseAdmin.getDescriptor(TableName tableName)
 
 
 private CompletableFuture<List<TableDescriptor>>
 RawAsyncHBaseAdmin.getTableDescriptors(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request)
 
 
-default CompletableFuture<List<TableDescriptor>>
-AsyncAdmin.listTableDescriptors()
-List all the userspace tables.
-
-
-
 List<TableDescriptor>
 Admin.listTableDescriptors()
 List all the userspace tables.
 
 
-
+
 List<TableDescriptor>
 HBaseAdmin.listTableDescriptors()
 
+
+default CompletableFuture<List<TableDescriptor>>
+AsyncAdmin.listTableDescriptors()
+List all the userspace tables.
+
+
 
 CompletableFuture<List<TableDescriptor>>
-AsyncAdmin.listTableDescriptors(boolean includeSysTables)
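The listTableDescriptors rows above document the blocking Admin variant ("List all the userspace tables"). A minimal sketch of that call, assuming an open Admin handle:

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.TableDescriptor;

public class ListTablesSketch {
  // Lists userspace tables only; system tables need the includeSysTables overload.
  static void printUserTables(Admin admin) throws IOException {
    List<TableDescriptor> tables = admin.listTableDescriptors();
    for (TableDescriptor td : tables) {
      System.out.println(td.getTableName());
    }
  }
}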

[14/51] [partial] hbase-site git commit: Published site at 4cb40e6d846ce1f28ffb40d388c9efb753197813.

2018-03-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4dc2a2e8/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotDescription.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotDescription.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotDescription.html
index 08add92..7edabda 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotDescription.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotDescription.html
@@ -137,7 +137,9 @@
 
 
 CompletableFuture<List<SnapshotDescription>>
-AsyncHBaseAdmin.listSnapshots()
+AsyncAdmin.listSnapshots()
+List completed snapshots.
+
 
 
 List<SnapshotDescription>
@@ -146,22 +148,22 @@
 
 
 
-List<SnapshotDescription>
-HBaseAdmin.listSnapshots()
+CompletableFuture<List<SnapshotDescription>>
+RawAsyncHBaseAdmin.listSnapshots()
 
 
-CompletableFuture<List<SnapshotDescription>>
-AsyncAdmin.listSnapshots()
-List completed snapshots.
-
+List<SnapshotDescription>
+HBaseAdmin.listSnapshots()
 
 
 CompletableFuture<List<SnapshotDescription>>
-RawAsyncHBaseAdmin.listSnapshots()
+AsyncHBaseAdmin.listSnapshots()
 
 
 CompletableFuture<List<SnapshotDescription>>
-AsyncHBaseAdmin.listSnapshots(Pattern pattern)
+AsyncAdmin.listSnapshots(Pattern pattern)
+List all the completed snapshots matching the given pattern.
+
 
 
 List<SnapshotDescription>
@@ -170,18 +172,16 @@
 
 
 
-List<SnapshotDescription>
-HBaseAdmin.listSnapshots(Pattern pattern)
+CompletableFuture<List<SnapshotDescription>>
+RawAsyncHBaseAdmin.listSnapshots(Pattern pattern)
 
 
-CompletableFuture<List<SnapshotDescription>>
-AsyncAdmin.listSnapshots(Pattern pattern)
-List all the completed snapshots matching the given pattern.
-
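The rows above pair each blocking listSnapshots call with its CompletableFuture-returning async counterpart. A short sketch of the pattern-matching variant ("List all the completed snapshots matching the given pattern"); the nightly-.* pattern is made up for illustration:

import java.io.IOException;
import java.util.List;
import java.util.regex.Pattern;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.SnapshotDescription;

public class ListSnapshotsSketch {
  // Prints completed snapshots whose names match the given pattern.
  static void printNightlySnapshots(Admin admin) throws IOException {
    List<SnapshotDescription> snaps = admin.listSnapshots(Pattern.compile("nightly-.*"));
    for (SnapshotDescription snap : snaps) {
      System.out.println(snap.getName());
    }
  }
}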

[14/51] [partial] hbase-site git commit: Published site at 8ab7b20f48951d77945181024f5e15842bc253c4.

2018-03-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6eb695c8/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.html
index 9ee12ef..4c42811 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.html
@@ -51,889 +51,893 @@
 043import 
org.apache.hadoop.hbase.HConstants;
 044import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
 045import 
org.apache.hadoop.hbase.regionserver.HRegion.FlushResult;
-046import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-047import 
org.apache.hadoop.hbase.trace.TraceUtil;
-048import 
org.apache.hadoop.hbase.util.Bytes;
-049import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-050import 
org.apache.hadoop.hbase.util.HasThread;
-051import 
org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
-052import 
org.apache.hadoop.hbase.util.Threads;
-053import 
org.apache.hadoop.ipc.RemoteException;
-054import 
org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
-055import 
org.apache.htrace.core.TraceScope;
-056import 
org.apache.yetus.audience.InterfaceAudience;
-057import org.slf4j.Logger;
-058import org.slf4j.LoggerFactory;
-059
-060/**
-061 * Thread that flushes cache on request
-062 *
-063 * NOTE: This class extends Thread rather 
than Chore because the sleep time
-064 * can be interrupted when there is 
something to do, rather than the Chore
-065 * sleep time which is invariant.
-066 *
-067 * @see FlushRequester
-068 */
-069@InterfaceAudience.Private
-070class MemStoreFlusher implements 
FlushRequester {
-071  private static final Logger LOG = 
LoggerFactory.getLogger(MemStoreFlusher.class);
-072
-073  private Configuration conf;
-074  // These two data members go together.  
Any entry in the one must have
-075  // a corresponding entry in the 
other.
-076  private final 
BlockingQueueFlushQueueEntry flushQueue = new DelayQueue();
-077  private final MapRegion, 
FlushRegionEntry regionsInQueue = new HashMap();
-078  private AtomicBoolean wakeupPending = 
new AtomicBoolean();
-079
-080  private final long 
threadWakeFrequency;
-081  private final HRegionServer server;
-082  private final ReentrantReadWriteLock 
lock = new ReentrantReadWriteLock();
-083  private final Object blockSignal = new 
Object();
-084
-085  private long blockingWaitTime;
-086  private final LongAdder 
updatesBlockedMsHighWater = new LongAdder();
-087
-088  private final FlushHandler[] 
flushHandlers;
-089  private List<FlushRequestListener> flushRequestListeners = new ArrayList<>(1);
-090
-091  private FlushType flushType;
-092
-093  /**
-094   * Singleton instance inserted into 
flush queue used for signaling.
-095   */
-096  private static final FlushQueueEntry 
WAKEUPFLUSH_INSTANCE = new FlushQueueEntry() {
-097@Override
-098public long getDelay(TimeUnit unit) 
{
-099  return 0;
-100}
-101
-102@Override
-103public int compareTo(Delayed o) {
-104  return -1;
-105}
-106
-107@Override
-108public boolean equals(Object obj) {
-109  return obj == this;
-110}
-111
-112@Override
-113public int hashCode() {
-114  return 42;
-115}
-116  };
+046import 
org.apache.hadoop.hbase.trace.TraceUtil;
+047import 
org.apache.hadoop.hbase.util.Bytes;
+048import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+049import 
org.apache.hadoop.hbase.util.HasThread;
+050import 
org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
+051import 
org.apache.hadoop.hbase.util.Threads;
+052import 
org.apache.hadoop.ipc.RemoteException;
+053import 
org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
+054import 
org.apache.htrace.core.TraceScope;
+055import 
org.apache.yetus.audience.InterfaceAudience;
+056import org.slf4j.Logger;
+057import org.slf4j.LoggerFactory;
+058
+059/**
+060 * Thread that flushes cache on request
+061 *
+062 * NOTE: This class extends Thread rather 
than Chore because the sleep time
+063 * can be interrupted when there is 
something to do, rather than the Chore
+064 * sleep time which is invariant.
+065 *
+066 * @see FlushRequester
+067 */
+068@InterfaceAudience.Private
+069class MemStoreFlusher implements 
FlushRequester {
+070  private static final Logger LOG = 
LoggerFactory.getLogger(MemStoreFlusher.class);
+071
+072  private Configuration conf;
+073  // These two data members go together.  
Any entry in the one must have
+074  // a corresponding entry in the 
other.
+075  private final BlockingQueue<FlushQueueEntry> flushQueue = new DelayQueue<>();
+076  private final Map<Region, FlushRegionEntry> regionsInQueue = new HashMap<>();
+077  private AtomicBoolean wakeupPending = 
new AtomicBoolean();
+078
+079  private final 
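The MemStoreFlusher javadoc above explains the design: a plain thread whose sleep can be cut short when there is work to do, rather than a fixed-period Chore, with a zero-delay sentinel entry in the DelayQueue used for wakeups. A self-contained sketch of that queue pattern, with illustrative names (MiniFlusher, WAKEUP) that are not HBase API:

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.DelayQueue;
import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;

class MiniFlusher implements Runnable {
  static class Entry implements Delayed {
    final long whenNanos;
    Entry(long delayMs) {
      this.whenNanos = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(delayMs);
    }
    @Override public long getDelay(TimeUnit unit) {
      return unit.convert(whenNanos - System.nanoTime(), TimeUnit.NANOSECONDS);
    }
    @Override public int compareTo(Delayed o) {
      return Long.compare(getDelay(TimeUnit.NANOSECONDS), o.getDelay(TimeUnit.NANOSECONDS));
    }
  }

  // Zero-delay sentinel: offering it wakes the poll() below immediately,
  // which is the "sleep time can be interrupted" behaviour the javadoc names.
  private static final Entry WAKEUP = new Entry(0);
  private final BlockingQueue<Entry> flushQueue = new DelayQueue<>();

  void requestFlush(long delayMs) { flushQueue.offer(new Entry(delayMs)); }
  void wakeup() { flushQueue.offer(WAKEUP); }

  @Override public void run() {
    try {
      while (!Thread.currentThread().isInterrupted()) {
        Entry e = flushQueue.poll(10, TimeUnit.SECONDS); // interruptible wait
        if (e == null || e == WAKEUP) continue;          // timeout or wakeup only
        flush(e);
      }
    } catch (InterruptedException ie) {
      Thread.currentThread().interrupt();
    }
  }

  private void flush(Entry e) { /* flush one region's memstore here */ }
}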

[14/51] [partial] hbase-site git commit: Published site at 00095a2ef9442e3fd86c04876c9d91f2f8b23ad8.

2018-03-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bd675fa3/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.StoreFileSplitter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.StoreFileSplitter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.StoreFileSplitter.html
index d984f0e..c1f4d85 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.StoreFileSplitter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.StoreFileSplitter.html
@@ -115,753 +115,756 @@
 107  final RegionInfo regionToSplit, 
final byte[] splitRow) throws IOException {
 108super(env, regionToSplit);
 109preflightChecks(env, true);
-110this.bestSplitRow = splitRow;
-111checkSplittable(env, regionToSplit, 
bestSplitRow);
-112final TableName table = 
regionToSplit.getTable();
-113final long rid = 
getDaughterRegionIdTimestamp(regionToSplit);
-114this.daughter_1_RI = 
RegionInfoBuilder.newBuilder(table)
-115
.setStartKey(regionToSplit.getStartKey())
-116.setEndKey(bestSplitRow)
-117.setSplit(false)
-118.setRegionId(rid)
-119.build();
-120this.daughter_2_RI = 
RegionInfoBuilder.newBuilder(table)
-121.setStartKey(bestSplitRow)
-122
.setEndKey(regionToSplit.getEndKey())
-123.setSplit(false)
-124.setRegionId(rid)
-125.build();
-126TableDescriptor htd = 
env.getMasterServices().getTableDescriptors().get(getTableName());
-127
if(htd.getRegionSplitPolicyClassName() != null) {
-128  // Since we don't have region 
reference here, creating the split policy instance without it.
-129  // This can be used to invoke 
methods which don't require Region reference. This instantiation
-130  // of a class on Master-side though 
it only makes sense on the RegionServer-side is
-131  // for Phoenix Local Indexing. 
Refer HBASE-12583 for more information.
-132  Class<? extends RegionSplitPolicy> clazz =
-133  RegionSplitPolicy.getSplitPolicyClass(htd, env.getMasterConfiguration());
-134  this.splitPolicy = 
ReflectionUtils.newInstance(clazz, env.getMasterConfiguration());
-135}
-136  }
-137
-138  /**
-139   * Check whether the region is 
splittable
-140   * @param env MasterProcedureEnv
-141   * @param regionToSplit parent Region 
to be split
-142   * @param splitRow if splitRow is not 
specified, will first try to get bestSplitRow from RS
-143   * @throws IOException
-144   */
-145  private void checkSplittable(final 
MasterProcedureEnv env,
-146  final RegionInfo regionToSplit, 
final byte[] splitRow) throws IOException {
-147// Ask the remote RS if this region 
is splittable.
-148// If we get an IOE, report it along 
w/ the failure so can see why we are not splittable at this time.
-149if(regionToSplit.getReplicaId() != 
RegionInfo.DEFAULT_REPLICA_ID) {
-150  throw new IllegalArgumentException 
("Can't invoke split on non-default regions directly");
-151}
-152RegionStateNode node =
-153
env.getAssignmentManager().getRegionStates().getRegionStateNode(getParentRegion());
-154IOException splittableCheckIOE = 
null;
-155boolean splittable = false;
-156if (node != null) {
-157  try {
-158if (bestSplitRow == null || 
bestSplitRow.length == 0) {
-159  LOG.info("splitKey isn't 
explicitly specified, " + " will try to find a best split key from RS");
-160}
-161// Always set bestSplitRow 
request as true here,
-162// need to call Region#checkSplit 
to check it splittable or not
-163GetRegionInfoResponse response 
=
-164
Util.getRegionInfoResponse(env, node.getRegionLocation(), node.getRegionInfo(), 
true);
-165if(bestSplitRow == null || 
bestSplitRow.length == 0) {
-166  bestSplitRow = 
response.hasBestSplitRow() ? response.getBestSplitRow().toByteArray() : null;
-167}
-168splittable = response.hasSplittable() && response.getSplittable();
-169
-170if (LOG.isDebugEnabled()) {
-171  LOG.debug("Splittable=" + 
splittable + " " + node.toShortString());
-172}
-173  } catch (IOException e) {
-174splittableCheckIOE = e;
-175  }
-176}
-177
-178if (!splittable) {
-179  IOException e = new 
IOException(regionToSplit.getShortNameToLog() + " NOT splittable");
-180  if (splittableCheckIOE != null) 
e.initCause(splittableCheckIOE);
-181  throw e;
-182}
-183
-184if(bestSplitRow == null || 
bestSplitRow.length == 0) {
-185  throw new 
DoNotRetryIOException("Region not splittable because bestSplitPoint = null, "
-186  + "maybe table is too small for 
auto split. For force split, try specifying split 

[14/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironmentForCoreCoprocessors.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironmentForCoreCoprocessors.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironmentForCoreCoprocessors.html
index b99f924..2bb6cea 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironmentForCoreCoprocessors.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironmentForCoreCoprocessors.html
@@ -37,1779 +37,1734 @@
 029import java.util.UUID;
 030import 
java.util.concurrent.ConcurrentHashMap;
 031import 
java.util.concurrent.ConcurrentMap;
-032import java.util.regex.Matcher;
-033
-034import 
org.apache.commons.collections4.map.AbstractReferenceMap;
-035import 
org.apache.commons.collections4.map.ReferenceMap;
-036import 
org.apache.hadoop.conf.Configuration;
-037import org.apache.hadoop.fs.FileSystem;
-038import org.apache.hadoop.fs.Path;
-039import org.apache.hadoop.hbase.Cell;
-040import 
org.apache.hadoop.hbase.CompareOperator;
-041import 
org.apache.hadoop.hbase.Coprocessor;
-042import 
org.apache.hadoop.hbase.HBaseConfiguration;
-043import 
org.apache.hadoop.hbase.HConstants;
-044import 
org.apache.hadoop.hbase.RawCellBuilder;
-045import 
org.apache.hadoop.hbase.RawCellBuilderFactory;
-046import 
org.apache.hadoop.hbase.ServerName;
-047import 
org.apache.hadoop.hbase.SharedConnection;
-048import 
org.apache.hadoop.hbase.client.Append;
-049import 
org.apache.hadoop.hbase.client.Connection;
-050import 
org.apache.hadoop.hbase.client.Delete;
-051import 
org.apache.hadoop.hbase.client.Durability;
-052import 
org.apache.hadoop.hbase.client.Get;
-053import 
org.apache.hadoop.hbase.client.Increment;
-054import 
org.apache.hadoop.hbase.client.Mutation;
-055import 
org.apache.hadoop.hbase.client.Put;
-056import 
org.apache.hadoop.hbase.client.RegionInfo;
-057import 
org.apache.hadoop.hbase.client.Result;
-058import 
org.apache.hadoop.hbase.client.Scan;
-059import 
org.apache.hadoop.hbase.client.TableDescriptor;
-060import 
org.apache.hadoop.hbase.coprocessor.BaseEnvironment;
-061import 
org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
-062import 
org.apache.hadoop.hbase.coprocessor.CoprocessorException;
-063import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-064import 
org.apache.hadoop.hbase.coprocessor.CoprocessorService;
-065import 
org.apache.hadoop.hbase.coprocessor.CoprocessorServiceBackwardCompatiblity;
-066import 
org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
-067import 
org.apache.hadoop.hbase.coprocessor.EndpointObserver;
-068import 
org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
-069import 
org.apache.hadoop.hbase.coprocessor.MetricsCoprocessor;
-070import 
org.apache.hadoop.hbase.coprocessor.ObserverContext;
-071import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
-072import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-073import 
org.apache.hadoop.hbase.coprocessor.RegionObserver;
-074import 
org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-075import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-076import 
org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-077import 
org.apache.hadoop.hbase.io.Reference;
-078import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-079import 
org.apache.hadoop.hbase.metrics.MetricRegistry;
-080import 
org.apache.hadoop.hbase.regionserver.Region.Operation;
-081import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
-082import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
-083import 
org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
-084import 
org.apache.hadoop.hbase.security.User;
-085import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.CoprocessorClassLoader;
-088import 
org.apache.hadoop.hbase.util.Pair;
-089import 
org.apache.hadoop.hbase.wal.WALEdit;
-090import 
org.apache.hadoop.hbase.wal.WALKey;
-091import 
org.apache.yetus.audience.InterfaceAudience;
-092import org.slf4j.Logger;
-093import org.slf4j.LoggerFactory;
-094
-095/**
-096 * Implements the coprocessor environment 
and runtime support for coprocessors
-097 * loaded within a {@link Region}.
-098 */
-099@InterfaceAudience.Private
-100public class RegionCoprocessorHost
-101extends CoprocessorHost<RegionCoprocessor, RegionCoprocessorEnvironment> {
-102
-103  private static final Logger LOG = 
LoggerFactory.getLogger(RegionCoprocessorHost.class);
-104  // The shared data map
-105  private static final 
ReferenceMapString, 

[14/51] [partial] hbase-site git commit: Published site at 31da4d0bce69b3a47066a5df675756087ce4dc60.

2018-03-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a754d895/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
index 90150e6..36ebb94 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class HRegion.WriteState
+static class HRegion.WriteState
 extends Object
 
 
@@ -239,7 +239,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 flushing
-volatile boolean flushing
+volatile boolean flushing
 
 
 
@@ -248,7 +248,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 flushRequested
-volatile boolean flushRequested
+volatile boolean flushRequested
 
 
 
@@ -257,7 +257,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 compacting
-AtomicInteger compacting
+AtomicInteger compacting
 
 
 
@@ -266,7 +266,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 writesEnabled
-volatile boolean writesEnabled
+volatile boolean writesEnabled
 
 
 
@@ -275,7 +275,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 readOnly
-volatile boolean readOnly
+volatile boolean readOnly
 
 
 
@@ -284,7 +284,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 readsEnabled
-volatile boolean readsEnabled
+volatile boolean readsEnabled
 
 
 
@@ -293,7 +293,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 HEAP_SIZE
-static final long HEAP_SIZE
+static final long HEAP_SIZE
 
 
 
@@ -310,7 +310,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 WriteState
-WriteState()
+WriteState()
 
 
 
@@ -327,7 +327,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 setReadOnly
-void setReadOnly(boolean onOff)
+void setReadOnly(boolean onOff)
 Set flags that make this region read-only.
 
 Parameters:
@@ -341,7 +341,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 isReadOnly
-boolean isReadOnly()
+boolean isReadOnly()
 
 
 
@@ -350,7 +350,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 isFlushRequested
-boolean isFlushRequested()
+boolean isFlushRequested()
 
 
 
@@ -359,7 +359,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 setReadsEnabled
-void setReadsEnabled(boolean readsEnabled)
+void setReadsEnabled(boolean readsEnabled)
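Taken together, the members above form a small flag holder: volatile booleans for cross-thread visibility plus an AtomicInteger compaction count. A condensed sketch of that shape; the setReadOnly coupling is inferred from its javadoc ("Set flags that make this region read-only"), not copied from the source:

import java.util.concurrent.atomic.AtomicInteger;

class WriteStateSketch {
  volatile boolean flushing;
  volatile boolean flushRequested;
  final AtomicInteger compacting = new AtomicInteger();
  volatile boolean writesEnabled = true;
  volatile boolean readOnly;
  volatile boolean readsEnabled = true;

  // Plural "flags" in the javadoc suggests both are toggled together.
  void setReadOnly(boolean onOff) {
    this.writesEnabled = !onOff;
    this.readOnly = onOff;
  }

  boolean isReadOnly() { return this.readOnly; }
  boolean isFlushRequested() { return this.flushRequested; }
  void setReadsEnabled(boolean readsEnabled) { this.readsEnabled = readsEnabled; }
}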
 
 
 



[14/51] [partial] hbase-site git commit: Published site at 6b77786dfc46d25ac5bb5f1c8a4a9eb47b52a604.

2018-03-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/81cde4ce/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 5f975db..3b7627b 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -151,119 +151,119 @@
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterCell(Cell cell)
+FilterListWithAND.filterCell(Cell c)


 Filter.ReturnCode
-ColumnCountGetFilter.filterCell(Cell c)
+ValueFilter.filterCell(Cell c)


 Filter.ReturnCode
-RowFilter.filterCell(Cell v)
+SkipFilter.filterCell(Cell c)


 Filter.ReturnCode
-FuzzyRowFilter.filterCell(Cell c)
+FamilyFilter.filterCell(Cell c)


 Filter.ReturnCode
-Filter.filterCell(Cell c)
-A way to filter based on the column family, column qualifier and/or the column value.
-
+ColumnPrefixFilter.filterCell(Cell cell)


 Filter.ReturnCode
-RandomRowFilter.filterCell(Cell c)
+PageFilter.filterCell(Cell ignored)


 Filter.ReturnCode
-FirstKeyOnlyFilter.filterCell(Cell c)
+RowFilter.filterCell(Cell v)


 Filter.ReturnCode
-SkipFilter.filterCell(Cell c)
+ColumnRangeFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
-TimestampsFilter.filterCell(Cell c)
+ColumnCountGetFilter.filterCell(Cell c)


 Filter.ReturnCode
-ValueFilter.filterCell(Cell c)
+MultipleColumnPrefixFilter.filterCell(Cell c)


 Filter.ReturnCode
-KeyOnlyFilter.filterCell(Cell ignored)
+ColumnPaginationFilter.filterCell(Cell c)


 Filter.ReturnCode
-FamilyFilter.filterCell(Cell c)
+DependentColumnFilter.filterCell(Cell c)


 Filter.ReturnCode
-QualifierFilter.filterCell(Cell c)
+FilterListWithOR.filterCell(Cell c)


 Filter.ReturnCode
-FilterList.filterCell(Cell c)
+InclusiveStopFilter.filterCell(Cell c)


 Filter.ReturnCode
-ColumnRangeFilter.filterCell(Cell c)
+KeyOnlyFilter.filterCell(Cell ignored)


 Filter.ReturnCode
-ColumnPaginationFilter.filterCell(Cell c)
+MultiRowRangeFilter.filterCell(Cell ignored)


 Filter.ReturnCode
-FilterListWithAND.filterCell(Cell c)
+Filter.filterCell(Cell c)
+A way to filter based on the column family, column qualifier and/or the column value.
+


 Filter.ReturnCode
-WhileMatchFilter.filterCell(Cell c)
+FirstKeyOnlyFilter.filterCell(Cell c)


 Filter.ReturnCode
-ColumnValueFilter.filterCell(Cell c)
+WhileMatchFilter.filterCell(Cell c)


 Filter.ReturnCode
-MultiRowRangeFilter.filterCell(Cell ignored)
+FirstKeyValueMatchingQualifiersFilter.filterCell(Cell c)
+Deprecated.
+


 Filter.ReturnCode
-PrefixFilter.filterCell(Cell c)
+TimestampsFilter.filterCell(Cell c)


 Filter.ReturnCode
-DependentColumnFilter.filterCell(Cell c)
+FuzzyRowFilter.filterCell(Cell c)


 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterCell(Cell c)
-Deprecated.
-
+FilterList.filterCell(Cell c)


 Filter.ReturnCode
-PageFilter.filterCell(Cell ignored)
+RandomRowFilter.filterCell(Cell c)


 Filter.ReturnCode
-FilterListWithOR.filterCell(Cell c)
+PrefixFilter.filterCell(Cell c)


 Filter.ReturnCode
-InclusiveStopFilter.filterCell(Cell c)
+SingleColumnValueFilter.filterCell(Cell c)


 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterCell(Cell c)
+ColumnValueFilter.filterCell(Cell c)


 Filter.ReturnCode
-SingleColumnValueFilter.filterCell(Cell c)
+QualifierFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
@@ -279,158 +279,158 @@
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cell c)
+ValueFilter.filterKeyValue(Cell c)
 Deprecated.
 


 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cell c)
+SkipFilter.filterKeyValue(Cell c)
 Deprecated.
 


 Filter.ReturnCode
-RowFilter.filterKeyValue(Cell c)
-Deprecated.
-
+FilterListBase.filterKeyValue(Cell c)


 Filter.ReturnCode
-FuzzyRowFilter.filterKeyValue(Cell c)
+FamilyFilter.filterKeyValue(Cell c)
 Deprecated.
 


 Filter.ReturnCode
-Filter.filterKeyValue(Cell c)
-Deprecated.
-As of release 2.0.0, this will be removed in HBase 3.0.0. Instead use filterCell(Cell)
-
+ColumnPrefixFilter.filterKeyValue(Cell c)
+Deprecated.
 


 Filter.ReturnCode
-RandomRowFilter.filterKeyValue(Cell c)
+PageFilter.filterKeyValue(Cell c)
 Deprecated.
 


 Filter.ReturnCode
-FirstKeyOnlyFilter.filterKeyValue(Cell c)
+RowFilter.filterKeyValue(Cell c)
 Deprecated.
 


 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cell c)
+ColumnRangeFilter.filterKeyValue(Cell c)
 Deprecated.
 


 Filter.ReturnCode
-TimestampsFilter.filterKeyValue(Cell c)
+ColumnCountGetFilter.filterKeyValue(Cell c)
 Deprecated.
 


 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cell c)
+MultipleColumnPrefixFilter.filterKeyValue(Cell c)
 Deprecated.
 


 Filter.ReturnCode
-KeyOnlyFilter.filterKeyValue(Cell ignored)
+ColumnPaginationFilter.filterKeyValue(Cell c)
 Deprecated.
 
 
 
 Filter.ReturnCode
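The deprecation notes in this table all point the same way: filterKeyValue(Cell) is deprecated as of 2.0.0 and removed in HBase 3.0.0, and custom filters should override filterCell(Cell) instead. A minimal migration sketch; EvenLengthValueFilter is a made-up example, not an HBase class:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
import org.apache.hadoop.hbase.filter.FilterBase;

public class EvenLengthValueFilter extends FilterBase {
  // Override the replacement hook, not the deprecated filterKeyValue(Cell).
  @Override
  public ReturnCode filterCell(Cell c) {
    // Keep cells whose value length is even; skip the rest.
    return (c.getValueLength() % 2 == 0) ? ReturnCode.INCLUDE : ReturnCode.SKIP;
  }
}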

[14/51] [partial] hbase-site git commit: Published site at 1384da71375427b522b09f06862bb5d629cef52f.

2018-03-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d347bde8/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
index f5f674e..af78556 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
@@ -443,13 +443,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 TableDescriptor
-Table.getDescriptor()
-Gets the table 
descriptor for this table.
-
+HTable.getDescriptor()
 
 
 TableDescriptor
-HTable.getDescriptor()
+Table.getDescriptor()
+Gets the table 
descriptor for this table.
+
 
 
 TableDescriptor
@@ -503,52 +503,52 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 CompletableFuture<TableDescriptor>
-AsyncHBaseAdmin.getDescriptor(TableName tableName)
-
-
-CompletableFuture<TableDescriptor>
 AsyncAdmin.getDescriptor(TableName tableName)
 Method for getting the tableDescriptor
 
 
-
+
 CompletableFuture<TableDescriptor>
 RawAsyncHBaseAdmin.getDescriptor(TableName tableName)
 
+
+CompletableFuture<TableDescriptor>
+AsyncHBaseAdmin.getDescriptor(TableName tableName)
+
 
 private CompletableFuture<List<TableDescriptor>>
 RawAsyncHBaseAdmin.getTableDescriptors(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request)
 
 
-List<TableDescriptor>
-Admin.listTableDescriptors()
+default CompletableFuture<List<TableDescriptor>>
+AsyncAdmin.listTableDescriptors()
 List all the userspace tables.
 
 
 
 List<TableDescriptor>
-HBaseAdmin.listTableDescriptors()
-
-
-default CompletableFuture<List<TableDescriptor>>
-AsyncAdmin.listTableDescriptors()
+Admin.listTableDescriptors()
 List all the userspace tables.
 
 
-
-CompletableFuture<List<TableDescriptor>>
-AsyncHBaseAdmin.listTableDescriptors(boolean includeSysTables)
-
 
+List<TableDescriptor>
+HBaseAdmin.listTableDescriptors()
+
+
 CompletableFuture<List<TableDescriptor>>
 AsyncAdmin.listTableDescriptors(boolean includeSysTables)
 List all the tables.
 
 
-
+
 CompletableFuture<List<TableDescriptor>>
 

[14/51] [partial] hbase-site git commit: Published site at b7b86839250bf9b295ebc1948826f43a88736d6c.

2018-03-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6b94a2f2/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html
index df5fa53..8fffb89 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html
@@ -42,1927 +42,2060 @@
 034import java.util.TreeMap;
 035import java.util.regex.Matcher;
 036import java.util.regex.Pattern;
-037import 
org.apache.hadoop.conf.Configuration;
-038import 
org.apache.hadoop.hbase.Cell.Type;
-039import 
org.apache.hadoop.hbase.client.Connection;
-040import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-041import 
org.apache.hadoop.hbase.client.Consistency;
-042import 
org.apache.hadoop.hbase.client.Delete;
-043import 
org.apache.hadoop.hbase.client.Get;
-044import 
org.apache.hadoop.hbase.client.Mutation;
-045import 
org.apache.hadoop.hbase.client.Put;
-046import 
org.apache.hadoop.hbase.client.RegionInfo;
-047import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-048import 
org.apache.hadoop.hbase.client.RegionLocator;
-049import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-050import 
org.apache.hadoop.hbase.client.RegionServerCallable;
-051import 
org.apache.hadoop.hbase.client.Result;
-052import 
org.apache.hadoop.hbase.client.ResultScanner;
-053import 
org.apache.hadoop.hbase.client.Scan;
-054import 
org.apache.hadoop.hbase.client.Table;
-055import 
org.apache.hadoop.hbase.client.TableState;
-056import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-057import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-058import 
org.apache.hadoop.hbase.master.RegionState;
-059import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-060import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-061import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
-062import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-063import 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
-064import 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;
-065import 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse;
-066import 
org.apache.hadoop.hbase.util.Bytes;
-067import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-068import 
org.apache.hadoop.hbase.util.ExceptionUtil;
-069import 
org.apache.hadoop.hbase.util.Pair;
-070import 
org.apache.hadoop.hbase.util.PairOfSameType;
-071import 
org.apache.yetus.audience.InterfaceAudience;
-072import org.slf4j.Logger;
-073import org.slf4j.LoggerFactory;
-074
-075import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-076
-077/**
-078 * p
-079 * Read/write operations on region and 
assignment information store in codehbase:meta/code.
-080 * /p
+037import java.util.stream.Collectors;
+038import java.util.stream.Stream;
+039import 
org.apache.hadoop.conf.Configuration;
+040import 
org.apache.hadoop.hbase.Cell.Type;
+041import 
org.apache.hadoop.hbase.client.Connection;
+042import 
org.apache.hadoop.hbase.client.ConnectionFactory;
+043import 
org.apache.hadoop.hbase.client.Consistency;
+044import 
org.apache.hadoop.hbase.client.Delete;
+045import 
org.apache.hadoop.hbase.client.Get;
+046import 
org.apache.hadoop.hbase.client.Mutation;
+047import 
org.apache.hadoop.hbase.client.Put;
+048import 
org.apache.hadoop.hbase.client.RegionInfo;
+049import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
+050import 
org.apache.hadoop.hbase.client.RegionLocator;
+051import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
+052import 
org.apache.hadoop.hbase.client.RegionServerCallable;
+053import 
org.apache.hadoop.hbase.client.Result;
+054import 
org.apache.hadoop.hbase.client.ResultScanner;
+055import 
org.apache.hadoop.hbase.client.Scan;
+056import 
org.apache.hadoop.hbase.client.Table;
+057import 
org.apache.hadoop.hbase.client.TableState;
+058import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
+059import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
+060import 
org.apache.hadoop.hbase.master.RegionState;
+061import 
org.apache.hadoop.hbase.master.RegionState.State;
+062import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+063import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
+064import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
+065import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
+066import 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
+067import 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;

[14/51] [partial] hbase-site git commit: Published site at 1d25b60831b8cc8f7ad5fd366f1867de5c20d2f3.

2018-03-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb05e3e3/apidocs/org/apache/hadoop/hbase/client/MasterSwitchType.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/MasterSwitchType.html 
b/apidocs/org/apache/hadoop/hbase/client/MasterSwitchType.html
index 1fce8cc..a8eec5a 100644
--- a/apidocs/org/apache/hadoop/hbase/client/MasterSwitchType.html
+++ b/apidocs/org/apache/hadoop/hbase/client/MasterSwitchType.html
@@ -97,10 +97,10 @@ var activeTableTab = "activeTableTab";
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">java.lang.EnumMasterSwitchType
+https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">java.lang.EnumMasterSwitchType
 
 
 org.apache.hadoop.hbase.client.MasterSwitchType
@@ -114,13 +114,13 @@ var activeTableTab = "activeTableTab";
 
 
 All Implemented Interfaces:
-http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable, http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableMasterSwitchType
+https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable, https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableMasterSwitchType
 
 
 
 @InterfaceAudience.Public
 public enum MasterSwitchType
-extends http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumMasterSwitchType
+extends https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumMasterSwitchType
 Represents the master switch type
 
 
@@ -162,7 +162,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is
 
 
 static MasterSwitchType
-valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+valueOf(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Returns the enum constant of this type with the specified 
name.
 
 
@@ -178,15 +178,15 @@ the order they are declared.
 
 
 
-Methods inherited from classjava.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">Enum
-http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true#compareTo-E-;
 title="class or interface in java.lang">compareTo, http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true#getDeclaringClass--;
 title="class or interface in java.lang">getDeclaringClass, http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true#hashCode--;
 title="class or interface in java.lang">hashCode, http://docs.oracle.com/javase/8/docs/
 api/java/lang/Enum.html?is-external=true#name--" title="class or interface in 
java.lang">name, http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true#ordinal--;
 title="class or interface in java.lang">ordinal, http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true#toString--;
 title="class or interface in java.lang">toString, http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true#valueOf-java.lang.Class-java.lang.String-;
 title="class or interface in java.lang">valueOf
+Methods inherited from classjava.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">Enum
+https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true#compareTo-E-;
 title="class or interface in java.lang">compareTo, https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true#equals-java.lang.Object-;

[14/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.html
index 93f650f..d7aa8b1 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.html
@@ -546,1472 +546,1464 @@
 538return this.conf;
 539  }
 540
-541  /**
-542   * @return true if the master is 
running, throws an exception otherwise
-543   * @throws 
org.apache.hadoop.hbase.MasterNotRunningException - if the master is not 
running
-544   * @deprecated this has been deprecated 
without a replacement
-545   */
-546  @Deprecated
-547  @Override
-548  public boolean isMasterRunning()
-549  throws MasterNotRunningException, 
ZooKeeperConnectionException {
-550// When getting the master 
connection, we check it's running,
-551// so if there is no exception, it 
means we've been able to get a
-552// connection on a running master
-553MasterKeepAliveConnection m = 
getKeepAliveMasterService();
-554m.close();
-555return true;
-556  }
-557
-558  @Override
-559  public HRegionLocation 
getRegionLocation(final TableName tableName,
-560  final byte [] row, boolean 
reload)
-561  throws IOException {
-562return reload? 
relocateRegion(tableName, row): locateRegion(tableName, row);
-563  }
-564
-565
-566  @Override
-567  public boolean isTableEnabled(TableName 
tableName) throws IOException {
-568return 
getTableState(tableName).inStates(TableState.State.ENABLED);
-569  }
-570
-571  @Override
-572  public boolean 
isTableDisabled(TableName tableName) throws IOException {
-573return 
getTableState(tableName).inStates(TableState.State.DISABLED);
-574  }
-575
-576  @Override
-577  public boolean isTableAvailable(final 
TableName tableName, @Nullable final byte[][] splitKeys)
-578  throws IOException {
-579if (this.closed) {
-580  throw new IOException(toString() + 
" closed");
-581}
-582try {
-583  if (!isTableEnabled(tableName)) {
-584LOG.debug("Table " + tableName + 
" not enabled");
-585return false;
-586  }
-587  List<Pair<RegionInfo, ServerName>> locations =
-588MetaTableAccessor.getTableRegionsAndLocations(this, tableName, true);
-589
-590  int notDeployed = 0;
-591  int regionCount = 0;
-592  for (Pair<RegionInfo, ServerName> pair : locations) {
-593RegionInfo info = 
pair.getFirst();
-594if (pair.getSecond() == null) {
-595  if (LOG.isDebugEnabled()) {
-596LOG.debug("Table " + 
tableName + " has not deployed region " + pair.getFirst()
-597.getEncodedName());
-598  }
-599  notDeployed++;
-600} else if (splitKeys != null
-601 && !Bytes.equals(info.getStartKey(), HConstants.EMPTY_BYTE_ARRAY)) {
-602  for (byte[] splitKey : 
splitKeys) {
-603// Just check if the splitkey 
is available
-604if 
(Bytes.equals(info.getStartKey(), splitKey)) {
-605  regionCount++;
-606  break;
-607}
-608  }
-609} else {
-610  // Always empty start row 
should be counted
-611  regionCount++;
-612}
-613  }
-614  if (notDeployed > 0) {
-615if (LOG.isDebugEnabled()) {
-616  LOG.debug("Table " + tableName 
+ " has " + notDeployed + " regions");
-617}
-618return false;
-619  } else if (splitKeys != null && regionCount != splitKeys.length + 1) {
-620if (LOG.isDebugEnabled()) {
-621  LOG.debug("Table " + tableName 
+ " expected to have " + (splitKeys.length + 1)
-622  + " regions, but only " + 
regionCount + " available");
-623}
-624return false;
-625  } else {
-626if (LOG.isDebugEnabled()) {
-627  LOG.debug("Table " + tableName 
+ " should be available");
-628}
-629return true;
-630  }
-631} catch (TableNotFoundException tnfe) 
{
-632  LOG.warn("Table " + tableName + " 
not enabled, it is not exists");
-633  return false;
-634}
-635  }
-636
-637  @Override
-638  public HRegionLocation 
locateRegion(final byte[] regionName) throws IOException {
-639RegionLocations locations = 
locateRegion(RegionInfo.getTable(regionName),
-640  RegionInfo.getStartKey(regionName), 
false, true);
-641return locations == null ? null : 
locations.getRegionLocation();
+541  private void checkClosed() throws 
DoNotRetryIOException {
+542if (this.closed) {
+543  throw new 
DoNotRetryIOException(toString() + " 
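The isTableAvailable logic above counts undeployed regions and, when splitKeys are given, checks that every split key begins a deployed region. From client code the same checks are reachable through the public Admin API; a hedged sketch, where "t1" is a placeholder table name:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class AvailabilityCheck {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Admin admin = conn.getAdmin()) {
      TableName tn = TableName.valueOf("t1"); // placeholder table name
      // A table only counts as available once every region is deployed,
      // which is the notDeployed/regionCount bookkeeping shown above.
      System.out.println("enabled:   " + admin.isTableEnabled(tn));
      System.out.println("available: " + admin.isTableAvailable(tn));
    }
  }
}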

[14/51] [partial] hbase-site git commit: Published site at .

2018-02-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/991224b9/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
index 802b925..a3e80ab 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
@@ -73,229 +73,229 @@
 065import 
java.util.concurrent.TimeoutException;
 066import 
java.util.concurrent.atomic.AtomicBoolean;
 067import 
java.util.concurrent.atomic.AtomicInteger;
-068import 
java.util.concurrent.atomic.AtomicLong;
-069import 
java.util.concurrent.atomic.LongAdder;
-070import java.util.concurrent.locks.Lock;
-071import 
java.util.concurrent.locks.ReadWriteLock;
-072import 
java.util.concurrent.locks.ReentrantReadWriteLock;
-073import java.util.function.Function;
-074import 
org.apache.hadoop.conf.Configuration;
-075import org.apache.hadoop.fs.FileStatus;
-076import org.apache.hadoop.fs.FileSystem;
-077import 
org.apache.hadoop.fs.LocatedFileStatus;
-078import org.apache.hadoop.fs.Path;
-079import org.apache.hadoop.hbase.Cell;
-080import 
org.apache.hadoop.hbase.CellBuilderType;
-081import 
org.apache.hadoop.hbase.CellComparator;
-082import 
org.apache.hadoop.hbase.CellComparatorImpl;
-083import 
org.apache.hadoop.hbase.CellScanner;
-084import 
org.apache.hadoop.hbase.CellUtil;
-085import 
org.apache.hadoop.hbase.CompareOperator;
-086import 
org.apache.hadoop.hbase.CompoundConfiguration;
-087import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-088import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-089import 
org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
-090import 
org.apache.hadoop.hbase.HConstants;
-091import 
org.apache.hadoop.hbase.HConstants.OperationStatusCode;
-092import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
-093import 
org.apache.hadoop.hbase.HRegionInfo;
-094import 
org.apache.hadoop.hbase.KeyValue;
-095import 
org.apache.hadoop.hbase.KeyValueUtil;
-096import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-097import 
org.apache.hadoop.hbase.NotServingRegionException;
-098import 
org.apache.hadoop.hbase.PrivateCellUtil;
-099import 
org.apache.hadoop.hbase.RegionTooBusyException;
-100import 
org.apache.hadoop.hbase.TableName;
-101import org.apache.hadoop.hbase.Tag;
-102import org.apache.hadoop.hbase.TagUtil;
-103import 
org.apache.hadoop.hbase.UnknownScannerException;
-104import 
org.apache.hadoop.hbase.client.Append;
-105import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-106import 
org.apache.hadoop.hbase.client.CompactionState;
-107import 
org.apache.hadoop.hbase.client.Delete;
-108import 
org.apache.hadoop.hbase.client.Durability;
-109import 
org.apache.hadoop.hbase.client.Get;
-110import 
org.apache.hadoop.hbase.client.Increment;
-111import 
org.apache.hadoop.hbase.client.IsolationLevel;
-112import 
org.apache.hadoop.hbase.client.Mutation;
-113import 
org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;
-114import 
org.apache.hadoop.hbase.client.Put;
-115import 
org.apache.hadoop.hbase.client.RegionInfo;
-116import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-117import 
org.apache.hadoop.hbase.client.Result;
-118import 
org.apache.hadoop.hbase.client.RowMutations;
-119import 
org.apache.hadoop.hbase.client.Scan;
-120import 
org.apache.hadoop.hbase.client.TableDescriptor;
-121import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-122import 
org.apache.hadoop.hbase.conf.ConfigurationManager;
-123import 
org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
-124import 
org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-125import 
org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
-126import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-127import 
org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-128import 
org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-129import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-130import 
org.apache.hadoop.hbase.filter.FilterWrapper;
-131import 
org.apache.hadoop.hbase.filter.IncompatibleFilterException;
-132import 
org.apache.hadoop.hbase.io.HFileLink;
-133import 
org.apache.hadoop.hbase.io.HeapSize;
-134import 
org.apache.hadoop.hbase.io.TimeRange;
-135import 
org.apache.hadoop.hbase.io.hfile.HFile;
-136import 
org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
-137import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-138import 
org.apache.hadoop.hbase.ipc.RpcCall;
-139import 
org.apache.hadoop.hbase.ipc.RpcServer;
-140import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-141import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-142import 

[14/51] [partial] hbase-site git commit: Published site at .

2018-02-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/193b4259/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
index bd13b53..802b925 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
@@ -900,7600 +900,7598 @@
 892if 
(this.getRegionInfo().getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {
 893  status.setStatus("Writing region 
info on filesystem");
 894  fs.checkRegionInfoOnFilesystem();
-895} else {
-896  if (LOG.isDebugEnabled()) {
-897LOG.debug("Skipping creation of 
.regioninfo file for " + this.getRegionInfo());
-898  }
-899}
-900
-901// Initialize all the HStores
-902status.setStatus("Initializing all 
the Stores");
-903long maxSeqId = 
initializeStores(reporter, status);
-904this.mvcc.advanceTo(maxSeqId);
-905if 
(ServerRegionReplicaUtil.shouldReplayRecoveredEdits(this)) {
-906  Collection<HStore> stores = this.stores.values();
-907  try {
-908// update the stores that we are 
replaying
-909
stores.forEach(HStore::startReplayingFromWAL);
-910// Recover any edits if 
available.
-911maxSeqId = Math.max(maxSeqId,
-912  
replayRecoveredEditsIfAny(this.fs.getRegionDir(), maxSeqIdInStores, reporter, 
status));
-913// Make sure mvcc is up to max.
-914this.mvcc.advanceTo(maxSeqId);
-915  } finally {
-916// update the stores that we are 
done replaying
-917
stores.forEach(HStore::stopReplayingFromWAL);
-918  }
-919}
-920this.lastReplayedOpenRegionSeqId = 
maxSeqId;
+895}
+896
+897// Initialize all the HStores
+898status.setStatus("Initializing all 
the Stores");
+899long maxSeqId = 
initializeStores(reporter, status);
+900this.mvcc.advanceTo(maxSeqId);
+901if 
(ServerRegionReplicaUtil.shouldReplayRecoveredEdits(this)) {
+902  Collection<HStore> stores = this.stores.values();
+903  try {
+904// update the stores that we are 
replaying
+905
stores.forEach(HStore::startReplayingFromWAL);
+906// Recover any edits if 
available.
+907maxSeqId = Math.max(maxSeqId,
+908  
replayRecoveredEditsIfAny(this.fs.getRegionDir(), maxSeqIdInStores, reporter, 
status));
+909// Make sure mvcc is up to max.
+910this.mvcc.advanceTo(maxSeqId);
+911  } finally {
+912// update the stores that we are 
done replaying
+913
stores.forEach(HStore::stopReplayingFromWAL);
+914  }
+915}
+916this.lastReplayedOpenRegionSeqId = 
maxSeqId;
+917
+918
this.writestate.setReadOnly(ServerRegionReplicaUtil.isReadOnly(this));
+919this.writestate.flushRequested = 
false;
+920this.writestate.compacting.set(0);
 921
-922
this.writestate.setReadOnly(ServerRegionReplicaUtil.isReadOnly(this));
-923this.writestate.flushRequested = 
false;
-924this.writestate.compacting.set(0);
-925
-926if (this.writestate.writesEnabled) 
{
-927  // Remove temporary data left over 
from old regions
-928  status.setStatus("Cleaning up 
temporary data from old regions");
-929  fs.cleanupTempDir();
-930}
-931
-932if (this.writestate.writesEnabled) 
{
-933  status.setStatus("Cleaning up 
detritus from prior splits");
-934  // Get rid of any splits or merges 
that were lost in-progress.  Clean out
-935  // these directories here on open.  
We may be opening a region that was
-936  // being split but we crashed in 
the middle of it all.
-937  fs.cleanupAnySplitDetritus();
-938  fs.cleanupMergesDir();
-939}
-940
-941// Initialize split policy
-942this.splitPolicy = 
RegionSplitPolicy.create(this, conf);
-943
-944// Initialize flush policy
-945this.flushPolicy = 
FlushPolicyFactory.create(this, conf);
-946
-947long lastFlushTime = 
EnvironmentEdgeManager.currentTime();
-948for (HStore store: stores.values()) 
{
-949  
this.lastStoreFlushTimeMap.put(store, lastFlushTime);
-950}
-951
-952// Use maximum of log sequenceid or 
that which was found in stores
-953// (particularly if no recovered 
edits, seqid will be -1).
-954long nextSeqid = maxSeqId;
-955if (this.writestate.writesEnabled) 
{
-956  nextSeqid = 
WALSplitter.writeRegionSequenceIdFile(this.fs.getFileSystem(),
-957  this.fs.getRegionDir(), 
nextSeqid, 1);
-958} else {
-959  nextSeqid++;
-960}
-961
-962LOG.info("Onlined " + 
this.getRegionInfo().getShortNameToLog() +
-963  "; next sequenceid=" + 
nextSeqid);
+922if 
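
The hunk above is mostly re-indentation, but the WAL-replay bracketing it carries is the interesting part: every store is flipped into replay mode, recovered edits are applied, and the stores are always flipped back in a finally block so a failed replay cannot leave them stuck. A self-contained sketch of that pattern, with Store standing in for the real HStore:

import java.util.Arrays;
import java.util.List;

public class ReplayBracketSketch {
  static class Store {
    final String name;
    boolean replaying;
    Store(String name) { this.name = name; }
    void startReplayingFromWAL() { replaying = true; }
    void stopReplayingFromWAL() { replaying = false; }
  }

  static long replayRecoveredEdits(List<Store> stores, long maxSeqId) {
    try {
      stores.forEach(Store::startReplayingFromWAL);
      long replayedUpTo = maxSeqId + 42;        // pretend we replayed some edits
      return Math.max(maxSeqId, replayedUpTo);  // never move the sequence id backwards
    } finally {
      stores.forEach(Store::stopReplayingFromWAL);  // runs even if replay throws
    }
  }

  public static void main(String[] args) {
    List<Store> stores = Arrays.asList(new Store("cf1"), new Store("cf2"));
    System.out.println("next seqId = " + replayRecoveredEdits(stores, 100L));
  }
}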

[14/51] [partial] hbase-site git commit: Published site at .

2018-02-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/94208cfe/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
index c7d05d1..abcb738 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
@@ -143,18 +143,18 @@
 
 
 void
-HFileDataBlockEncoderImpl.saveMetadata(HFile.Writerwriter)
-
-
-void
 NoOpDataBlockEncoder.saveMetadata(HFile.Writerwriter)
 
-
+
 void
 HFileDataBlockEncoder.saveMetadata(HFile.Writerwriter)
 Save metadata in HFile which will be written to disk
 
 
+
+void
+HFileDataBlockEncoderImpl.saveMetadata(HFile.Writerwriter)
+
 
 
 
@@ -203,18 +203,18 @@
 
 
 
-void
-RowColBloomContext.addLastBloomKey(HFile.Writerwriter)
+abstract void
+BloomContext.addLastBloomKey(HFile.Writerwriter)
+Adds the last bloom key to the HFile Writer as part of 
StorefileWriter close.
+
 
 
 void
 RowBloomContext.addLastBloomKey(HFile.Writerwriter)
 
 
-abstract void
-BloomContext.addLastBloomKey(HFile.Writerwriter)
-Adds the last bloom key to the HFile Writer as part of 
StorefileWriter close.
-
+void
+RowColBloomContext.addLastBloomKey(HFile.Writerwriter)
 
 
 static BloomFilterWriter

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/94208cfe/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
index 479b9d3..274bfad 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
@@ -106,15 +106,15 @@
 
 
 
+private HFileBlock.Writer
+HFileBlockIndex.BlockIndexWriter.blockWriter
+
+
 protected HFileBlock.Writer
 HFileWriterImpl.blockWriter
 block writer
 
 
-
-private HFileBlock.Writer
-HFileBlockIndex.BlockIndexWriter.blockWriter
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/94208cfe/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
index 0c892c8..b293c97 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
@@ -136,15 +136,15 @@
 
 
 HFileContext
-HFileBlockDecodingContext.getHFileContext()
+HFileBlockEncodingContext.getHFileContext()
 
 
 HFileContext
-HFileBlockDefaultDecodingContext.getHFileContext()
+HFileBlockDecodingContext.getHFileContext()
 
 
 HFileContext
-HFileBlockEncodingContext.getHFileContext()
+HFileBlockDefaultDecodingContext.getHFileContext()
 
 
 HFileContext
@@ -224,23 +224,23 @@
 
 
 private HFileContext
+HFile.WriterFactory.fileContext
+
+
+private HFileContext
 HFileBlock.fileContext
 Meta data that holds meta information on the 
hfileblock.
 
 
-
+
 private HFileContext
 HFileBlock.Writer.fileContext
 Meta data that holds information about the hfileblock
 
 
-
-private HFileContext
-HFileBlock.FSReaderImpl.fileContext
-
 
 private HFileContext
-HFile.WriterFactory.fileContext
+HFileBlock.FSReaderImpl.fileContext
 
 
 private HFileContext
@@ -277,20 +277,20 @@
 
 
 HFileContext
-HFileWriterImpl.getFileContext()
-
-
-HFileContext
 HFile.Writer.getFileContext()
 Return the file context for the HFile this writer belongs 
to
 
 
-
+
 HFileContext
 HFile.Reader.getFileContext()
 Return the file context of the HFile this reader belongs 
to
 
 
+
+HFileContext
+HFileWriterImpl.getFileContext()
+
 
 HFileContext
 HFileReaderImpl.getFileContext()
@@ -323,35 +323,35 @@
 
 
 HFileBlockDecodingContext
-HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContextfileContext)
-
-
-HFileBlockDecodingContext
 NoOpDataBlockEncoder.newDataBlockDecodingContext(HFileContextmeta)
 
-
+
 HFileBlockDecodingContext
 HFileDataBlockEncoder.newDataBlockDecodingContext(HFileContextfileContext)
 create a encoder specific decoding context for 
reading.
 
 
-
-HFileBlockEncodingContext
-HFileDataBlockEncoderImpl.newDataBlockEncodingContext(byte[]dummyHeader,
-   HFileContextfileContext)
-
 
+HFileBlockDecodingContext
+HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContextfileContext)
+
+
 HFileBlockEncodingContext
 NoOpDataBlockEncoder.newDataBlockEncodingContext(byte[]dummyHeader,
HFileContextmeta)
 
-
+
 HFileBlockEncodingContext
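
The BloomContext.addLastBloomKey(HFile.Writer) summary above says the last key fed to the bloom filter is persisted as the store file writer closes. A hedged sketch of that contract, assuming only HFile.Writer.appendFileInfo(byte[], byte[]); the "LAST_BLOOM_KEY" string is illustrative here, not the actual constant:

import java.io.IOException;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.util.Bytes;

public class LastBloomKeySketch {
  private byte[] lastBloomKey;  // updated as keys are added to the bloom filter

  void track(byte[] key) {
    this.lastBloomKey = key;
  }

  // Persist the last bloom key as file metadata when the writer closes.
  void addLastBloomKey(HFile.Writer writer) throws IOException {
    if (lastBloomKey != null) {
      writer.appendFileInfo(Bytes.toBytes("LAST_BLOOM_KEY"), lastBloomKey);
    }
  }
}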
 

[14/51] [partial] hbase-site git commit: Published site at .

2018-02-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cd17dc5/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 0c342b2..bb2794a 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -151,115 +151,115 @@
 
 
 Filter.ReturnCode
-FilterListWithAND.filterCell(Cellc)
+ColumnPrefixFilter.filterCell(Cellcell)
 
 
 Filter.ReturnCode
-ValueFilter.filterCell(Cellc)
+ColumnCountGetFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-SkipFilter.filterCell(Cellc)
+RowFilter.filterCell(Cellv)
 
 
 Filter.ReturnCode
-FamilyFilter.filterCell(Cellc)
+FuzzyRowFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterCell(Cellcell)
+Filter.filterCell(Cellc)
+A way to filter based on the column family, column 
qualifier and/or the column value.
+
 
 
 Filter.ReturnCode
-PageFilter.filterCell(Cellignored)
+RandomRowFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-RowFilter.filterCell(Cellv)
+FirstKeyOnlyFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterCell(Cellc)
+SkipFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterCell(Cellc)
+TimestampsFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterCell(Cellc)
+ValueFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterCell(Cellc)
+KeyOnlyFilter.filterCell(Cellignored)
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterCell(Cellc)
+FamilyFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FilterListWithOR.filterCell(Cellc)
+QualifierFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterCell(Cellc)
+FilterList.filterCell(Cellc)
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterCell(Cellignored)
+ColumnRangeFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterCell(Cellignored)
+ColumnPaginationFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-Filter.filterCell(Cellc)
-A way to filter based on the column family, column 
qualifier and/or the column value.
-
+FilterListWithAND.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterCell(Cellc)
+WhileMatchFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterCell(Cellc)
+MultiRowRangeFilter.filterCell(Cellignored)
 
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterCell(Cellc)
-Deprecated.
-
+PrefixFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-TimestampsFilter.filterCell(Cellc)
+DependentColumnFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FuzzyRowFilter.filterCell(Cellc)
+FirstKeyValueMatchingQualifiersFilter.filterCell(Cellc)
+Deprecated.
+
 
 
 Filter.ReturnCode
-FilterList.filterCell(Cellc)
+PageFilter.filterCell(Cellignored)
 
 
 Filter.ReturnCode
-RandomRowFilter.filterCell(Cellc)
+FilterListWithOR.filterCell(Cellc)
 
 
 Filter.ReturnCode
-PrefixFilter.filterCell(Cellc)
+InclusiveStopFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterCell(Cellc)
+MultipleColumnPrefixFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-QualifierFilter.filterCell(Cellc)
+SingleColumnValueFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
@@ -275,158 +275,158 @@
 
 
 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cellc)
+ColumnPrefixFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cellc)
+ColumnCountGetFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-FilterListBase.filterKeyValue(Cellc)
+RowFilter.filterKeyValue(Cellc)
+Deprecated.
+
 
 
 Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cellc)
+FuzzyRowFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cellc)
-Deprecated.
+Filter.filterKeyValue(Cellc)
+Deprecated.
+As of release 2.0.0, this 
will be removed in HBase 3.0.0.
+ Instead use filterCell(Cell)
+
 
 
 
 Filter.ReturnCode
-PageFilter.filterKeyValue(Cellc)
+RandomRowFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-RowFilter.filterKeyValue(Cellc)
+FirstKeyOnlyFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cellc)
+SkipFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cellc)
+TimestampsFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterKeyValue(Cellc)
+ValueFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterKeyValue(Cellc)
+KeyOnlyFilter.filterKeyValue(Cellignored)
 Deprecated.
 
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterKeyValue(Cellc)
+FamilyFilter.filterKeyValue(Cellc)
 Deprecated.
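
The deprecation note in this hunk (filterKeyValue(Cell) goes away in HBase 3.0.0; implement filterCell(Cell) instead) translates to a one-method change in custom filters. A minimal post-2.0 filter; QualifierPrefixFilter is a made-up example class, not an HBase filter:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.util.Bytes;

public class QualifierPrefixFilter extends FilterBase {
  private final byte[] prefix;

  public QualifierPrefixFilter(byte[] prefix) {
    this.prefix = prefix;
  }

  @Override
  public ReturnCode filterCell(Cell c) {
    // INCLUDE cells whose qualifier starts with the prefix, SKIP the rest.
    return Bytes.startsWith(CellUtil.cloneQualifier(c), prefix)
        ? ReturnCode.INCLUDE
        : ReturnCode.SKIP;
  }
}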
 
 
 
 

[14/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
index fe5ef34..7161108 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
@@ -166,27 +166,27 @@
 
 
 DataBlockEncoder.EncodedSeeker
-RowIndexCodecV1.createSeeker(CellComparatorcomparator,
+CopyKeyDataBlockEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-CopyKeyDataBlockEncoder.createSeeker(CellComparatorcomparator,
+PrefixKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-DiffKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
+FastDiffDeltaEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-FastDiffDeltaEncoder.createSeeker(CellComparatorcomparator,
+DiffKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-PrefixKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
+RowIndexCodecV1.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
@@ -198,13 +198,13 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
-RowIndexCodecV1.decodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,
-   HFileBlockDecodingContextdecodingCtx)
+BufferedDataBlockEncoder.decodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,
+   HFileBlockDecodingContextblkDecodingCtx)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
-BufferedDataBlockEncoder.decodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,
-   HFileBlockDecodingContextblkDecodingCtx)
+RowIndexCodecV1.decodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,
+   HFileBlockDecodingContextdecodingCtx)
 
 
 
@@ -279,18 +279,18 @@
 
 
 HFileBlockDecodingContext
-HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContextfileContext)
-
-
-HFileBlockDecodingContext
 NoOpDataBlockEncoder.newDataBlockDecodingContext(HFileContextmeta)
 
-
+
 HFileBlockDecodingContext
 HFileDataBlockEncoder.newDataBlockDecodingContext(HFileContextfileContext)
 create a encoder specific decoding context for 
reading.
 
 
+
+HFileBlockDecodingContext
+HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContextfileContext)
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
index 66443b9..79b047f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
@@ -116,36 +116,36 @@
  HFileBlockDefaultDecodingContextdecodingCtx)
 
 
-protected http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
-CopyKeyDataBlockEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,
+protected abstract http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
+BufferedDataBlockEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,

[14/51] [partial] hbase-site git commit: Published site at .

2018-02-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f272b0e8/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Call.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Call.html
 
b/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Call.html
index e56ae0d..983064e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Call.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Call.html
@@ -110,6 +110,13 @@
 
 
 T extends 
com.google.protobuf.Service,Rhttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],R
+HTable.coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">ClassTservice,
+  byte[]startKey,
+  byte[]endKey,
+  Batch.CallT,Rcallable)
+
+
+T extends 
com.google.protobuf.Service,Rhttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],R
 Table.coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">ClassTservice,
   byte[]startKey,
   byte[]endKey,
@@ -120,14 +127,15 @@
  with each Service instance.
 
 
-
-T extends 
com.google.protobuf.Service,Rhttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],R
-HTable.coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">ClassTservice,
+
+T extends 
com.google.protobuf.Service,Rvoid
+HTable.coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">ClassTservice,
   byte[]startKey,
   byte[]endKey,
-  Batch.CallT,Rcallable)
+  Batch.CallT,Rcallable,
+  Batch.CallbackRcallback)
 
-
+
 T extends 
com.google.protobuf.Service,Rvoid
 Table.coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">ClassTservice,
   byte[]startKey,
@@ -140,14 +148,6 @@
  with each Service instance.
 
 
-
-T extends 
com.google.protobuf.Service,Rvoid
-HTable.coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">ClassTservice,
-  byte[]startKey,
-  byte[]endKey,
-  Batch.CallT,Rcallable,
-  Batch.CallbackRcallback)
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f272b0e8/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
 
b/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
index bf8d672..61695fd 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
@@ -113,17 +113,17 @@
 
 
 
-private Batch.CallbackCResult
-AsyncRequestFutureImpl.callback
-
-
 private Batch.CallbackT
 AsyncProcessTask.callback
 
-
+
 private Batch.CallbackT
 AsyncProcessTask.Builder.callback
 
+
+private Batch.CallbackCResult
+AsyncRequestFutureImpl.callback
+
 
 
 
@@ -148,42 +148,50 @@
 
 
 Rvoid
-Table.batchCallback(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Rowactions,
+HTable.batchCallback(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Rowactions,
  http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object[]results,
- Batch.CallbackRcallback)
-Same as Table.batch(List,
 Object[]), but with a callback.
-
+ Batch.CallbackRcallback)
 
 
 Rvoid
-HTable.batchCallback(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Rowactions,
+Table.batchCallback(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Rowactions,
  http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in 

[14/51] [partial] hbase-site git commit: Published site at .

2018-02-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c83a37c8/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
index fe5ef34..7161108 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
@@ -166,27 +166,27 @@
 
 
 DataBlockEncoder.EncodedSeeker
-RowIndexCodecV1.createSeeker(CellComparatorcomparator,
+CopyKeyDataBlockEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-CopyKeyDataBlockEncoder.createSeeker(CellComparatorcomparator,
+PrefixKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-DiffKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
+FastDiffDeltaEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-FastDiffDeltaEncoder.createSeeker(CellComparatorcomparator,
+DiffKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-PrefixKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
+RowIndexCodecV1.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
@@ -198,13 +198,13 @@
 
 
 http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
-RowIndexCodecV1.decodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,
-   HFileBlockDecodingContextdecodingCtx)
+BufferedDataBlockEncoder.decodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,
+   HFileBlockDecodingContextblkDecodingCtx)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
-BufferedDataBlockEncoder.decodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,
-   HFileBlockDecodingContextblkDecodingCtx)
+RowIndexCodecV1.decodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,
+   HFileBlockDecodingContextdecodingCtx)
 
 
 
@@ -279,18 +279,18 @@
 
 
 HFileBlockDecodingContext
-HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContextfileContext)
-
-
-HFileBlockDecodingContext
 NoOpDataBlockEncoder.newDataBlockDecodingContext(HFileContextmeta)
 
-
+
 HFileBlockDecodingContext
 HFileDataBlockEncoder.newDataBlockDecodingContext(HFileContextfileContext)
 create a encoder specific decoding context for 
reading.
 
 
+
+HFileBlockDecodingContext
+HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContextfileContext)
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c83a37c8/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
index 66443b9..79b047f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
@@ -116,36 +116,36 @@
  HFileBlockDefaultDecodingContextdecodingCtx)
 
 
-protected http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
-CopyKeyDataBlockEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,
+protected abstract http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
+BufferedDataBlockEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true;
 title="class or interface in java.io">DataInputStreamsource,

[14/51] [partial] hbase-site git commit: Published site at .

2018-02-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
index b8ce496..570fb68 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
@@ -168,27 +168,39 @@
 
 
 void
+CombinedBlockCache.cacheBlock(BlockCacheKeycacheKey,
+  Cacheablebuf)
+
+
+void
 BlockCache.cacheBlock(BlockCacheKeycacheKey,
   Cacheablebuf)
 Add block to cache (defaults to not in-memory).
 
 
-
+
 void
 LruBlockCache.cacheBlock(BlockCacheKeycacheKey,
   Cacheablebuf)
 Cache the block with the specified name and buffer.
 
 
-
+
 void
-CombinedBlockCache.cacheBlock(BlockCacheKeycacheKey,
+MemcachedBlockCache.cacheBlock(BlockCacheKeycacheKey,
   Cacheablebuf)
 
+
+void
+CombinedBlockCache.cacheBlock(BlockCacheKeycacheKey,
+  Cacheablebuf,
+  booleaninMemory)
+
 
 void
-MemcachedBlockCache.cacheBlock(BlockCacheKeycacheKey,
-  Cacheablebuf)
+InclusiveCombinedBlockCache.cacheBlock(BlockCacheKeycacheKey,
+  Cacheablebuf,
+  booleaninMemory)
 
 
 void
@@ -208,18 +220,6 @@
 
 
 void
-CombinedBlockCache.cacheBlock(BlockCacheKeycacheKey,
-  Cacheablebuf,
-  booleaninMemory)
-
-
-void
-InclusiveCombinedBlockCache.cacheBlock(BlockCacheKeycacheKey,
-  Cacheablebuf,
-  booleaninMemory)
-
-
-void
 MemcachedBlockCache.cacheBlock(BlockCacheKeycacheKey,
   Cacheablebuf,
   booleaninMemory)
@@ -232,21 +232,21 @@
 
 
 boolean
-BlockCache.evictBlock(BlockCacheKeycacheKey)
-Evict block from cache.
-
+CombinedBlockCache.evictBlock(BlockCacheKeycacheKey)
 
 
 boolean
-LruBlockCache.evictBlock(BlockCacheKeycacheKey)
+InclusiveCombinedBlockCache.evictBlock(BlockCacheKeycacheKey)
 
 
 boolean
-CombinedBlockCache.evictBlock(BlockCacheKeycacheKey)
+BlockCache.evictBlock(BlockCacheKeycacheKey)
+Evict block from cache.
+
 
 
 boolean
-InclusiveCombinedBlockCache.evictBlock(BlockCacheKeycacheKey)
+LruBlockCache.evictBlock(BlockCacheKeycacheKey)
 
 
 boolean
@@ -254,35 +254,35 @@
 
 
 Cacheable
-BlockCache.getBlock(BlockCacheKeycacheKey,
+CombinedBlockCache.getBlock(BlockCacheKeycacheKey,
 booleancaching,
 booleanrepeat,
-booleanupdateCacheMetrics)
-Fetch block from cache.
-
+booleanupdateCacheMetrics)
 
 
 Cacheable
-LruBlockCache.getBlock(BlockCacheKeycacheKey,
+InclusiveCombinedBlockCache.getBlock(BlockCacheKeycacheKey,
 booleancaching,
 booleanrepeat,
-booleanupdateCacheMetrics)
-Get the buffer of the block with the specified name.
-
+booleanupdateCacheMetrics)
 
 
 Cacheable
-CombinedBlockCache.getBlock(BlockCacheKeycacheKey,
+BlockCache.getBlock(BlockCacheKeycacheKey,
 booleancaching,
 booleanrepeat,
-booleanupdateCacheMetrics)
+booleanupdateCacheMetrics)
+Fetch block from cache.
+
 
 
 Cacheable
-InclusiveCombinedBlockCache.getBlock(BlockCacheKeycacheKey,
+LruBlockCache.getBlock(BlockCacheKeycacheKey,
 booleancaching,
 booleanrepeat,
-booleanupdateCacheMetrics)
+booleanupdateCacheMetrics)
+Get the buffer of the block with the specified name.
+
 
 
 Cacheable
@@ -308,6 +308,11 @@
 CombinedBlockCache.getRefCount(BlockCacheKeycacheKey)
 
 
+void
+CombinedBlockCache.returnBlock(BlockCacheKeycacheKey,
+   Cacheableblock)
+
+
 default void
 BlockCache.returnBlock(BlockCacheKeycacheKey,
Cacheableblock)
@@ -315,11 +320,6 @@
  is over.
 
 
-
-void
-CombinedBlockCache.returnBlock(BlockCacheKeycacheKey,
-   Cacheableblock)
-
 
 
 
@@ -497,13 +497,13 @@
 
 
 void
-CachedEntryQueue.add(http://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true;
 title="class or interface in java.util">Map.EntryBlockCacheKey,BucketCache.BucketEntryentry)
-Attempt to add the specified entry to this queue.
-
+BucketCache.BucketEntryGroup.add(http://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true;
 title="class or interface in java.util">Map.EntryBlockCacheKey,BucketCache.BucketEntryblock)
 
 
 void
-BucketCache.BucketEntryGroup.add(http://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true;
 title="class or interface in java.util">Map.EntryBlockCacheKey,BucketCache.BucketEntryblock)
+CachedEntryQueue.add(http://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true;
 title="class or interface in java.util">Map.EntryBlockCacheKey,BucketCache.BucketEntryentry)
+Attempt to add the specified entry to this queue.
+
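
The rows above reorder the BlockCache implementations (CombinedBlockCache, LruBlockCache, MemcachedBlockCache and so on), but the interface round-trip they document is stable. A hedged sketch of it; the BlockCache and Cacheable instances must come from elsewhere (e.g. an LruBlockCache and a decoded HFileBlock):

import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.Cacheable;

public class BlockCacheSketch {
  // Cache a block under its (hfile name, offset) key, read it back, evict it.
  static Cacheable roundTrip(BlockCache cache, Cacheable block) {
    BlockCacheKey key = new BlockCacheKey("example-hfile", 0L);
    cache.cacheBlock(key, block);  // two-arg overload defaults to not in-memory
    Cacheable cached = cache.getBlock(key,
        true,   // caching: retain the block on read
        false,  // repeat: first request for this block
        true);  // updateCacheMetrics
    cache.evictBlock(key);
    return cached;
  }
}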
 
 
 


[14/51] [partial] hbase-site git commit: Published site at .

2018-02-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6674e3ab/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithResult.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithResult.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithResult.html
index 77fb9b5..c4e8c8b 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithResult.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithResult.html
@@ -165,520 +165,519 @@
 157E env = 
checkAndLoadInstance(implClass, priority, conf);
 158if (env != null) {
 159  
this.coprocEnvironments.add(env);
-160  LOG.info(
-161  "System coprocessor " + 
className + " was loaded " + "successfully with priority (" + priority + 
").");
-162  ++priority;
-163}
-164  } catch (Throwable t) {
-165// We always abort if system 
coprocessors cannot be loaded
-166abortServer(className, t);
-167  }
-168}
-169  }
-170
-171  /**
-172   * Load a coprocessor implementation 
into the host
-173   * @param path path to implementation 
jar
-174   * @param className the main class 
name
-175   * @param priority chaining priority
-176   * @param conf configuration for 
coprocessor
-177   * @throws java.io.IOException 
Exception
-178   */
-179  public E load(Path path, String 
className, int priority,
-180  Configuration conf) throws 
IOException {
-181String[] includedClassPrefixes = 
null;
-182if 
(conf.get(HConstants.CP_HTD_ATTR_INCLUSION_KEY) != null){
-183  String prefixes = 
conf.get(HConstants.CP_HTD_ATTR_INCLUSION_KEY);
-184  includedClassPrefixes = 
prefixes.split(";");
-185}
-186return load(path, className, 
priority, conf, includedClassPrefixes);
-187  }
-188
-189  /**
-190   * Load a coprocessor implementation 
into the host
-191   * @param path path to implementation 
jar
-192   * @param className the main class 
name
-193   * @param priority chaining priority
-194   * @param conf configuration for 
coprocessor
-195   * @param includedClassPrefixes class 
name prefixes to include
-196   * @throws java.io.IOException 
Exception
-197   */
-198  public E load(Path path, String 
className, int priority,
-199  Configuration conf, String[] 
includedClassPrefixes) throws IOException {
-200Class<?> implClass;
-201LOG.debug("Loading coprocessor class 
" + className + " with path " +
-202path + " and priority " + 
priority);
-203
-204ClassLoader cl = null;
-205if (path == null) {
-206  try {
-207implClass = 
getClass().getClassLoader().loadClass(className);
-208  } catch (ClassNotFoundException e) 
{
-209throw new IOException("No jar 
path specified for " + className);
-210  }
-211} else {
-212  cl = 
CoprocessorClassLoader.getClassLoader(
-213path, 
getClass().getClassLoader(), pathPrefix, conf);
-214  try {
-215implClass = 
((CoprocessorClassLoader)cl).loadClass(className, includedClassPrefixes);
-216  } catch (ClassNotFoundException e) 
{
-217throw new IOException("Cannot 
load external coprocessor class " + className, e);
-218  }
-219}
-220
-221//load custom code for coprocessor
-222Thread currentThread = 
Thread.currentThread();
-223ClassLoader hostClassLoader = 
currentThread.getContextClassLoader();
-224try{
-225  // switch temporarily to the thread 
classloader for custom CP
-226  
currentThread.setContextClassLoader(cl);
-227  E cpInstance = 
checkAndLoadInstance(implClass, priority, conf);
-228  return cpInstance;
-229} finally {
-230  // restore the fresh (host) 
classloader
-231  
currentThread.setContextClassLoader(hostClassLoader);
-232}
-233  }
-234
-235  @VisibleForTesting
-236  public void load(Class<? extends C> implClass, int priority, Configuration conf)
-237  throws IOException {
-238E env = 
checkAndLoadInstance(implClass, priority, conf);
-239coprocEnvironments.add(env);
-240  }
-241
-242  /**
-243   * @param implClass Implementation 
class
-244   * @param priority priority
-245   * @param conf configuration
-246   * @throws java.io.IOException 
Exception
-247   */
-248  public E checkAndLoadInstance(Class<?> implClass, int priority, Configuration conf)
-249  throws IOException {
-250// create the instance
-251C impl;
-252try {
-253  impl = 
checkAndGetInstance(implClass);
-254  if (impl == null) {
-255LOG.error("Cannot load 
coprocessor " + implClass.getSimpleName());
-256return null;
-257  }
-258} catch 
(InstantiationException|IllegalAccessException e) {
-259  throw new IOException(e);
-260}
-261// create the 
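
The load() body above temporarily switches the thread's context classloader to the coprocessor's own loader for instantiation and restores the host loader in finally. That pattern is reusable on its own; a self-contained sketch:

import java.util.concurrent.Callable;

public final class ContextClassLoaderSwap {
  static <T> T withClassLoader(ClassLoader cl, Callable<T> work) throws Exception {
    Thread current = Thread.currentThread();
    ClassLoader hostLoader = current.getContextClassLoader();
    try {
      current.setContextClassLoader(cl);  // custom CP code sees its own loader
      return work.call();
    } finally {
      current.setContextClassLoader(hostLoader);  // restore the fresh (host) loader
    }
  }
}

As in load() above, the restore runs even when instantiation throws.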

[14/51] [partial] hbase-site git commit: Published site at .

2018-02-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1f2eeb22/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
index b7c24d7..eecd2f9 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
@@ -44,792 +44,792 @@
 036import java.util.List;
 037import java.util.Map;
 038import java.util.NavigableSet;
-039import java.util.Objects;
-040import java.util.PriorityQueue;
-041import java.util.Set;
-042import 
java.util.concurrent.ArrayBlockingQueue;
-043import 
java.util.concurrent.BlockingQueue;
-044import 
java.util.concurrent.ConcurrentHashMap;
-045import 
java.util.concurrent.ConcurrentMap;
-046import 
java.util.concurrent.ConcurrentSkipListSet;
-047import java.util.concurrent.Executors;
-048import 
java.util.concurrent.ScheduledExecutorService;
-049import java.util.concurrent.TimeUnit;
-050import 
java.util.concurrent.atomic.AtomicInteger;
-051import 
java.util.concurrent.atomic.AtomicLong;
-052import 
java.util.concurrent.atomic.LongAdder;
-053import java.util.concurrent.locks.Lock;
-054import 
java.util.concurrent.locks.ReentrantLock;
-055import 
java.util.concurrent.locks.ReentrantReadWriteLock;
-056import 
org.apache.hadoop.conf.Configuration;
-057import 
org.apache.hadoop.hbase.HBaseConfiguration;
-058import 
org.apache.hadoop.hbase.io.HeapSize;
-059import 
org.apache.hadoop.hbase.io.hfile.BlockCache;
-060import 
org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
-061import 
org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;
-062import 
org.apache.hadoop.hbase.io.hfile.BlockPriority;
-063import 
org.apache.hadoop.hbase.io.hfile.BlockType;
-064import 
org.apache.hadoop.hbase.io.hfile.CacheStats;
-065import 
org.apache.hadoop.hbase.io.hfile.Cacheable;
-066import 
org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
-067import 
org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;
-068import 
org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;
-069import 
org.apache.hadoop.hbase.io.hfile.CachedBlock;
-070import 
org.apache.hadoop.hbase.io.hfile.HFileBlock;
-071import 
org.apache.hadoop.hbase.nio.ByteBuff;
-072import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-073import 
org.apache.hadoop.hbase.util.HasThread;
-074import 
org.apache.hadoop.hbase.util.IdReadWriteLock;
-075import 
org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType;
-076import 
org.apache.hadoop.util.StringUtils;
-077import 
org.apache.yetus.audience.InterfaceAudience;
-078import org.slf4j.Logger;
-079import org.slf4j.LoggerFactory;
-080
-081import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-082import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-083import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-084
-085/**
-086 * BucketCache uses {@link 
BucketAllocator} to allocate/free blocks, and uses
-087 * BucketCache#ramCache and 
BucketCache#backingMap in order to
-088 * determine if a given element is in the 
cache. The bucket cache can use on-heap or
-089 * off-heap memory {@link 
ByteBufferIOEngine} or in a file {@link FileIOEngine} to
-090 * store/read the block data.
-091 *
-092 * <p>Eviction is via a similar algorithm as used in
-093 * {@link 
org.apache.hadoop.hbase.io.hfile.LruBlockCache}
-094 *
-095 * <p>BucketCache can be used as mainly a block cache (see
-096 * {@link 
org.apache.hadoop.hbase.io.hfile.CombinedBlockCache}), combined with
-097 * LruBlockCache to decrease CMS GC and 
heap fragmentation.
-098 *
-099 * <p>It also can be used as a secondary cache (e.g. using a file on ssd/fusionio to store
-100 * blocks) to enlarge cache space via
-101 * {@link 
org.apache.hadoop.hbase.io.hfile.LruBlockCache#setVictimCache}
-102 */
-103@InterfaceAudience.Private
-104public class BucketCache implements 
BlockCache, HeapSize {
-105  private static final Logger LOG = 
LoggerFactory.getLogger(BucketCache.class);
-106
-107  /** Priority buckets config */
-108  static final String 
SINGLE_FACTOR_CONFIG_NAME = "hbase.bucketcache.single.factor";
-109  static final String 
MULTI_FACTOR_CONFIG_NAME = "hbase.bucketcache.multi.factor";
-110  static final String 
MEMORY_FACTOR_CONFIG_NAME = "hbase.bucketcache.memory.factor";
-111  static final String 
EXTRA_FREE_FACTOR_CONFIG_NAME = "hbase.bucketcache.extrafreefactor";
-112  static final String 
ACCEPT_FACTOR_CONFIG_NAME = "hbase.bucketcache.acceptfactor";
-113  static final String 
MIN_FACTOR_CONFIG_NAME = "hbase.bucketcache.minfactor";
-114
-115  /** Priority buckets */
-116  @VisibleForTesting
-117  static final float 

[14/51] [partial] hbase-site git commit: Published site at .

2018-01-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/cc6597ec/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncSingleRequestRpcRetryingCaller.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncSingleRequestRpcRetryingCaller.html
 
b/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncSingleRequestRpcRetryingCaller.html
index c3df405..b59083b 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncSingleRequestRpcRetryingCaller.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncSingleRequestRpcRetryingCaller.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class TestAsyncSingleRequestRpcRetryingCaller
+public class TestAsyncSingleRequestRpcRetryingCaller
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 
 
@@ -130,30 +130,34 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 Field and Description
 
 
+static HBaseClassTestRule
+CLASS_RULE
+
+
 private static 
org.apache.hadoop.hbase.client.AsyncConnectionImpl
 CONN
 
-
+
 private static byte[]
 FAMILY
 
-
+
 private static byte[]
 QUALIFIER
 
-
+
 private static byte[]
 ROW
 
-
+
 private static 
org.apache.hadoop.hbase.TableName
 TABLE_NAME
 
-
+
 private static HBaseTestingUtility
 TEST_UTIL
 
-
+
 private static byte[]
 VALUE
 
@@ -239,13 +243,22 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 Field Detail
+
+
+
+
+
+CLASS_RULE
+public static final HBaseClassTestRule CLASS_RULE
+
+
 
 
 
 
 
 TEST_UTIL
-private static finalHBaseTestingUtility TEST_UTIL
+private static finalHBaseTestingUtility TEST_UTIL
 
 
 
@@ -254,7 +267,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 TABLE_NAME
-private staticorg.apache.hadoop.hbase.TableName TABLE_NAME
+private staticorg.apache.hadoop.hbase.TableName TABLE_NAME
 
 
 
@@ -263,7 +276,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 FAMILY
-private staticbyte[] FAMILY
+private staticbyte[] FAMILY
 
 
 
@@ -272,7 +285,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 QUALIFIER
-private staticbyte[] QUALIFIER
+private staticbyte[] QUALIFIER
 
 
 
@@ -281,7 +294,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 ROW
-private staticbyte[] ROW
+private staticbyte[] ROW
 
 
 
@@ -290,7 +303,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 VALUE
-private staticbyte[] VALUE
+private staticbyte[] VALUE
 
 
 
@@ -299,7 +312,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 CONN
-private staticorg.apache.hadoop.hbase.client.AsyncConnectionImpl CONN
+private staticorg.apache.hadoop.hbase.client.AsyncConnectionImpl CONN
 
 
 
@@ -316,7 +329,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 TestAsyncSingleRequestRpcRetryingCaller
-publicTestAsyncSingleRequestRpcRetryingCaller()
+publicTestAsyncSingleRequestRpcRetryingCaller()
 
 
 
@@ -333,7 +346,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 setUpBeforeClass
-public staticvoidsetUpBeforeClass()
+public staticvoidsetUpBeforeClass()
  throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -347,7 +360,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 tearDownAfterClass
-public staticvoidtearDownAfterClass()
+public staticvoidtearDownAfterClass()
throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -361,7 +374,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 testRegionMove
-publicvoidtestRegionMove()
+publicvoidtestRegionMove()
 throws http://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true;
 title="class or interface in java.lang">InterruptedException,
http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutionException.html?is-external=true;
 title="class or interface in java.util.concurrent">ExecutionException,
http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
@@ -379,7 +392,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 failedFuture
-privateThttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in 
java.util.concurrent">CompletableFutureTfailedFuture()

[14/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
index 3f74159..3445980 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
@@ -97,809 +97,809 @@
 089 * value = backupId and full WAL file name</li>
 090 * </ul></p>
 091 */
-092
-093@InterfaceAudience.Private
-094public final class BackupSystemTable 
implements Closeable {
-095  private static final Logger LOG = 
LoggerFactory.getLogger(BackupSystemTable.class);
-096
-097  static class WALItem {
-098String backupId;
-099String walFile;
-100String backupRoot;
-101
-102WALItem(String backupId, String 
walFile, String backupRoot) {
-103  this.backupId = backupId;
-104  this.walFile = walFile;
-105  this.backupRoot = backupRoot;
-106}
-107
-108public String getBackupId() {
-109  return backupId;
-110}
-111
-112public String getWalFile() {
-113  return walFile;
-114}
-115
-116public String getBackupRoot() {
-117  return backupRoot;
-118}
-119
-120@Override
-121public String toString() {
-122  return Path.SEPARATOR + backupRoot 
+ Path.SEPARATOR + backupId + Path.SEPARATOR + walFile;
-123}
+092@InterfaceAudience.Private
+093public final class BackupSystemTable 
implements Closeable {
+094  private static final Logger LOG = 
LoggerFactory.getLogger(BackupSystemTable.class);
+095
+096  static class WALItem {
+097String backupId;
+098String walFile;
+099String backupRoot;
+100
+101WALItem(String backupId, String 
walFile, String backupRoot) {
+102  this.backupId = backupId;
+103  this.walFile = walFile;
+104  this.backupRoot = backupRoot;
+105}
+106
+107public String getBackupId() {
+108  return backupId;
+109}
+110
+111public String getWalFile() {
+112  return walFile;
+113}
+114
+115public String getBackupRoot() {
+116  return backupRoot;
+117}
+118
+119@Override
+120public String toString() {
+121  return Path.SEPARATOR + backupRoot 
+ Path.SEPARATOR + backupId + Path.SEPARATOR + walFile;
+122}
+123  }
 124
-125  }
-126
-127  /**
-128   * Backup system table (main) name
-129   */
-130  private TableName tableName;
-131
-132  /**
-133   * Backup System table name for bulk 
loaded files.
-134   * We keep all bulk loaded file 
references in a separate table
-135   * because we have to isolate general 
backup operations: create, merge etc
-136   * from activity of RegionObserver, 
which controls process of a bulk loading
-137   * {@link 
org.apache.hadoop.hbase.backup.BackupObserver}
-138   */
-139
-140  private TableName bulkLoadTableName;
-141
-142  /**
-143   * Stores backup sessions (contexts)
-144   */
-145  final static byte[] SESSIONS_FAMILY = 
"session".getBytes();
-146  /**
-147   * Stores other meta
-148   */
-149  final static byte[] META_FAMILY = 
"meta".getBytes();
-150  final static byte[] BULK_LOAD_FAMILY = 
"bulk".getBytes();
-151  /**
-152   * Connection to HBase cluster, shared 
among all instances
-153   */
-154  private final Connection connection;
-155
-156  private final static String 
BACKUP_INFO_PREFIX = "session:";
-157  private final static String 
START_CODE_ROW = "startcode:";
-158  private final static byte[] 
ACTIVE_SESSION_ROW = "activesession:".getBytes();
-159  private final static byte[] 
ACTIVE_SESSION_COL = "c".getBytes();
+125  /**
+126   * Backup system table (main) name
+127   */
+128  private TableName tableName;
+129
+130  /**
+131   * Backup System table name for bulk 
loaded files.
+132   * We keep all bulk loaded file 
references in a separate table
+133   * because we have to isolate general 
backup operations: create, merge etc
+134   * from activity of RegionObserver, 
which controls process of a bulk loading
+135   * {@link 
org.apache.hadoop.hbase.backup.BackupObserver}
+136   */
+137  private TableName bulkLoadTableName;
+138
+139  /**
+140   * Stores backup sessions (contexts)
+141   */
+142  final static byte[] SESSIONS_FAMILY = 
"session".getBytes();
+143  /**
+144   * Stores other meta
+145   */
+146  final static byte[] META_FAMILY = 
"meta".getBytes();
+147  final static byte[] BULK_LOAD_FAMILY = 
"bulk".getBytes();
+148  /**
+149   * Connection to HBase cluster, shared 
among all instances
+150   */
+151  private final Connection connection;
+152
+153  private final static String 
BACKUP_INFO_PREFIX = "session:";
+154  private final static String 
START_CODE_ROW = "startcode:";
+155  private final static byte[] 
ACTIVE_SESSION_ROW = "activesession:".getBytes();
+156  private final static 
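
The constants above pin down the row layout: backup session rows are keyed by the "session:" prefix (BACKUP_INFO_PREFIX) and live in the "session" family (SESSIONS_FAMILY). A hedged sketch of building such a row; the qualifier and value encoding are illustrative, not the table's actual schema:

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class BackupRowSketch {
  static Put sessionRow(String backupId, byte[] serializedBackupInfo) {
    byte[] row = Bytes.toBytes("session:" + backupId);  // BACKUP_INFO_PREFIX + id
    Put put = new Put(row);
    put.addColumn(Bytes.toBytes("session"),  // SESSIONS_FAMILY
        Bytes.toBytes("context"),            // illustrative qualifier
        serializedBackupInfo);
    return put;
  }
}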

[14/51] [partial] hbase-site git commit: Published site at .

2018-01-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/96e5e102/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.SparseFilter.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.SparseFilter.html
 
b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.SparseFilter.html
deleted file mode 100644
index 9699eca..000
--- 
a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.SparseFilter.html
+++ /dev/null
@@ -1,350 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-
-
-
-TestScannerHeartbeatMessages.SparseFilter (Apache HBase 3.0.0-SNAPSHOT 
Test API)
-
-
-
-
-
-var methods = {"i0":10,"i1":9};
-var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-
-
-JavaScript is disabled on your browser.
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-PrevClass
-NextClass
-
-
-Frames
-NoFrames
-
-
-AllClasses
-
-
-
-
-
-
-
-Summary:
-Nested|
-Field|
-Constr|
-Method
-
-
-Detail:
-Field|
-Constr|
-Method
-
-
-
-
-
-
-
-
-org.apache.hadoop.hbase.regionserver
-Class TestScannerHeartbeatMessages.SparseFilter
-
-
-
-http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
-
-
-org.apache.hadoop.hbase.filter.Filter
-
-
-org.apache.hadoop.hbase.filter.FilterBase
-
-
-org.apache.hadoop.hbase.regionserver.TestScannerHeartbeatMessages.SparseFilter
-
-
-
-
-
-
-
-
-
-
-
-Enclosing class:
-TestScannerHeartbeatMessages
-
-
-
-public static class TestScannerHeartbeatMessages.SparseFilter
-extends org.apache.hadoop.hbase.filter.FilterBase
-
-
-
-
-
-
-
-
-
-
-
-Nested Class Summary
-
-
-
-
-Nested classes/interfaces inherited from 
classorg.apache.hadoop.hbase.filter.Filter
-org.apache.hadoop.hbase.filter.Filter.ReturnCode
-
-
-
-
-
-
-
-
-Field Summary
-
-
-
-
-Fields inherited from classorg.apache.hadoop.hbase.filter.Filter
-reversed
-
-
-
-
-
-
-
-
-Constructor Summary
-
-Constructors
-
-Constructor and Description
-
-
-SparseFilter()
-
-
-
-
-
-
-
-
-
-Method Summary
-
-Modifier and Type / Method and Description
-org.apache.hadoop.hbase.filter.Filter.ReturnCode
-filterCell(org.apache.hadoop.hbase.Cell v)
-static org.apache.hadoop.hbase.filter.Filter
-parseFrom(byte[] pbBytes)
-
-
-
-
-
-
-Methods inherited from class org.apache.hadoop.hbase.filter.FilterBase
-createFilterFromArguments, filterAllRemaining, filterRow, filterRowCells, filterRowKey, filterRowKey, getNextCellHint, hasFilterRow, isFamilyEssential, reset, toByteArray, toString, transformCell
-
-Methods inherited from class org.apache.hadoop.hbase.filter.Filter
-filterKeyValue, isReversed, setReversed
-
-
-
-
-
-Methods inherited from class java.lang.Object
-clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait
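The deleted page above documented a test filter whose filterCell passes cells only sparsely, which is what lets scanner heartbeat messages be exercised. A hedged sketch of such a filter, assuming an every-Nth-cell rule rather than the test's actual condition:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterBase;

public class SparseFilterSketch extends FilterBase {
  private long seen = 0;

  @Override
  public ReturnCode filterCell(Cell v) {
    // Include only every 1000th cell so long stretches return nothing,
    // forcing the server to send heartbeats to keep the scanner alive.
    // The 1-in-1000 rule is an assumption made for this sketch.
    return (++seen % 1000 == 0) ? ReturnCode.INCLUDE : ReturnCode.SKIP;
  }

  // Deserialization hook that HBase looks up reflectively for custom filters.
  public static Filter parseFrom(byte[] pbBytes) {
    return new SparseFilterSketch();
  }
}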

[14/51] [partial] hbase-site git commit: Published site at .

2018-01-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8118541f/devapidocs/org/apache/hadoop/hbase/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/package-tree.html
index 3d7dd40..8ad69a5 100644
--- a/devapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -445,20 +445,20 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
 
-org.apache.hadoop.hbase.ProcedureState
-org.apache.hadoop.hbase.KeyValue.Type
-org.apache.hadoop.hbase.MemoryCompactionPolicy
+org.apache.hadoop.hbase.HealthChecker.HealthCheckerExitStatus
 org.apache.hadoop.hbase.CellBuilderType
-org.apache.hadoop.hbase.CompatibilitySingletonFactory.SingletonStorage
+org.apache.hadoop.hbase.CompareOperator
 org.apache.hadoop.hbase.HConstants.OperationStatusCode
-org.apache.hadoop.hbase.HealthChecker.HealthCheckerExitStatus
-org.apache.hadoop.hbase.MetaTableAccessor.QueryType
-org.apache.hadoop.hbase.KeepDeletedCells
 org.apache.hadoop.hbase.Cell.Type
+org.apache.hadoop.hbase.ProcedureState
 org.apache.hadoop.hbase.ClusterMetrics.Option
-org.apache.hadoop.hbase.CompareOperator
+org.apache.hadoop.hbase.KeepDeletedCells
+org.apache.hadoop.hbase.MemoryCompactionPolicy
+org.apache.hadoop.hbase.KeyValue.Type
 org.apache.hadoop.hbase.Size.Unit
 org.apache.hadoop.hbase.Coprocessor.State
+org.apache.hadoop.hbase.MetaTableAccessor.QueryType
+org.apache.hadoop.hbase.CompatibilitySingletonFactory.SingletonStorage
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8118541f/devapidocs/org/apache/hadoop/hbase/procedure/Procedure.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure/Procedure.html 
b/devapidocs/org/apache/hadoop/hbase/procedure/Procedure.html
index d8c18c8..2125999 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure/Procedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure/Procedure.html
@@ -481,7 +481,7 @@
 
 joinBarrierLock
-private Object joinBarrierLock
+private final Object joinBarrierLock
 lock to prevent nodes from acquiring and then releasing before we can track them
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8118541f/devapidocs/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.html 
b/devapidocs/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.html
index f2d8c67..f841737 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.html
@@ -338,7 +338,7 @@
 
 register
-public void register(E obj)
+public void register(E obj)
 
@@ -347,7 +347,7 @@
 
 getProcedureManagers
-public Set<E> getProcedureManagers()
+public Set<E> getProcedureManagers()
 
@@ -356,7 +356,7 @@
 
 loadProcedures
-public abstract void loadProcedures(org.apache.hadoop.conf.Configuration conf)
+public abstract void loadProcedures(org.apache.hadoop.conf.Configuration conf)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8118541f/devapidocs/org/apache/hadoop/hbase/procedure/Subprocedure.SubprocedureImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure/Subprocedure.SubprocedureImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure/Subprocedure.SubprocedureImpl.html
index b2ab836..f18cf00 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure/Subprocedure.SubprocedureImpl.html
+++ 

[14/51] [partial] hbase-site git commit: Published site at .

2018-01-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/14db89d7/testdevapidocs/src-html/org/apache/hadoop/hbase/MiniHBaseCluster.SingleFileSystemShutdownThread.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/MiniHBaseCluster.SingleFileSystemShutdownThread.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/MiniHBaseCluster.SingleFileSystemShutdownThread.html
index 1636aa6..f79f186 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/MiniHBaseCluster.SingleFileSystemShutdownThread.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/MiniHBaseCluster.SingleFileSystemShutdownThread.html
@@ -86,811 +86,824 @@
 078   */
 079  public MiniHBaseCluster(Configuration conf, int numMasters, int numRegionServers)
 080      throws IOException, InterruptedException {
-081    this(conf, numMasters, numRegionServers, null, null, null);
+081    this(conf, numMasters, numRegionServers, null, null);
 082  }
 083
 084  /**
-085   * @param rsPorts Ports that RegionServer should use; pass ports if you want to test cluster
-086   *   restart where for sure the regionservers come up on same address+port (but
-087   *   just with different startcode); by default mini hbase clusters choose new
-088   *   arbitrary ports on each cluster start.
-089   * @throws IOException
-090   * @throws InterruptedException
-091   */
-092  public MiniHBaseCluster(Configuration conf, int numMasters, int numRegionServers,
-093      List<Integer> rsPorts,
-094      Class<? extends HMaster> masterClass,
-095      Class<? extends MiniHBaseCluster.MiniHBaseClusterRegionServer> regionserverClass)
-096      throws IOException, InterruptedException {
-097    super(conf);
-098    conf.set(HConstants.MASTER_PORT, "0");
-099    if (conf.getInt(HConstants.MASTER_INFO_PORT, 0) != -1) {
-100      conf.set(HConstants.MASTER_INFO_PORT, "0");
-101    }
-102
-103    // Hadoop 2
-104    CompatibilityFactory.getInstance(MetricsAssertHelper.class).init();
-105
-106    init(numMasters, numRegionServers, rsPorts, masterClass, regionserverClass);
-107    this.initialClusterStatus = getClusterStatus();
-108  }
-109
-110  public Configuration getConfiguration() {
-111    return this.conf;
-112  }
-113
-114  /**
-115   * Subclass so can get at protected methods (none at moment).  Also, creates
-116   * a FileSystem instance per instantiation.  Adds a shutdown own FileSystem
-117   * on the way out. Shuts down own Filesystem only, not All filesystems as
-118   * the FileSystem system exit hook does.
-119   */
-120  public static class MiniHBaseClusterRegionServer extends HRegionServer {
-121    private Thread shutdownThread = null;
-122    private User user = null;
-123    /**
-124     * List of RegionServers killed so far. ServerName also comprises startCode of a server,
-125     * so any restarted instances of the same server will have different ServerName and will not
-126     * coincide with past dead ones. So there's no need to cleanup this list.
-127     */
-128    static Set<ServerName> killedServers = new HashSet<>();
-129
-130    public MiniHBaseClusterRegionServer(Configuration conf)
-131        throws IOException, InterruptedException {
-132      super(conf);
-133      this.user = User.getCurrent();
-134    }
-135
-136    /*
-137     * @param c
-138     * @param currentfs We return this if we did not make a new one.
-139     * @param uniqueName Same name used to help identify the created fs.
-140     * @return A new fs instance if we are up on DistributeFileSystem.
-141     * @throws IOException
-142     */
-143
-144    @Override
-145    protected void handleReportForDutyResponse(
-146        final RegionServerStartupResponse c) throws IOException {
-147      super.handleReportForDutyResponse(c);
-148      // Run this thread to shutdown our filesystem on way out.
-149      this.shutdownThread = new SingleFileSystemShutdownThread(getFileSystem());
-150    }
-151
-152    @Override
-153    public void run() {
-154      try {
-155        this.user.runAs(new PrivilegedAction<Object>(){
-156          public Object run() {
-157            runRegionServer();
-158            return null;
-159          }
-160        });
-161      } catch (Throwable t) {
-162        LOG.error("Exception in run", t);
-163      } finally {
-164        // Run this on the way out.
-165        if (this.shutdownThread != null) {
-166          this.shutdownThread.start();
-167          Threads.shutdown(this.shutdownThread, 3);
-168        }
-169      }
-170    }
-171
-172    private void runRegionServer() {
-173      super.run();
-174    }
-175
-176    @Override
-177    protected void kill() {
-178      killedServers.add(getServerName());
-179      super.kill();
-180    }
-181
-182    @Override
-183    public void abort(final String reason, final Throwable cause) {
-184      this.user.runAs(new PrivilegedAction<Object>() {
-185        public
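For context, a minimal sketch of how a test usually stands up this mini cluster, going through HBaseTestingUtility rather than the constructors shown in the patch; the master/region-server counts and the printed metric are illustrative only.

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;

public class MiniClusterSketch {
  public static void main(String[] args) throws Exception {
    HBaseTestingUtility util = new HBaseTestingUtility();
    // One master, two region servers; ports are chosen arbitrarily on each
    // start, matching the rsPorts javadoc above (pass explicit ports only
    // when testing cluster restart on the same address+port).
    util.startMiniCluster(1, 2);
    try {
      MiniHBaseCluster cluster = util.getMiniHBaseCluster();
      System.out.println("live region servers: "
          + cluster.getLiveRegionServerThreads().size());
    } finally {
      util.shutdownMiniCluster();
    }
  }
}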

[14/51] [partial] hbase-site git commit: Published site at .

2018-01-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0b638133/testdevapidocs/org/apache/hadoop/hbase/client/class-use/TestHCM.SleepCoprocessor.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/client/class-use/TestHCM.SleepCoprocessor.html
 
b/testdevapidocs/org/apache/hadoop/hbase/client/class-use/TestHCM.SleepCoprocessor.html
deleted file mode 100644
index e83a425..000
--- 
a/testdevapidocs/org/apache/hadoop/hbase/client/class-use/TestHCM.SleepCoprocessor.html
+++ /dev/null
@@ -1,125 +0,0 @@
-Uses of Class org.apache.hadoop.hbase.client.TestHCM.SleepCoprocessor (Apache HBase 3.0.0-SNAPSHOT Test API)
-
-No usage of org.apache.hadoop.hbase.client.TestHCM.SleepCoprocessor
-
-Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0b638133/testdevapidocs/org/apache/hadoop/hbase/client/class-use/TestHCM.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/client/class-use/TestHCM.html 
b/testdevapidocs/org/apache/hadoop/hbase/client/class-use/TestHCM.html
deleted file mode 100644
index f44cef5..000
--- a/testdevapidocs/org/apache/hadoop/hbase/client/class-use/TestHCM.html
+++ /dev/null
@@ -1,125 +0,0 @@
-Uses of Class org.apache.hadoop.hbase.client.TestHCM (Apache HBase 3.0.0-SNAPSHOT Test API)
-
-No usage of org.apache.hadoop.hbase.client.TestHCM
-
-Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0b638133/testdevapidocs/org/apache/hadoop/hbase/client/package-frame.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/client/package-frame.html 
b/testdevapidocs/org/apache/hadoop/hbase/client/package-frame.html
index 51cba7a..4e2f3d0 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/client/package-frame.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/client/package-frame.html
@@ -20,6 +20,11 @@
 Classes
 
 AbstractTestAsyncTableScan
+AbstractTestCIOperationTimeout
+AbstractTestCIRpcTimeout
+AbstractTestCITimeout
+AbstractTestCITimeout.SleepAndFailFirstTime
+AbstractTestCITimeout.SleepCoprocessor
 AbstractTestResultScannerCursor
 AbstractTestScanCursor
 AbstractTestScanCursor.SparseFilter
@@ -107,6 +112,15 @@
 TestBufferedMutatorParams
 

[14/51] [partial] hbase-site git commit: Published site at .

2018-01-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f183e80f/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html 
b/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index dc2a8e8..3630ba9 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -143,8 +143,8 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
 
-org.apache.hadoop.hbase.backup.TestBackupDeleteWithFailures.Failure
 org.apache.hadoop.hbase.backup.TestIncrementalBackupMergeWithFailures.FailurePhase
+org.apache.hadoop.hbase.backup.TestBackupDeleteWithFailures.Failure
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f183e80f/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html 
b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html
index f11329e..5c8c746 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html
@@ -4062,6 +4062,23 @@
   int initRowCount,
   int factor)
 
+
+static int
+TestLoadIncrementalHFiles.loadHFiles(String testName,
+  org.apache.hadoop.hbase.client.TableDescriptor htd,
+  HBaseTestingUtility util,
+  byte[] fam,
+  byte[] qual,
+  boolean preCreateTable,
+  byte[][] tableSplitKeys,
+  byte[][][] hfileRanges,
+  boolean useMap,
+  boolean deleteFile,
+  boolean copyFiles,
+  int initRowCount,
+  int factor,
+  int depth)
+
 
 
 
@@ -4256,7 +4273,7 @@
 TestBoundedRegionGroupingStrategy.TEST_UTIL
 
 
-protected static HBaseTestingUtility
+private static HBaseTestingUtility
 TestFSHLogProvider.TEST_UTIL
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f183e80f/testdevapidocs/org/apache/hadoop/hbase/coprocessor/TestWALObserver.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/coprocessor/TestWALObserver.html 
b/testdevapidocs/org/apache/hadoop/hbase/coprocessor/TestWALObserver.html
index 231e17c..f64f91d 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/coprocessor/TestWALObserver.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/coprocessor/TestWALObserver.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class TestWALObserver
+public class TestWALObserver
 extends java.lang.Object
 Tests invocation of the
  MasterObserver interface hooks at
@@ -189,10 +189,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 TEST_VALUE
 
 
-private static byte[]
-UNSPECIFIED_REGION
-
-
 private 
org.apache.hadoop.hbase.wal.WALFactory
 wals
 
@@ -237,8 +233,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 private void
-addWALEdits(org.apache.hadoop.hbase.TableName tableName,
-   org.apache.hadoop.hbase.HRegionInfo hri,
+addWALEdits(org.apache.hadoop.hbase.TableName tableName,
+   org.apache.hadoop.hbase.client.RegionInfo hri,
 byte[] rowName,
 byte[] family,
 int count,
@@ -248,12 +244,14 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?

org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControlmvcc)
 
 
-private org.apache.hadoop.hbase.HRegionInfo
-createBasic3FamilyHRegionInfo(String tableName)
+private org.apache.hadoop.hbase.client.TableDescriptor
+createBasic3FamilyHTD(String tableName)
 
-private org.apache.hadoop.hbase.HTableDescriptor
-createBasic3FamilyHTD(String tableName)
+private

[14/51] [partial] hbase-site git commit: Published site at .

2018-01-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/49431b18/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html 
b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
index 370b636..2d7c1e9 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
@@ -128,7 +128,7 @@
 
 
 @InterfaceAudience.LimitedPrivate(value="Tools")
-public class HMaster
+public class HMaster
 extends HRegionServer
 implements MasterServices
 HMaster is the "master server" for HBase. An HBase cluster 
has one active
@@ -276,7 +276,7 @@ implements hfileCleaner
 
 
-private ProcedureEvent
+private ProcedureEvent<?>
 initialized
 
 
@@ -398,58 +398,54 @@ implements regionServerTracker
 
 
-private ReplicationManager
-replicationManager
-
-
 private ReplicationMetaCleaner
 replicationMetaCleaner
 
-
-private ReplicationZKNodeCleanerChore
-replicationZKNodeCleanerChore
-
 
+private ReplicationPeerManager
+replicationPeerManager
+
+
 (package private) MemoryBoundedLogMessageBuffer
 rsFatals
 
-
-private ProcedureEvent
+
+private ProcedureEvent<?>
 serverCrashProcessingEnabled
 
-
+
 private ServerManager
 serverManager
 
-
+
 (package private) boolean
 serviceStarted
 
-
+
 (package private) SnapshotManager
 snapshotManager
 
-
+
 private SnapshotQuotaObserverChore
 snapshotQuotaChore
 
-
+
 private SpaceQuotaSnapshotNotifier
 spaceQuotaSnapshotNotifier
 
-
+
 private SplitOrMergeTracker
 

[14/51] [partial] hbase-site git commit: Published site at .

2018-01-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c7c40c62/devapidocs/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.RandomizedMatrix.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.RandomizedMatrix.html
 
b/devapidocs/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.RandomizedMatrix.html
index c4b0754..1e3aa89 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.RandomizedMatrix.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.RandomizedMatrix.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-protected static class RegionPlacementMaintainer.RandomizedMatrix
+protected static class RegionPlacementMaintainer.RandomizedMatrix
 extends java.lang.Object
 Some algorithms for solving the assignment problem may traverse workers or
 jobs in linear order which may result in skewing the assignments of the
@@ -255,7 +255,7 @@
 
 rows
-private final int rows
+private final int rows
 
@@ -264,7 +264,7 @@
 
 cols
-private final int cols
+private final int cols
 
@@ -273,7 +273,7 @@
 
 rowTransform
-private final int[] rowTransform
+private final int[] rowTransform
 
@@ -282,7 +282,7 @@
 
 rowInverse
-private final int[] rowInverse
+private final int[] rowInverse
 
@@ -291,7 +291,7 @@
 
 colTransform
-private final int[] colTransform
+private final int[] colTransform
 
@@ -300,7 +300,7 @@
 
 colInverse
-private final int[] colInverse
+private final int[] colInverse
 
 
 
@@ -317,7 +317,7 @@
 
 RandomizedMatrix
-public RandomizedMatrix(int rows,
+public RandomizedMatrix(int rows,
    int cols)
 Create a randomization scheme for a matrix of a given size.
 
@@ -341,7 +341,7 @@
 
 transform
-public float[][] transform(float[][] matrix)
+public float[][] transform(float[][] matrix)
 Copy a given matrix into a new matrix, transforming each row index and
 each column index according to the randomization scheme that was created
 at construction time.
@@ -359,7 +359,7 @@
 
 invert
-public float[][] invert(float[][] matrix)
+public float[][] invert(float[][] matrix)
 Copy a given matrix into a new matrix, transforming each row index and
 each column index according to the inverse of the randomization scheme
 that was created at construction time.
@@ -377,7 +377,7 @@
 
 invertIndices
-public int[] invertIndices(int[] indices)
+public int[] invertIndices(int[] indices)
 Given an array where each element indices[i] represents the
 randomized column index corresponding to randomized row index i,
 create a new array with the corresponding inverted indices.
  create a new array with the corresponding inverted indices.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c7c40c62/devapidocs/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.html 
b/devapidocs/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.html
index bb0399a..5d7b682 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.html
@@ -599,7 +599,7 @@
 
 getNewAssignmentPlan
-public FavoredNodesPlan getNewAssignmentPlan()
+public FavoredNodesPlan getNewAssignmentPlan()
    throws IOException
 
 Throws:
@@ -613,7 +613,7 @@
 
 printAssignmentPlan
-public static void printAssignmentPlan(FavoredNodesPlan plan)
+public static void printAssignmentPlan(FavoredNodesPlan plan)
 Print the assignment plan to the system output stream
 
 Parameters:
@@ -627,7 +627,7 @@
 
 updateAssignmentPlanToMeta
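The RandomizedMatrix javadoc above describes permuting row and column indices before solving the assignment problem, then inverting the permutation afterwards so linear-order solvers do not skew the result. A self-contained sketch of that idea (rows only, Fisher-Yates shuffle); the field names echo the javadoc but this is not the HBase source.

import java.util.Random;

public class RandomizedMatrixSketch {
  private final int[] rowTransform;
  private final int[] rowInverse;

  RandomizedMatrixSketch(int rows, long seed) {
    rowTransform = new int[rows];
    rowInverse = new int[rows];
    for (int i = 0; i < rows; i++) rowTransform[i] = i;
    Random rnd = new Random(seed);
    // Fisher-Yates shuffle builds the forward permutation...
    for (int i = rows - 1; i > 0; i--) {
      int j = rnd.nextInt(i + 1);
      int tmp = rowTransform[i];
      rowTransform[i] = rowTransform[j];
      rowTransform[j] = tmp;
    }
    // ...and the inverse permutation undoes it after the solver runs.
    for (int i = 0; i < rows; i++) rowInverse[rowTransform[i]] = i;
  }

  // Copy into a new matrix with rows moved to their randomized positions.
  float[][] transform(float[][] matrix) {
    float[][] out = new float[matrix.length][];
    for (int i = 0; i < matrix.length; i++) out[rowTransform[i]] = matrix[i].clone();
    return out;
  }

  // Restore the original row order after solving.
  float[][] invert(float[][] matrix) {
    float[][] out = new float[matrix.length][];
    for (int i = 0; i < matrix.length; i++) out[rowInverse[i]] = matrix[i].clone();
    return out;
  }
}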

[14/51] [partial] hbase-site git commit: Published site at .

2018-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bb398572/devapidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.html
index dc127cd..2640e94 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.html
@@ -47,483 +47,482 @@
 039import org.apache.hadoop.hbase.replication.ReplicationPeerConfigBuilder;
 040import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
 041import org.apache.hadoop.hbase.util.Bytes;
-042import org.apache.hadoop.hbase.util.Strings;
-043import org.apache.yetus.audience.InterfaceAudience;
-044import org.apache.yetus.audience.InterfaceStability;
-045import org.slf4j.Logger;
-046import org.slf4j.LoggerFactory;
-047
-048import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-049import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
-050import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
-051import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-052import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-053import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
-054
-055/**
-056 * Helper for TableCFs Operations.
-057 */
-058@InterfaceAudience.Private
-059@InterfaceStability.Stable
-060public final class ReplicationPeerConfigUtil {
-061
-062  private static final Logger LOG = LoggerFactory.getLogger(ReplicationPeerConfigUtil.class);
-063
-064  private ReplicationPeerConfigUtil() {}
-065
-066  public static String convertToString(Set<String> namespaces) {
-067    if (namespaces == null) {
-068      return null;
-069    }
-070    return StringUtils.join(namespaces, ';');
-071  }
-072
-073  /** convert map to TableCFs Object */
-074  public static ReplicationProtos.TableCF[] convert(
-075      Map<TableName, ? extends Collection<String>> tableCfs) {
-076    if (tableCfs == null) {
-077      return null;
-078    }
-079    List<ReplicationProtos.TableCF> tableCFList = new ArrayList<>(tableCfs.entrySet().size());
-080    ReplicationProtos.TableCF.Builder tableCFBuilder = ReplicationProtos.TableCF.newBuilder();
-081    for (Map.Entry<TableName, ? extends Collection<String>> entry : tableCfs.entrySet()) {
-082      tableCFBuilder.clear();
-083      tableCFBuilder.setTableName(ProtobufUtil.toProtoTableName(entry.getKey()));
-084      Collection<String> v = entry.getValue();
-085      if (v != null && !v.isEmpty()) {
-086        for (String value : entry.getValue()) {
-087          tableCFBuilder.addFamilies(ByteString.copyFromUtf8(value));
-088        }
-089      }
-090      tableCFList.add(tableCFBuilder.build());
-091    }
-092    return tableCFList.toArray(new ReplicationProtos.TableCF[tableCFList.size()]);
-093  }
-094
-095  public static String convertToString(Map<TableName, ? extends Collection<String>> tableCfs) {
-096    if (tableCfs == null) {
-097      return null;
-098    }
-099    return convert(convert(tableCfs));
-100  }
-101
-102  /**
-103   *  Convert string to TableCFs Object.
-104   *  This is only for read TableCFs information from TableCF node.
-105   *  Input String Format: ns1.table1:cf1,cf2;ns2.table2:cfA,cfB;ns3.table3.
-106   * */
-107  public static ReplicationProtos.TableCF[] convert(String tableCFsConfig) {
-108    if (tableCFsConfig == null || tableCFsConfig.trim().length() == 0) {
-109      return null;
-110    }
-111
-112    ReplicationProtos.TableCF.Builder tableCFBuilder = ReplicationProtos.TableCF.newBuilder();
-113    String[] tables = tableCFsConfig.split(";");
-114    List<ReplicationProtos.TableCF> tableCFList = new ArrayList<>(tables.length);
-115
-116    for (String tab : tables) {
-117      // 1 ignore empty table config
-118      tab = tab.trim();
-119      if (tab.length() == 0) {
-120        continue;
-121      }
-122      // 2 split to "table" and "cf1,cf2"
-123      //   for each table: "table#cf1,cf2" or "table"
-124      String[] pair = tab.split(":");
-125      String tabName = pair[0].trim();
-126      if (pair.length > 2 || tabName.length() == 0) {
-127        LOG.info("incorrect format:" + tableCFsConfig);
-128        continue;
-129      }
-130
-131      tableCFBuilder.clear();
-132      // split namespace from tableName
-133      String ns = "default";
-134      String tName = tabName;
-135      String[] dbs = tabName.split("\\.");
-136      if (dbs != null && dbs.length == 2) {
-137        ns = dbs[0];
-138        tName = dbs[1];
-139      }
-140      tableCFBuilder.setTableName(
-141          ProtobufUtil.toProtoTableName(TableName.valueOf(ns, tName)));
-142
-143
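The convert(String) method above parses the TableCFs format ns1.table1:cf1,cf2;ns2.table2:cfA,cfB;ns3.table3. A standalone sketch of the same parsing rules, minus the protobuf builders; illustrative, not the HBase implementation.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class TableCFsParser {
  static Map<String, List<String>> parse(String tableCFsConfig) {
    Map<String, List<String>> result = new LinkedHashMap<>();
    if (tableCFsConfig == null || tableCFsConfig.trim().isEmpty()) {
      return result;
    }
    for (String tab : tableCFsConfig.split(";")) {
      tab = tab.trim();
      if (tab.isEmpty()) {
        continue; // 1: ignore empty table config
      }
      // 2: split into "table" and "cf1,cf2"
      String[] pair = tab.split(":");
      String tabName = pair[0].trim();
      if (pair.length > 2 || tabName.isEmpty()) {
        continue; // incorrect format, skipped as in the original
      }
      List<String> families = new ArrayList<>();
      if (pair.length == 2) {
        for (String cf : pair[1].split(",")) {
          if (!cf.trim().isEmpty()) families.add(cf.trim());
        }
      }
      result.put(tabName, families);
    }
    return result;
  }

  public static void main(String[] args) {
    // Prints {ns1.table1=[cf1, cf2], ns2.table2=[cfA, cfB], ns3.table3=[]}
    System.out.println(parse("ns1.table1:cf1,cf2;ns2.table2:cfA,cfB;ns3.table3"));
  }
}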

[14/51] [partial] hbase-site git commit: Published site at .

2018-01-01 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.Integrity.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.Integrity.html 
b/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.Integrity.html
index 099d970..7c20043 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.Integrity.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.Integrity.html
@@ -506,6 +506,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-Copyright © 2007–2017 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.html 
b/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.html
index b063349..9f0594e 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.html
@@ -436,6 +436,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-Copyright © 2007–2017 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/AES.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/AES.html 
b/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/AES.html
index b050468..26364f8 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/AES.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/AES.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2017 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/AESDecryptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/AESDecryptor.html 
b/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/AESDecryptor.html
index 94a96c3..403b775 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/AESDecryptor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/AESDecryptor.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2017 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/AESEncryptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/AESEncryptor.html 
b/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/AESEncryptor.html
index c73b2e2..6739d3c 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/AESEncryptor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/AESEncryptor.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2017 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/CommonsCryptoAES.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/CommonsCryptoAES.html
 
b/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/CommonsCryptoAES.html
index c84cbb0..d5c7804 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/CommonsCryptoAES.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/crypto/aes/class-use/CommonsCryptoAES.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2017 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
 
 


[14/51] [partial] hbase-site git commit: Published site at .

2017-12-30 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/83bf6175/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionWithInMemoryFlush.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionWithInMemoryFlush.html
 
b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionWithInMemoryFlush.html
index 5550dbf..b066496 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionWithInMemoryFlush.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionWithInMemoryFlush.html
@@ -135,7 +135,7 @@ extends TestHRegion
-TestHRegion.FlushThread, TestHRegion.GetTillDoneOrException, TestHRegion.HRegionWithSeqId, TestHRegion.IsFlushWALMarker, TestHRegion.PutThread
+TestHRegion.FlushThread, TestHRegion.GetTillDoneOrException, TestHRegion.HRegionForTesting, TestHRegion.HRegionWithSeqId, TestHRegion.HStoreForTesting, TestHRegion.IsFlushWALMarker, TestHRegion.PutThread
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/83bf6175/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.MyStore.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.MyStore.html 
b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.MyStore.html
index 1f5e5df..e9211e8 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.MyStore.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.MyStore.html
@@ -220,7 +220,7 @@ extends org.apache.hadoop.hbase.regionserver.HStore
 
 
 Methods inherited from 
classorg.apache.hadoop.hbase.regionserver.HStore
-add, add, addChangedReaderObserver, areWritesEnabled, 
assertBulkLoadHFileOk, bulkLoadHFile, bulkLoadHFile, cancelRequestedCompaction, 
canSplit, close, closeAndArchiveCompactedFiles, compact, 
compactRecentForTestingAssumingDefaultPolicy, completeCompaction, 
createCacheConf, createFlushContext, createScanner, createStoreEngine, 
createWriterInTmp, deleteChangedReaderObserver, deregisterChildren, 
determineTTLFromFamily, flushCache, getAvgStoreFileAge, getBlockingFileCount, 
getBytesPerChecksum, getCacheConfig, getChecksumType, getCloseCheckInterval, 
getColumnFamilyDescriptor, getColumnFamilyName, getCompactedCellsCount, 
getCompactedCellsSize, getCompactedFiles, getCompactedFilesCount, 
getCompactionCheckMultiplier, getCompactionPressure, getCompactionProgress, 
getCompactPriority, getComparator, getCoprocessorHost, getDataBlockEncoder, 
getFileSystem, getFlushableSize, getFlushedCellsCount, getFlushedCellsSize, 
getFlushedOutputFileSize, getHFilesSize, getHRegion, getLastCompactSize, getMajorCompactedCellsCount, getMajorCompactedCellsSize, getMaxMemStoreTS, 
getMaxSequenceId, getMaxStoreFileAge, getMemStoreFlushSize, getMemStoreSize, 
getMinStoreFileAge, getNumHFiles, getNumReferenceFiles, getOffPeakHours, 
getRegionFileSystem, getRegionInfo, getScanInfo, getScanner, getScanners, 
getScanners, getScanners, getSize, getSnapshotSize, getSplitPoint, 
getStoreEngine, getStorefiles, getStorefilesCount, 
getStorefilesRootLevelIndexSize, getStorefilesSize, getStoreFileTtl, 
getStoreHomedir, getStoreHomedir, getStoreSizeUncompressed, getTableName, 
getTotalStaticBloomSize, getTotalStaticIndexSize, hasReferences, 
hasTooManyStoreFiles, heapSize, isPrimaryReplicaStore, isSloppyMemStore, 
moveFileIntoPlace, needsCompaction, onConfigurationChange, 
postSnapshotOperation, preBulkLoadHFile, preFlushSeqIDEstimation, 
preSnapshotOperation, recreateScanners, refreshStoreFiles, refreshStoreFiles, 
registerChildren, replaceStoreFiles, replayCompactionMarker, requestCompaction, 
requestCompaction, setDataBlockEncoderInTest, setScanInfo, shouldPerformMajorCompaction, 
snapshot, startReplayingFromWAL, stopReplayingFromWAL, throttleCompaction, 
timeOfOldestEdit, toString, triggerMajorCompaction, upsert, 
versionsToReturn
+add, add, addChangedReaderObserver, areWritesEnabled, 
assertBulkLoadHFileOk, bulkLoadHFile, bulkLoadHFile, cancelRequestedCompaction, 
canSplit, close, closeAndArchiveCompactedFiles, compact, 
compactRecentForTestingAssumingDefaultPolicy, completeCompaction, 
createCacheConf, createFlushContext, createScanner, createStoreEngine, 
createStoreFileAndReader, createWriterInTmp, deleteChangedReaderObserver, 
deregisterChildren, determineTTLFromFamily, doCompaction, flushCache, 
getAvgStoreFileAge, getBlockingFileCount, getBytesPerChecksum, getCacheConfig, 
getChecksumType, getCloseCheckInterval, getColumnFamilyDescriptor, 
getColumnFamilyName, getCompactedCellsCount, getCompactedCellsSize, 
getCompactedFiles, getCompactedFilesCount, getCompactionCheckMultiplier, 
getCompactionPressure, getCompactionProgress, getCompactPriority, 
getComparator, getCoprocessorHost, getDataBlockEncoder, getFileSystem, 
getFlushableSize, getFlushedCellsCount, getFlushedCellsSize, 
getFlushedOutputFileSize, 

[14/51] [partial] hbase-site git commit: Published site at .

2017-12-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/63d6f712/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyRpcServer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyRpcServer.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyRpcServer.html
index b8a90bd..b3afa61 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyRpcServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyRpcServer.html
@@ -120,8 +120,8 @@
 NettyRpcServerPreambleHandler(NettyRpcServerrpcServer)
 
 
-NettyServerRpcConnection(NettyRpcServer rpcServer,
-    org.apache.hadoop.hbase.shaded.io.netty.channel.Channel channel)
+NettyServerRpcConnection(NettyRpcServer rpcServer,
+    org.apache.hbase.thirdparty.io.netty.channel.Channel channel)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/63d6f712/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyServerCall.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyServerCall.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyServerCall.html
index a8d1320..d79e4cb 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyServerCall.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyServerCall.html
@@ -106,11 +106,11 @@
 
 
 NettyServerCall
-NettyServerRpcConnection.createCall(int id,
-  org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService service,
-  org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor md,
+NettyServerRpcConnection.createCall(int id,
+  org.apache.hbase.thirdparty.com.google.protobuf.BlockingService service,
+  org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.MethodDescriptor md,
   org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
-  org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
+  org.apache.hbase.thirdparty.com.google.protobuf.Message param,
   CellScanner cellScanner,
   long size,
   java.net.InetAddress remoteAddress,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/63d6f712/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyServerRpcConnection.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyServerRpcConnection.html
 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyServerRpcConnection.html
index f75ddd4..3a74819 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyServerRpcConnection.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/NettyServerRpcConnection.html
@@ -123,7 +123,7 @@
 
 
 protected NettyServerRpcConnection
-NettyRpcServerPreambleHandler.createNettyServerRpcConnection(org.apache.hadoop.hbase.shaded.io.netty.channel.Channel channel)
+NettyRpcServerPreambleHandler.createNettyServerRpcConnection(org.apache.hbase.thirdparty.io.netty.channel.Channel channel)
 
 
 
@@ -151,11 +151,11 @@
 
 
 
-NettyServerCall(int id,
-   org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService service,
-   org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor md,
+NettyServerCall(int id,
+   org.apache.hbase.thirdparty.com.google.protobuf.BlockingService service,
+   org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.MethodDescriptor md,
    org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
-   org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
+   org.apache.hbase.thirdparty.com.google.protobuf.Message param,
    CellScanner cellScanner,
    NettyServerRpcConnection connection,
    long size,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/63d6f712/devapidocs/org/apache/hadoop/hbase/ipc/class-use/QosPriority.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/QosPriority.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/QosPriority.html
index 330d74f..423dd95 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/QosPriority.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/QosPriority.html
@@ -108,19 +108,19 @@
 
 
 org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse
-MasterRpcServices.compactRegion(org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
+MasterRpcServices.compactRegion(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
   

[14/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.Type.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.Type.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.Type.html
index f8eace7..66b6656 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.Type.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.Type.html
@@ -27,2569 +27,2540 @@
 019 */
 020package org.apache.hadoop.hbase;
 021
-022import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
-023import static org.apache.hadoop.hbase.util.Bytes.len;
-024
-025import java.io.DataInput;
-026import java.io.DataOutput;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.nio.ByteBuffer;
-030import java.util.ArrayList;
-031import java.util.Arrays;
-032import java.util.HashMap;
-033import java.util.Iterator;
-034import java.util.List;
-035import java.util.Map;
-036import java.util.Optional;
-037
-038import org.apache.hadoop.hbase.util.ByteBufferUtils;
-039import org.apache.hadoop.hbase.util.Bytes;
-040import org.apache.hadoop.hbase.util.ClassSize;
-041import org.apache.hadoop.io.RawComparator;
-042import org.apache.yetus.audience.InterfaceAudience;
-043import org.slf4j.Logger;
-044import org.slf4j.LoggerFactory;
-045
-046import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-047
-048/**
-049 * An HBase Key/Value. This is the fundamental HBase Type.
+022import static org.apache.hadoop.hbase.util.Bytes.len;
+023
+024import java.io.DataInput;
+025import java.io.DataOutput;
+026import java.io.IOException;
+027import java.io.OutputStream;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Arrays;
+031import java.util.HashMap;
+032import java.util.Iterator;
+033import java.util.List;
+034import java.util.Map;
+035import org.apache.hadoop.hbase.util.ByteBufferUtils;
+036import org.apache.hadoop.hbase.util.Bytes;
+037import org.apache.hadoop.hbase.util.ClassSize;
+038import org.apache.hadoop.io.RawComparator;
+039import org.apache.yetus.audience.InterfaceAudience;
+040import org.slf4j.Logger;
+041import org.slf4j.LoggerFactory;
+042
+043import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+044
+045/**
+046 * An HBase Key/Value. This is the fundamental HBase Type.
+047 * <p>
+048 * HBase applications and users should use the Cell interface and avoid directly using KeyValue and
+049 * member functions not defined in Cell.
 050 * <p>
-051 * HBase applications and users should use the Cell interface and avoid directly using KeyValue and
-052 * member functions not defined in Cell.
-053 * <p>
-054 * If being used client-side, the primary methods to access individual fields are
-055 * {@link #getRowArray()}, {@link #getFamilyArray()}, {@link #getQualifierArray()},
-056 * {@link #getTimestamp()}, and {@link #getValueArray()}. These methods allocate new byte arrays
-057 * and return copies. Avoid their use server-side.
-058 * <p>
-059 * Instances of this class are immutable. They do not implement Comparable but Comparators are
-060 * provided. Comparators change with context, whether user table or a catalog table comparison. Its
-061 * critical you use the appropriate comparator. There are Comparators for normal HFiles, Meta's
-062 * Hfiles, and bloom filter keys.
-063 * <p>
-064 * KeyValue wraps a byte array and takes offsets and lengths into passed array at where to start
-065 * interpreting the content as KeyValue. The KeyValue format inside a byte array is:
-066 * <code>&lt;keylength&gt; &lt;valuelength&gt; &lt;key&gt; &lt;value&gt;</code> Key is further
-067 * decomposed as: <code>&lt;rowlength&gt; &lt;row&gt; &lt;columnfamilylength&gt;
-068 * &lt;columnfamily&gt; &lt;columnqualifier&gt;
-069 * &lt;timestamp&gt; &lt;keytype&gt;</code> The <code>rowlength</code> maximum is
-070 * <code>Short.MAX_SIZE</code>, column family length maximum is <code>Byte.MAX_SIZE</code>, and
-071 * column qualifier + key length must be &lt; <code>Integer.MAX_SIZE</code>. The column does not
-072 * contain the family/qualifier delimiter, {@link #COLUMN_FAMILY_DELIMITER}<br>
-073 * KeyValue can optionally contain Tags. When it contains tags, it is added in the byte array after
-074 * the value part. The format for this part is: <code>&lt;tagslength&gt;&lt;tagsbytes&gt;</code>.
-075 * <code>tagslength</code> maximum is <code>Short.MAX_SIZE</code>. The <code>tagsbytes</code>
-076 * contain one or more tags where as each tag is of the form
-077 * <code>&lt;taglength&gt;&lt;tagtype&gt;&lt;tagbytes&gt;</code>. <code>tagtype</code> is one byte
-078 * and <code>taglength</code> maximum is <code>Short.MAX_SIZE</code> and it includes 1 byte type
-079 * length and actual tag bytes length.
-080 */
-081@InterfaceAudience.Private
-082public class KeyValue implements ExtendedCell {
-083  private static final ArrayList<Tag>
[14/51] [partial] hbase-site git commit: Published site at .

2017-12-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b618ac40/devapidocs/src-html/org/apache/hadoop/hbase/client/Increment.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/Increment.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/Increment.html
index ea801d2..6d58b59 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/Increment.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/Increment.html
@@ -106,264 +106,257 @@
 098   * @throws java.io.IOException e
 099   */
 100  public Increment add(Cell cell) throws IOException{
-101    byte [] family = CellUtil.cloneFamily(cell);
-102    List<Cell> list = getCellList(family);
-103    //Checking that the row of the kv is the same as the put
-104    if (!CellUtil.matchingRows(cell, this.row)) {
-105      throw new WrongRowIOException("The row in " + cell +
-106        " doesn't match the original one " +  Bytes.toStringBinary(this.row));
-107    }
-108    list.add(cell);
-109    return this;
-110  }
-111
-112  /**
-113   * Increment the column from the specific family with the specified qualifier
-114   * by the specified amount.
-115   * <p>
-116   * Overrides previous calls to addColumn for this family and qualifier.
-117   * @param family family name
-118   * @param qualifier column qualifier
-119   * @param amount amount to increment by
-120   * @return the Increment object
-121   */
-122  public Increment addColumn(byte [] family, byte [] qualifier, long amount) {
-123    if (family == null) {
-124      throw new IllegalArgumentException("family cannot be null");
-125    }
-126    List<Cell> list = getCellList(family);
-127    KeyValue kv = createPutKeyValue(family, qualifier, ts, Bytes.toBytes(amount));
-128    list.add(kv);
-129    return this;
-130  }
-131
-132  /**
-133   * Gets the TimeRange used for this increment.
-134   * @return TimeRange
-135   */
-136  public TimeRange getTimeRange() {
-137    return this.tr;
-138  }
-139
-140  /**
-141   * Sets the TimeRange to be used on the Get for this increment.
+101    super.add(cell);
+102    return this;
+103  }
+104
+105  /**
+106   * Increment the column from the specific family with the specified qualifier
+107   * by the specified amount.
+108   * <p>
+109   * Overrides previous calls to addColumn for this family and qualifier.
+110   * @param family family name
+111   * @param qualifier column qualifier
+112   * @param amount amount to increment by
+113   * @return the Increment object
+114   */
+115  public Increment addColumn(byte [] family, byte [] qualifier, long amount) {
+116    if (family == null) {
+117      throw new IllegalArgumentException("family cannot be null");
+118    }
+119    List<Cell> list = getCellList(family);
+120    KeyValue kv = createPutKeyValue(family, qualifier, ts, Bytes.toBytes(amount));
+121    list.add(kv);
+122    return this;
+123  }
+124
+125  /**
+126   * Gets the TimeRange used for this increment.
+127   * @return TimeRange
+128   */
+129  public TimeRange getTimeRange() {
+130    return this.tr;
+131  }
+132
+133  /**
+134   * Sets the TimeRange to be used on the Get for this increment.
+135   * <p>
+136   * This is useful for when you have counters that only last for specific
+137   * periods of time (ie. counters that are partitioned by time).  By setting
+138   * the range of valid times for this increment, you can potentially gain
+139   * some performance with a more optimal Get operation.
+140   * Be careful adding the time range to this class as you will update the old cell if the
+141   * time range doesn't include the latest cells.
 142   * <p>
-143   * This is useful for when you have counters that only last for specific
-144   * periods of time (ie. counters that are partitioned by time).  By setting
-145   * the range of valid times for this increment, you can potentially gain
-146   * some performance with a more optimal Get operation.
-147   * Be careful adding the time range to this class as you will update the old cell if the
-148   * time range doesn't include the latest cells.
-149   * <p>
-150   * This range is used as [minStamp, maxStamp).
-151   * @param minStamp minimum timestamp value, inclusive
-152   * @param maxStamp maximum timestamp value, exclusive
-153   * @throws IOException if invalid time range
-154   * @return this
-155   */
-156  public Increment setTimeRange(long minStamp, long maxStamp)
-157      throws IOException {
-158    tr = new TimeRange(minStamp, maxStamp);
-159    return this;
-160  }
-161
-162  @Override
-163  public Increment setTimestamp(long timestamp) {
-164    super.setTimestamp(timestamp);
-165    return this;
-166  }
-167
-168  /**
-169   * @param returnResults True (default) if the increment operation should return the results. A
-170   *          client that is not interested in the result can save network bandwidth setting this
-171   *          to false.
-172
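The hunk above replaces the hand-rolled row check in Increment.add(Cell) with a call to the shared Mutation logic (super.add(cell)); addColumn and setTimeRange are carried over unchanged. A minimal client-side sketch of that public API follows — the connection, the "counters" table, the "d" family, and the "hits" qualifier are illustrative assumptions, not taken from this commit:

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class IncrementExample {
  public static void main(String[] args) throws IOException {
    try (Connection conn = ConnectionFactory.createConnection();
         Table table = conn.getTable(TableName.valueOf("counters"))) {  // assumed table
      Increment inc = new Increment(Bytes.toBytes("row-1"));
      // Overrides any previous addColumn call for this family/qualifier.
      inc.addColumn(Bytes.toBytes("d"), Bytes.toBytes("hits"), 1L);
      // The read side of the increment is restricted to [minStamp, maxStamp).
      inc.setTimeRange(0L, Long.MAX_VALUE);
      Result result = table.increment(inc);
      System.out.println(result);
    }
  }
}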

[14/51] [partial] hbase-site git commit: Published site at .

2017-12-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7c0589c0/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.WaitForStateCallable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.WaitForStateCallable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.WaitForStateCallable.html
index 6fecbc9..2accda0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.WaitForStateCallable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.WaitForStateCallable.html
@@ -34,4140 +34,4141 @@
 026import 
java.nio.charset.StandardCharsets;
 027import java.util.ArrayList;
 028import java.util.Arrays;
-029import java.util.Collection;
-030import java.util.EnumSet;
-031import java.util.HashMap;
-032import java.util.Iterator;
-033import java.util.LinkedList;
-034import java.util.List;
-035import java.util.Map;
-036import java.util.Set;
-037import java.util.concurrent.Callable;
-038import 
java.util.concurrent.ExecutionException;
-039import java.util.concurrent.Future;
-040import java.util.concurrent.TimeUnit;
-041import 
java.util.concurrent.TimeoutException;
-042import 
java.util.concurrent.atomic.AtomicInteger;
-043import 
java.util.concurrent.atomic.AtomicReference;
-044import java.util.regex.Pattern;
-045import java.util.stream.Collectors;
-046import java.util.stream.Stream;
-047import 
org.apache.hadoop.conf.Configuration;
-048import 
org.apache.hadoop.hbase.Abortable;
-049import 
org.apache.hadoop.hbase.CacheEvictionStats;
-050import 
org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
-051import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-052import 
org.apache.hadoop.hbase.ClusterStatus;
-053import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-054import 
org.apache.hadoop.hbase.HBaseConfiguration;
-055import 
org.apache.hadoop.hbase.HConstants;
-056import 
org.apache.hadoop.hbase.HRegionInfo;
-057import 
org.apache.hadoop.hbase.HRegionLocation;
-058import 
org.apache.hadoop.hbase.HTableDescriptor;
-059import 
org.apache.hadoop.hbase.MasterNotRunningException;
-060import 
org.apache.hadoop.hbase.MetaTableAccessor;
-061import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-062import 
org.apache.hadoop.hbase.NamespaceNotFoundException;
-063import 
org.apache.hadoop.hbase.NotServingRegionException;
-064import 
org.apache.hadoop.hbase.RegionLoad;
-065import 
org.apache.hadoop.hbase.RegionLocations;
-066import 
org.apache.hadoop.hbase.ServerName;
-067import 
org.apache.hadoop.hbase.TableExistsException;
-068import 
org.apache.hadoop.hbase.TableName;
-069import 
org.apache.hadoop.hbase.TableNotDisabledException;
-070import 
org.apache.hadoop.hbase.TableNotFoundException;
-071import 
org.apache.hadoop.hbase.UnknownRegionException;
-072import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-073import 
org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-074import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-075import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-076import 
org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-077import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-078import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-079import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-080import 
org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-081import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-082import 
org.apache.hadoop.hbase.quotas.QuotaRetriever;
-083import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-084import 
org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
-085import 
org.apache.hadoop.hbase.replication.ReplicationException;
-086import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-087import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-088import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-089import 
org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
-090import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-091import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-092import 
org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
-093import 
org.apache.hadoop.hbase.util.Addressing;
-094import 
org.apache.hadoop.hbase.util.Bytes;
-095import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-096import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-097import 
org.apache.hadoop.hbase.util.Pair;
-098import 
org.apache.hadoop.ipc.RemoteException;
-099import 
org.apache.hadoop.util.StringUtils;
-100import 
org.apache.yetus.audience.InterfaceAudience;
-101import 
org.apache.yetus.audience.InterfaceStability;
-102import org.slf4j.Logger;
-103import org.slf4j.LoggerFactory;
-104
-105import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;

[14/51] [partial] hbase-site git commit: Published site at .

2017-12-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4b2cc17/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index e8c7328..2297e10 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 2007 - 2017 The Apache Software Foundation
 
-  File: 3458,
- Errors: 19378,
+  File: 3470,
+ Errors: 19312,
  Warnings: 0,
  Infos: 0
   
@@ -1063,7 +1063,7 @@ under the License.
   0
 
 
-  4
+  1
 
   
   
@@ -5221,7 +5221,7 @@ under the License.
   0
 
 
-  3
+  1
 
   
   
@@ -6780,6 +6780,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.TestSize.java;>org/apache/hadoop/hbase/TestSize.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.util.RestartMetaTest.java;>org/apache/hadoop/hbase/util/RestartMetaTest.java
 
 
@@ -8581,7 +8595,7 @@ under the License.
   0
 
 
-  4
+  1
 
   
   
@@ -10401,7 +10415,7 @@ under the License.
   0
 
 
-  4
+  3
 
   
   
@@ -11493,7 +11507,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
@@ -13481,7 +13495,7 @@ under the License.
   0
 
 
-  5
+  4
 
   
   
@@ -14620,6 +14634,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.TestServerMetrics.java;>org/apache/hadoop/hbase/TestServerMetrics.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.master.procedure.ModifyNamespaceProcedure.java;>org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
 
 
@@ -15147,7 +15175,7 @@ under the License.
   0
 
 
-  3
+  0
 
   
   
@@ -16678,20 +16706,6 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.TestPut.java;>org/apache/hadoop/hbase/client/TestPut.java
-
-
-  0
-
-
-  0
-
-
-  0
-
-  
-  
-
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.io.hfile.RandomKeyValueUtil.java;>org/apache/hadoop/hbase/io/hfile/RandomKeyValueUtil.java
 
 
@@ -16827,7 +16841,7 @@ under the License.
   0
 
 
-  192
+  189
 
   
   
@@ -16897,7 +16911,7 @@ under the License.
   0
 
 
-  9
+  0
 
   
   
@@ -17154,6 +17168,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.ServerMetricsBuilder.java;>org/apache/hadoop/hbase/ServerMetricsBuilder.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+   

[14/51] [partial] hbase-site git commit: Published site at .

2017-12-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/JMXListener.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/JMXListener.html 
b/devapidocs/org/apache/hadoop/hbase/JMXListener.html
index 9a0219c..62a1305 100644
--- a/devapidocs/org/apache/hadoop/hbase/JMXListener.html
+++ b/devapidocs/org/apache/hadoop/hbase/JMXListener.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class JMXListener
+public class JMXListener
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements MasterCoprocessor, 
RegionServerCoprocessor
 Pluggable JMX Agent for HBase(to fix the 2 random TCP ports 
issue
@@ -172,7 +172,7 @@ implements 
-private static 
org.apache.commons.logging.Log
+private static org.slf4j.Logger
 LOG
 
 
@@ -303,7 +303,7 @@ implements 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -312,7 +312,7 @@ implements 
 
 RMI_REGISTRY_PORT_CONF_KEY
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String RMI_REGISTRY_PORT_CONF_KEY
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String RMI_REGISTRY_PORT_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -325,7 +325,7 @@ implements 
 
 RMI_CONNECTOR_PORT_CONF_KEY
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String RMI_CONNECTOR_PORT_CONF_KEY
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String RMI_CONNECTOR_PORT_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -338,7 +338,7 @@ implements 
 
 defMasterRMIRegistryPort
-public static finalint defMasterRMIRegistryPort
+public static finalint defMasterRMIRegistryPort
 
 See Also:
 Constant
 Field Values
@@ -351,7 +351,7 @@ implements 
 
 defRegionserverRMIRegistryPort
-public static finalint defRegionserverRMIRegistryPort
+public static finalint defRegionserverRMIRegistryPort
 
 See Also:
 Constant
 Field Values
@@ -364,7 +364,7 @@ implements 
 
 JMX_CS
-private statichttp://docs.oracle.com/javase/8/docs/api/javax/management/remote/JMXConnectorServer.html?is-external=true;
 title="class or interface in javax.management.remote">JMXConnectorServer 
JMX_CS
+private statichttp://docs.oracle.com/javase/8/docs/api/javax/management/remote/JMXConnectorServer.html?is-external=true;
 title="class or interface in javax.management.remote">JMXConnectorServer 
JMX_CS
 workaround for HBASE-11146
  master and regionserver are in 1 JVM in standalone mode
  only 1 JMX instance is allowed, otherwise there is port conflict even if
@@ -377,7 +377,7 @@ implements 
 
 rmiRegistry
-privatehttp://docs.oracle.com/javase/8/docs/api/java/rmi/registry/Registry.html?is-external=true;
 title="class or interface in java.rmi.registry">Registry rmiRegistry
+privatehttp://docs.oracle.com/javase/8/docs/api/java/rmi/registry/Registry.html?is-external=true;
 title="class or interface in java.rmi.registry">Registry rmiRegistry
 
 
 
@@ -394,7 +394,7 @@ implements 
 
 JMXListener
-publicJMXListener()
+publicJMXListener()
 
 
 
@@ -411,7 +411,7 @@ implements 
 
 buildJMXServiceURL
-public statichttp://docs.oracle.com/javase/8/docs/api/javax/management/remote/JMXServiceURL.html?is-external=true;
 title="class or interface in 
javax.management.remote">JMXServiceURLbuildJMXServiceURL(intrmiRegistryPort,
+public statichttp://docs.oracle.com/javase/8/docs/api/javax/management/remote/JMXServiceURL.html?is-external=true;
 title="class or interface in 
javax.management.remote">JMXServiceURLbuildJMXServiceURL(intrmiRegistryPort,
intrmiConnectorPort)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
@@ -426,7 +426,7 @@ implements 
 
 startConnectorServer
-publicvoidstartConnectorServer(intrmiRegistryPort,
+publicvoidstartConnectorServer(intrmiRegistryPort,
  intrmiConnectorPort)
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
@@ -441,7 +441,7 @@ implements 
 
 stopConnectorServer
-publicvoidstopConnectorServer()
+publicvoidstopConnectorServer()
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:
@@ -455,7 +455,7 @@ implements 
 
 start
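The page above covers JMXListener, whose static buildJMXServiceURL(rmiRegistryPort, rmiConnectorPort) assembles the JMXServiceURL that startConnectorServer later binds. A small sketch of that one public static helper; the port value 10101 is an illustrative assumption (reusing the same port for the RMI registry and the connector means only one firewall port needs opening):

import java.io.IOException;
import javax.management.remote.JMXServiceURL;
import org.apache.hadoop.hbase.JMXListener;

public class JmxUrlExample {
  public static void main(String[] args) throws IOException {
    // Same RMI registry and connector port, so a single port suffices.
    JMXServiceURL url = JMXListener.buildJMXServiceURL(10101, 10101);
    System.out.println(url);
  }
}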

[14/51] [partial] hbase-site git commit: Published site at .

2017-12-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4c0cfa5/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/Encryption.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/Encryption.html 
b/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/Encryption.html
index 331a8cb..3ce8f8a 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/Encryption.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/Encryption.html
@@ -35,558 +35,557 @@
 027import java.util.Arrays;
 028import java.util.Map;
 029import 
java.util.concurrent.ConcurrentHashMap;
-030
-031import javax.crypto.SecretKeyFactory;
-032import javax.crypto.spec.PBEKeySpec;
-033import javax.crypto.spec.SecretKeySpec;
-034
-035import org.apache.commons.io.IOUtils;
-036import org.apache.commons.logging.Log;
-037import 
org.apache.commons.logging.LogFactory;
-038import 
org.apache.hadoop.conf.Configuration;
-039import 
org.apache.hadoop.hbase.HBaseConfiguration;
-040import 
org.apache.hadoop.hbase.HConstants;
-041import 
org.apache.yetus.audience.InterfaceAudience;
-042import 
org.apache.hadoop.hbase.util.Bytes;
-043import 
org.apache.hadoop.hbase.util.Pair;
-044import 
org.apache.hadoop.util.ReflectionUtils;
-045
-046/**
-047 * A facade for encryption algorithms and 
related support.
-048 */
-049@InterfaceAudience.Public
-050public final class Encryption {
-051
-052  private static final Log LOG = 
LogFactory.getLog(Encryption.class);
-053
-054  /**
-055   * Crypto context
-056   */
-057  @InterfaceAudience.Public
-058  public static class Context extends 
org.apache.hadoop.hbase.io.crypto.Context {
-059
-060/** The null crypto context */
-061public static final Context NONE = 
new Context();
-062
-063private Context() {
-064  super();
-065}
-066
-067private Context(Configuration conf) 
{
-068  super(conf);
-069}
-070
-071@Override
-072public Context setCipher(Cipher 
cipher) {
-073  super.setCipher(cipher);
-074  return this;
-075}
-076
-077@Override
-078public Context setKey(Key key) {
-079  super.setKey(key);
-080  return this;
-081}
-082
-083public Context setKey(byte[] key) {
-084  super.setKey(new SecretKeySpec(key, 
getCipher().getName()));
-085  return this;
-086}
-087  }
-088
-089  public static Context newContext() {
-090return new Context();
-091  }
-092
-093  public static Context 
newContext(Configuration conf) {
-094return new Context(conf);
-095  }
-096
-097  // Prevent instantiation
-098  private Encryption() {
-099super();
-100  }
-101
-102  /**
-103   * Get an cipher given a name
-104   * @param name the cipher name
-105   * @return the cipher, or null if a 
suitable one could not be found
-106   */
-107  public static Cipher 
getCipher(Configuration conf, String name) {
-108return 
getCipherProvider(conf).getCipher(name);
-109  }
-110
-111  /**
-112   * Get names of supported encryption 
algorithms
-113   *
-114   * @return Array of strings, each 
represents a supported encryption algorithm
-115   */
-116  public static String[] 
getSupportedCiphers() {
-117return 
getSupportedCiphers(HBaseConfiguration.create());
-118  }
-119
-120  /**
-121   * Get names of supported encryption 
algorithms
-122   *
-123   * @return Array of strings, each 
represents a supported encryption algorithm
-124   */
-125  public static String[] 
getSupportedCiphers(Configuration conf) {
-126return 
getCipherProvider(conf).getSupportedCiphers();
-127  }
-128
-129  /**
-130   * Return the MD5 digest of the 
concatenation of the supplied arguments.
-131   */
-132  public static byte[] hash128(String... 
args) {
-133byte[] result = new byte[16];
-134try {
-135  MessageDigest md = 
MessageDigest.getInstance("MD5");
-136  for (String arg: args) {
-137md.update(Bytes.toBytes(arg));
-138  }
-139  md.digest(result, 0, 
result.length);
-140  return result;
-141} catch (NoSuchAlgorithmException e) 
{
-142  throw new RuntimeException(e);
-143} catch (DigestException e) {
-144  throw new RuntimeException(e);
-145}
-146  }
-147
-148  /**
-149   * Return the MD5 digest of the 
concatenation of the supplied arguments.
-150   */
-151  public static byte[] hash128(byte[]... 
args) {
-152byte[] result = new byte[16];
-153try {
-154  MessageDigest md = 
MessageDigest.getInstance("MD5");
-155  for (byte[] arg: args) {
-156md.update(arg);
-157  }
-158  md.digest(result, 0, 
result.length);
-159  return result;
-160} catch (NoSuchAlgorithmException e) 
{
-161  throw new RuntimeException(e);
-162} catch (DigestException e) {
-163  throw new RuntimeException(e);
-164}
-165  }
-166
-167  /**
-168   * Return the SHA-256 digest of the 
concatenation of the supplied arguments.
-169   */
-170  public static byte[] hash256(String... 
args) {
-171byte[] 
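The removed block above is the heart of the Encryption facade: a per-context cipher/key pair, provider lookup via getCipher/getSupportedCiphers, and MD5/SHA-256 digest helpers. A short usage sketch, assuming the default cipher provider is on the classpath (it typically reports "AES"):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.util.Bytes;

public class EncryptionExample {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Ask the configured CipherProvider what it supports.
    for (String name : Encryption.getSupportedCiphers(conf)) {
      System.out.println("cipher: " + name);
    }
    // 128-bit MD5 digest of the concatenated arguments, per hash128() above.
    byte[] digest = Encryption.hash128("salt", "secret");
    System.out.println(Bytes.toHex(digest));
  }
}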

[14/51] [partial] hbase-site git commit: Published site at .

2017-12-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/071f974b/devapidocs/src-html/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.SingleWriteExampleCallable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.SingleWriteExampleCallable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.SingleWriteExampleCallable.html
index 0b8baa8..c77170b 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.SingleWriteExampleCallable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.SingleWriteExampleCallable.html
@@ -30,308 +30,325 @@
 022import org.apache.commons.logging.Log;
 023import 
org.apache.commons.logging.LogFactory;
 024import 
org.apache.hadoop.conf.Configured;
-025import 
org.apache.hadoop.hbase.TableName;
-026import 
org.apache.hadoop.hbase.client.Connection;
-027import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-028import 
org.apache.hadoop.hbase.client.Put;
-029import 
org.apache.hadoop.hbase.client.RegionLocator;
-030import 
org.apache.hadoop.hbase.client.Result;
-031import 
org.apache.hadoop.hbase.client.ResultScanner;
-032import 
org.apache.hadoop.hbase.client.Scan;
-033import 
org.apache.hadoop.hbase.client.Table;
-034import 
org.apache.hadoop.hbase.filter.KeyOnlyFilter;
-035import 
org.apache.hadoop.hbase.util.Bytes;
-036import org.apache.hadoop.util.Tool;
-037import 
org.apache.hadoop.util.ToolRunner;
-038
-039import java.io.IOException;
-040import java.util.ArrayList;
-041import java.util.List;
-042import java.util.concurrent.Callable;
-043import 
java.util.concurrent.ExecutorService;
-044import java.util.concurrent.Executors;
-045import 
java.util.concurrent.ForkJoinPool;
-046import java.util.concurrent.Future;
-047import 
java.util.concurrent.ThreadFactory;
-048import 
java.util.concurrent.ThreadLocalRandom;
-049import java.util.concurrent.TimeUnit;
-050
-051
-052/**
-053 * Example on how to use HBase's {@link Connection} and {@link Table} in a
-054 * multi-threaded environment. Each table is a light weight object
-055 * that is created and thrown away. Connections are heavy weight objects
-056 * that hold on to zookeeper connections, async processes, and other state.
-057 *
-058 * <pre>
-059 * Usage:
-060 * bin/hbase org.apache.hadoop.hbase.client.example.MultiThreadedClientExample testTableName 50
-061 * </pre>
-062 *
-063 * <p>
-064 * The table should already be created before running the command.
-065 * This example expects one column family named d.
-066 * </p>
-067 * <p>
-068 * This is meant to show different operations that are likely to be
-069 * done in a real world application. These operations are:
-070 * </p>
-071 *
-072 * <ul>
-073 *   <li>
-074 * 30% of all operations performed are batch writes.
-075 * 30 puts are created and sent out at a time.
-076 * The response for all puts is waited on.
-077 *   </li>
-078 *   <li>
-079 * 20% of all operations are single writes.
-080 * A single put is sent out and the response is waited for.
-081 *   </li>
-082 *   <li>
-083 * 50% of all operations are scans.
-084 * These scans start at a random place and scan up to 100 rows.
-085 *   </li>
-086 * </ul>
-087 *
-088 */
-089public class MultiThreadedClientExample extends Configured implements Tool {
-090  private static final Log LOG = LogFactory.getLog(MultiThreadedClientExample.class);
-091  private static final int DEFAULT_NUM_OPERATIONS = 50;
-092
-093  /**
-094   * The name of the column family.
-095   *
-096   * d for default.
-097   */
-098  private static final byte[] FAMILY = Bytes.toBytes("d");
-099
-100  /**
-101   * For the example we're just using one qualifier.
-102   */
-103  private static final byte[] QUAL = Bytes.toBytes("test");
-104
-105  private final ExecutorService internalPool;
-106
-107  private final int threads;
-108
-109  public MultiThreadedClientExample() throws IOException {
-110    // Base number of threads.
-111    // This represents the number of threads you application has
-112    // that can be interacting with an hbase client.
-113    this.threads = Runtime.getRuntime().availableProcessors() * 4;
-114
-115    // Daemon threads are great for things that get shut down.
-116    ThreadFactory threadFactory = new ThreadFactoryBuilder()
-117        .setDaemon(true).setNameFormat("internal-pol-%d").build();
-118
-119
-120    this.internalPool = Executors.newFixedThreadPool(threads, threadFactory);
-121  }
+025import 
org.apache.hadoop.hbase.CellBuilder;
+026import 
org.apache.hadoop.hbase.CellBuilderFactory;
+027import 
org.apache.hadoop.hbase.CellBuilderType;
+028import 
org.apache.hadoop.hbase.TableName;
+029import 
org.apache.hadoop.hbase.client.Connection;
+030import 
org.apache.hadoop.hbase.client.ConnectionFactory;
+031import 
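The class javadoc above boils down to one pattern: share a heavyweight Connection, and create and discard lightweight Table instances per task. A minimal sketch of that pattern, assuming a pre-built shared Connection and reusing the example's own "d" family and "test" qualifier:

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class SharedConnectionWrite implements Runnable {
  private final Connection connection;  // shared, long-lived, thread-safe
  private final TableName tableName;

  SharedConnectionWrite(Connection connection, TableName tableName) {
    this.connection = connection;
    this.tableName = tableName;
  }

  @Override
  public void run() {
    // Tables are cheap: create, use, and close inside the task.
    try (Table table = connection.getTable(tableName)) {
      Put put = new Put(Bytes.toBytes("row-" + Thread.currentThread().getId()));
      put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("test"), Bytes.toBytes("v"));
      table.put(put);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
}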

[14/51] [partial] hbase-site git commit: Published site at .

2017-12-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dc4e5c85/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TableFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TableFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TableFuture.html
index 7c59e27..c904c56 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TableFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TableFuture.html
@@ -119,4048 +119,4054 @@
 111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
 112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest;
 113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;
-140import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-141import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-142import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-143import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-144import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-145import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;
-146import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse;
-147import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-148import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-149import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-150import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-151import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-152import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-153import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-154import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-155import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksRequest;
-156import 

[14/51] [partial] hbase-site git commit: Published site at .

2017-12-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4abd958d/devapidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html 
b/devapidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
index c00ba22..b6bfe18 100644
--- a/devapidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
+++ b/devapidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":42,"i11":10,"i12":42,"i13":10,"i14":10,"i15":42,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":42,"i32":10,"i33":42,"i34":10,"i35":10,"i36":42,"i37":10,"i38":10,"i39":10,"i40":10,"i41":42,"i42":10,"i43":42,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":42,"i53":42,"i54":42,"i55":42,"i56":9};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":42,"i11":42,"i12":10,"i13":42,"i14":42,"i15":42,"i16":42,"i17":42,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":42,"i35":10,"i36":42,"i37":10,"i38":10,"i39":42,"i40":10,"i41":10,"i42":10,"i43":10,"i44":42,"i45":10,"i46":42,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":42,"i56":42,"i57":42,"i58":42,"i59":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class RemoteHTable
+public class RemoteHTable
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements Table
 HTable interface to remote tables accessed via REST 
gateway
@@ -137,10 +137,21 @@ implements Class and Description
 
 
+private class
+RemoteHTable.CheckAndMutateBuilderImpl
+
+
 (package private) class
 RemoteHTable.Scanner
 
 
+
+
+
+
+Nested classes/interfaces inherited from 
interfaceorg.apache.hadoop.hbase.client.Table
+Table.CheckAndMutateBuilder
+
 
 
 
@@ -325,11 +336,17 @@ implements CompareOperatorcompareOp,
   byte[]value,
   Deletedelete)
-Atomically checks if a row/family/qualifier value matches 
the expected
- value.
+Deprecated.
 
 
 
+Table.CheckAndMutateBuilder
+checkAndMutate(byte[]row,
+  byte[]family)
+Atomically checks if a row/family/qualifier value matches 
the expected value.
+
+
+
 boolean
 checkAndMutate(byte[]row,
   byte[]family,
@@ -340,7 +357,7 @@ implements Deprecated.
 
 
-
+
 boolean
 checkAndMutate(byte[]row,
   byte[]family,
@@ -348,21 +365,20 @@ implements CompareOperatorcompareOp,
   byte[]value,
   RowMutationsrm)
-Atomically checks if a row/family/qualifier value matches 
the expected value.
+Deprecated.
 
 
-
+
 boolean
 checkAndPut(byte[]row,
byte[]family,
byte[]qualifier,
byte[]value,
Putput)
-Atomically checks if a row/family/qualifier value matches 
the expected
- value.
+Deprecated.
 
 
-
+
 boolean
 checkAndPut(byte[]row,
byte[]family,
@@ -373,7 +389,7 @@ implements Deprecated.
 
 
-
+
 boolean
 checkAndPut(byte[]row,
byte[]family,
@@ -381,24 +397,23 @@ implements CompareOperatorcompareOp,
byte[]value,
Putput)
-Atomically checks if a row/family/qualifier value matches 
the expected
- value.
+Deprecated.
 
 
-
+
 void
 close()
 Releases any resources held or pending changes in internal 
buffers.
 
 
-
+
 CoprocessorRpcChannel
 coprocessorService(byte[]row)
 Creates and returns a RpcChannel instance 
connected to the
  table region containing the specified row.
 
 
-
+
 T extends 
com.google.protobuf.Service,Rhttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],R
 coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">ClassTservice,
   byte[]startKey,
@@ -410,7 +425,7 @@ implements Service instance.
 
 
-
+
 T extends 
com.google.protobuf.Service,Rvoid
 coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">ClassTservice,
   byte[]startKey,
@@ -423,153 +438,169 @@ implements Service instance.
 
 
-
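The regenerated method summary reflects HBase 2.0's move from the deprecated checkAndPut/checkAndDelete/checkAndMutate(…, CompareOperator, …) overloads to the Table.CheckAndMutateBuilder obtained via checkAndMutate(row, family). A hedged sketch of the builder form — the row, qualifier, and values are illustrative:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndMutateExample {
  // Replaces the deprecated checkAndPut(row, family, qualifier, value, put).
  static boolean putIfMatches(Table table) throws IOException {
    byte[] row = Bytes.toBytes("row-1");
    Put put = new Put(row)
        .addColumn(Bytes.toBytes("d"), Bytes.toBytes("q"), Bytes.toBytes("new"));
    return table.checkAndMutate(row, Bytes.toBytes("d"))
        .qualifier(Bytes.toBytes("q"))
        .ifEquals(Bytes.toBytes("old"))   // mutate only if the stored value matches
        .thenPut(put);
  }
}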

[14/51] [partial] hbase-site git commit: Published site at .

2017-12-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e23b49ba/devapidocs/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.html
 
b/devapidocs/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.html
index c33e682..789ce29 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":9,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":9,"i81":10,"i82":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":9,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":9,"i79":10,"i80":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public static class ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor
+public static class ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements ColumnFamilyDescriptor, http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor
 An ModifyableFamilyDescriptor contains information about a 
column family such as the
@@ -357,245 +357,237 @@ implements 
 boolean
-isCacheDataInL1()
-
-
-boolean
 isCacheDataOnWrite()
 
-
+
 boolean
 isCacheIndexesOnWrite()
 
-
+
 boolean
 isCompressTags()
 
-
+
 boolean
 isEvictBlocksOnClose()
 
-
+
 boolean
 isInMemory()
 
-
+
 boolean
 isMobEnabled()
 Gets whether the mob is enabled for the family.
 
 
-
+
 boolean
 isNewVersionBehavior()
 By default, HBase only consider timestamp in versions.
 
 
-
+
 boolean
 isPrefetchBlocksOnOpen()
 
-
+
 private static ColumnFamilyDescriptor
 parseFrom(byte[]bytes)
 
-
+
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor
 removeConfiguration(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringkey)
 Remove a configuration setting represented by the key from 
the
  configuration
 map.
 
 
-
+
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor
 removeValue(Byteskey)
 
-
+
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor
 setBlockCacheEnabled(booleanblockCacheEnabled)
 
-
+
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor
 setBlocksize(ints)
 
-
+
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor
 setBloomFilterType(BloomTypebt)
 
-
+
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor
 setCacheBloomsOnWrite(booleanvalue)
 
-
-ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor
-setCacheDataInL1(booleanvalue)
-
-
+
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor
 setCacheDataOnWrite(booleanvalue)
 
-
+
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor
 setCacheIndexesOnWrite(booleanvalue)
 
-
+
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor
 setCompactionCompressionType(Compression.Algorithmtype)
 Compression types 
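ModifyableColumnFamilyDescriptor is the mutable backing class that ColumnFamilyDescriptorBuilder produces; the regenerated page also records the removal of the setCacheDataInL1/isCacheDataInL1 pair. A minimal builder sketch — the family name and settings are chosen purely for illustration:

import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.util.Bytes;

public class FamilyDescriptorExample {
  static ColumnFamilyDescriptor build() {
    return ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("d"))
        .setBlocksize(64 * 1024)            // 64 KB blocks
        .setBloomFilterType(BloomType.ROW)
        .setInMemory(true)
        .build();
  }
}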

[14/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/dependency-info.html
--
diff --git a/hbase-build-configuration/dependency-info.html 
b/hbase-build-configuration/dependency-info.html
index 6583a46..45d3c10 100644
--- a/hbase-build-configuration/dependency-info.html
+++ b/hbase-build-configuration/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Build Configuration  Dependency 
Information
 
@@ -148,7 +148,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/dependency-management.html
--
diff --git a/hbase-build-configuration/dependency-management.html 
b/hbase-build-configuration/dependency-management.html
index 9b2b9d2..e2e6696 100644
--- a/hbase-build-configuration/dependency-management.html
+++ b/hbase-build-configuration/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Build Configuration  Project Dependency 
Management
 
@@ -775,18 +775,24 @@
 test-jar
 https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
 
+org.apache.hbase
+http://hbase.apache.org/hbase-build-configuration/hbase-zookeeper;>hbase-zookeeper
+3.0.0-SNAPSHOT
+test-jar
+https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
+
 org.bouncycastle
 http://www.bouncycastle.org/java.html;>bcprov-jdk16
 1.46
 jar
 http://www.bouncycastle.org/licence.html;>Bouncy Castle 
Licence
-
+
 org.hamcrest
 https://github.com/hamcrest/JavaHamcrest/hamcrest-core;>hamcrest-core
 1.3
 jar
 http://www.opensource.org/licenses/bsd-license.php;>New BSD 
License
-
+
 org.mockito
 http://mockito.org;>mockito-core
 2.1.0
@@ -804,7 +810,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/dependencies.html
--
diff --git a/hbase-build-configuration/hbase-archetypes/dependencies.html 
b/hbase-build-configuration/hbase-archetypes/dependencies.html
index 92e01bd..e45d3b3 100644
--- a/hbase-build-configuration/hbase-archetypes/dependencies.html
+++ b/hbase-build-configuration/hbase-archetypes/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetypes  Project Dependencies
 
@@ -330,7 +330,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 



[14/51] [partial] hbase-site git commit: Published site at .

2017-12-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c54c242b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/LogRoller.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/LogRoller.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/LogRoller.html
index 72713d6..70ea645 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/LogRoller.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/LogRoller.html
@@ -77,7 +77,7 @@
 069
 070  public void addWAL(final WAL wal) {
 071if (null == 
walNeedsRoll.putIfAbsent(wal, Boolean.FALSE)) {
-072  wal.registerWALActionsListener(new 
WALActionsListener.Base() {
+072  wal.registerWALActionsListener(new 
WALActionsListener() {
 073@Override
 074public void 
logRollRequested(boolean lowReplicas) {
 075  walNeedsRoll.put(wal, 
Boolean.TRUE);

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c54c242b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFlushContext.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFlushContext.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFlushContext.html
index 8b76f22..acfc040 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFlushContext.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFlushContext.html
@@ -28,77 +28,75 @@
 020
 021import java.io.IOException;
 022import java.util.List;
-023
-024import org.apache.hadoop.fs.Path;
+023import org.apache.hadoop.fs.Path;
+024import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 025import org.apache.yetus.audience.InterfaceAudience;
-026import org.apache.hadoop.hbase.monitoring.MonitoredTask;
-027
-028/**
-029 * A package protected interface for a store flushing.
-030 * A store flush context carries the state required to prepare/flush/commit the store's cache.
-031 */
-032@InterfaceAudience.Private
-033interface StoreFlushContext {
-034
-035  /**
-036   * Prepare for a store flush (create snapshot)
-037   *
-038   * Requires pausing writes.
-039   *
-040   * A very short operation.
-041   */
-042  void prepare();
-043
-044  /**
-045   * Flush the cache (create the new store file)
-046   *
-047   * A length operation which doesn't require locking out any function
-048   * of the store.
-049   *
-050   * @throws IOException in case the flush fails
-051   */
-052  void flushCache(MonitoredTask status) throws IOException;
-053
-054  /**
-055   * Commit the flush - add the store file to the store and clear the
-056   * memstore snapshot.
+026
+027/**
+028 * A package protected interface for a store flushing.
+029 * A store flush context carries the state required to prepare/flush/commit the store's cache.
+030 */
+031@InterfaceAudience.Private
+032interface StoreFlushContext {
+033
+034  /**
+035   * Prepare for a store flush (create snapshot)
+036   * Requires pausing writes.
+037   * A very short operation.
+038   * @return The size of snapshot to flush
+039   */
+040  MemStoreSize prepare();
+041
+042  /**
+043   * Flush the cache (create the new store file)
+044   *
+045   * A length operation which doesn't require locking out any function
+046   * of the store.
+047   *
+048   * @throws IOException in case the flush fails
+049   */
+050  void flushCache(MonitoredTask status) throws IOException;
+051
+052  /**
+053   * Commit the flush - add the store file to the store and clear the
+054   * memstore snapshot.
+055   *
+056   * Requires pausing scans.
 057   *
-058   * Requires pausing scans.
+058   * A very short operation
 059   *
-060   * A very short operation
-061   *
-062   * @return
-063   * @throws IOException
-064   */
-065  boolean commit(MonitoredTask status) throws IOException;
-066
-067  /**
-068   * Similar to commit, but called in secondary region replicas for replaying the
-069   * flush cache from primary region. Adds the new files to the store, and drops the
-070   * snapshot depending on dropMemstoreSnapshot argument.
-071   * @param fileNames names of the flushed files
-072   * @param dropMemstoreSnapshot whether to drop the prepared memstore snapshot
-073   * @throws IOException
-074   */
-075  void replayFlush(List<String> fileNames, boolean dropMemstoreSnapshot) throws IOException;
-076
-077  /**
-078   * Abort the snapshot preparation. Drops the snapshot if any.
-079   * @throws IOException
-080   */
-081  void abort() throws IOException;
-082
-083  /**
-084   * Returns the newly committed files from the flush. Called only if commit returns true
-085   * @return a list of Paths for new files
-086   */
-087  List<Path> getCommittedFiles();
-088
-089  /**
-090   * @return the total file size for flush output files, in bytes
-091   */
-092  long getOutputFileSize();
-093}
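The diff's main change is prepare() now returning the MemStoreSize it snapshots. The interface contract — short prepare() with writes paused, lengthy flushCache() without a store lock, short commit() with scans paused, abort() to drop the snapshot — suggests the lifecycle sketch below. It is illustrative only: StoreFlushContext is package-private, so the sketch assumes placement in org.apache.hadoop.hbase.regionserver, and the caller is invented:

package org.apache.hadoop.hbase.regionserver;

import java.io.IOException;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;

final class FlushLifecycleSketch {
  static void flush(StoreFlushContext ctx, MonitoredTask status) throws IOException {
    // Brief: snapshot the memstore while writes are paused; the returned
    // MemStoreSize reports how much data is about to be flushed.
    MemStoreSize snapshotSize = ctx.prepare();
    status.setStatus("Flushing snapshot of ~" + snapshotSize.getDataSize() + " bytes");
    try {
      ctx.flushCache(status);  // lengthy; does not lock out other store functions
      ctx.commit(status);      // brief; scans pause while new files are swapped in
    } catch (IOException e) {
      ctx.abort();             // drop the prepared snapshot on failure
      throw e;
    }
  }
}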

[14/51] [partial] hbase-site git commit: Published site at .

2017-11-30 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/713d773f/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
index 25e368d..d0f781f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
@@ -25,798 +25,798 @@
 017 */
 018package 
org.apache.hadoop.hbase.io.asyncfs;
 019
-020import static 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelOption.CONNECT_TIMEOUT_MILLIS;
-021import static 
org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleState.READER_IDLE;
-022import static 
org.apache.hadoop.fs.CreateFlag.CREATE;
-023import static 
org.apache.hadoop.fs.CreateFlag.OVERWRITE;
-024import static 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createEncryptor;
-025import static 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.trySaslNegotiate;
+020import static 
org.apache.hadoop.fs.CreateFlag.CREATE;
+021import static 
org.apache.hadoop.fs.CreateFlag.OVERWRITE;
+022import static 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createEncryptor;
+023import static 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.trySaslNegotiate;
+024import static 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelOption.CONNECT_TIMEOUT_MILLIS;
+025import static 
org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleState.READER_IDLE;
 026import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
 027import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME;
 028import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT;
 029import static 
org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage.PIPELINE_SETUP_CREATE;
 030
-031import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
-032import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
-033import 
com.google.protobuf.CodedOutputStream;
-034
-035import 
org.apache.hadoop.hbase.shaded.io.netty.bootstrap.Bootstrap;
-036import 
org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBuf;
-037import 
org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBufAllocator;
-038import 
org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBufOutputStream;
-039import 
org.apache.hadoop.hbase.shaded.io.netty.buffer.PooledByteBufAllocator;
-040import 
org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
-041import 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelFuture;
-042import 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelFutureListener;
-043import 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandler;
-044import 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContext;
-045import 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelInitializer;
-046import 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelPipeline;
-047import 
org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop;
-048import 
org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHandler;
-049import 
org.apache.hadoop.hbase.shaded.io.netty.handler.codec.protobuf.ProtobufDecoder;
-050import 
org.apache.hadoop.hbase.shaded.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-051import 
org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleStateEvent;
-052import 
org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleStateHandler;
-053import 
org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Future;
-054import 
org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.FutureListener;
-055import 
org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Promise;
-056
-057import java.io.IOException;
-058import 
java.lang.reflect.InvocationTargetException;
-059import java.lang.reflect.Method;
-060import java.util.ArrayList;
-061import java.util.EnumSet;
-062import java.util.List;
-063import java.util.concurrent.TimeUnit;
-064
-065import org.apache.commons.logging.Log;
-066import 
org.apache.commons.logging.LogFactory;
-067import 
org.apache.hadoop.conf.Configuration;
-068import 
org.apache.hadoop.crypto.CryptoProtocolVersion;
-069import 
org.apache.hadoop.crypto.Encryptor;
-070import org.apache.hadoop.fs.CreateFlag;
-071import org.apache.hadoop.fs.FileSystem;
-072import 
org.apache.hadoop.fs.FileSystemLinkResolver;
-073import org.apache.hadoop.fs.Path;
-074import 
org.apache.hadoop.fs.UnresolvedLinkException;
-075import 

[14/51] [partial] hbase-site git commit: Published site at .

2017-11-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/fd365a2b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
index d438f22..7c59e27 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
@@ -1290,8 +1290,8 @@
 1282   CompactType 
compactType) throws IOException {
 1283switch (compactType) {
 1284  case MOB:
-1285
compact(this.connection.getAdminForMaster(), getMobRegionInfo(tableName), 
major,
-1286  columnFamily);
+1285
compact(this.connection.getAdminForMaster(), 
RegionInfo.createMobRegionInfo(tableName),
+1286major, columnFamily);
 1287break;
 1288  case NORMAL:
 1289checkTableExists(tableName);
@@ -3248,7 +3248,7 @@
 3240      new Callable<AdminProtos.GetRegionInfoResponse.CompactionState>() {
 3241@Override
 3242public 
AdminProtos.GetRegionInfoResponse.CompactionState call() throws Exception {
-3243  RegionInfo info = 
getMobRegionInfo(tableName);
+3243  RegionInfo info = 
RegionInfo.createMobRegionInfo(tableName);
 3244  GetRegionInfoRequest 
request =
 3245
RequestConverter.buildGetRegionInfoRequest(info.getRegionName(), true);
 3246  GetRegionInfoResponse 
response = masterAdmin.getRegionInfo(rpcController, request);
@@ -3312,7 +3312,7 @@
 3304}
 3305break;
 3306  default:
-3307throw new 
IllegalArgumentException("Unknowne compactType: " + compactType);
+3307throw new 
IllegalArgumentException("Unknown compactType: " + compactType);
 3308}
 3309if (state != null) {
 3310  return 
ProtobufUtil.createCompactionState(state);
@@ -3847,325 +3847,320 @@
 3839});
 3840  }
 3841
-3842  private RegionInfo 
getMobRegionInfo(TableName tableName) {
-3843return 
RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes(".mob")).setRegionId(0)
-3844.build();
-3845  }
-3846
-3847  private RpcControllerFactory 
getRpcControllerFactory() {
-3848return this.rpcControllerFactory;
-3849  }
-3850
-3851  @Override
-3852  public void addReplicationPeer(String 
peerId, ReplicationPeerConfig peerConfig, boolean enabled)
-3853  throws IOException {
-3854    executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
-3855  @Override
-3856  protected Void rpcCall() throws 
Exception {
-3857
master.addReplicationPeer(getRpcController(),
-3858  
RequestConverter.buildAddReplicationPeerRequest(peerId, peerConfig, 
enabled));
-3859return null;
-3860  }
-3861});
-3862  }
-3863
-3864  @Override
-3865  public void 
removeReplicationPeer(String peerId) throws IOException {
-3866    executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
-3867  @Override
-3868  protected Void rpcCall() throws 
Exception {
-3869
master.removeReplicationPeer(getRpcController(),
-3870  
RequestConverter.buildRemoveReplicationPeerRequest(peerId));
-3871return null;
-3872  }
-3873});
-3874  }
-3875
-3876  @Override
-3877  public void 
enableReplicationPeer(final String peerId) throws IOException {
-3878    executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
-3879  @Override
-3880  protected Void rpcCall() throws 
Exception {
-3881
master.enableReplicationPeer(getRpcController(),
-3882  
RequestConverter.buildEnableReplicationPeerRequest(peerId));
-3883return null;
-3884  }
-3885});
-3886  }
-3887
-3888  @Override
-3889  public void 
disableReplicationPeer(final String peerId) throws IOException {
-3890    executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
-3891  @Override
-3892  protected Void rpcCall() throws 
Exception {
-3893
master.disableReplicationPeer(getRpcController(),
-3894  
RequestConverter.buildDisableReplicationPeerRequest(peerId));
-3895return null;
-3896  }
-3897});
-3898  }
-3899
-3900  @Override
-3901  public ReplicationPeerConfig 
getReplicationPeerConfig(final String peerId) throws IOException {
-3902    return executeCallable(new MasterCallable<ReplicationPeerConfig>(getConnection(),
-3903        getRpcControllerFactory()) {
-3904  @Override
-3905  protected ReplicationPeerConfig 
rpcCall() throws Exception {
-3906GetReplicationPeerConfigResponse 
response = master.getReplicationPeerConfig(
-3907  getRpcController(), 
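Each replication-peer method added above wraps one master RPC in a MasterCallable. On the client side they surface as Admin methods; a brief sketch, where the peer id "1" and the cluster key are illustrative assumptions:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

public class ReplicationPeerExample {
  static void addAndEnable(Admin admin) throws IOException {
    ReplicationPeerConfig peerConfig = new ReplicationPeerConfig()
        .setClusterKey("zk1,zk2,zk3:2181:/hbase");   // assumed peer cluster key
    admin.addReplicationPeer("1", peerConfig, false); // register the peer, disabled
    admin.enableReplicationPeer("1");                 // start shipping edits
  }
}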

[14/51] [partial] hbase-site git commit: Published site at .

2017-11-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b9722a17/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
index 29ea7b3..6ed75c9 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
@@ -1313,7093 +1313,7082 @@
 1305
 1306  @Override
 1307  public boolean isSplittable() {
-1308    boolean result = isAvailable() && !hasReferences();
-1309    LOG.info("ASKED IF SPLITTABLE " + result + " " + getRegionInfo().getShortNameToLog(),
-1310      new Throwable("LOGGING: REMOVE"));
-1311// REMOVE BELOW
-1312LOG.info("DEBUG LIST ALL FILES");
-1313for (HStore store : 
this.stores.values()) {
-1314  LOG.info("store " + 
store.getColumnFamilyName());
-1315  for (HStoreFile sf : 
store.getStorefiles()) {
-1316
LOG.info(sf.toStringDetailed());
-1317  }
-1318}
-1319return result;
-1320  }
-1321
-1322  @Override
-1323  public boolean isMergeable() {
-1324if (!isAvailable()) {
-1325  LOG.debug("Region " + this
-1326  + " is not mergeable because 
it is closing or closed");
-1327  return false;
-1328}
-1329if (hasReferences()) {
-1330  LOG.debug("Region " + this
-1331  + " is not mergeable because 
it has references");
-1332  return false;
-1333}
-1334
-1335return true;
+1308return isAvailable()  
!hasReferences();
+1309  }
+1310
+1311  @Override
+1312  public boolean isMergeable() {
+1313if (!isAvailable()) {
+1314  LOG.debug("Region " + this
+1315  + " is not mergeable because 
it is closing or closed");
+1316  return false;
+1317}
+1318if (hasReferences()) {
+1319  LOG.debug("Region " + this
+1320  + " is not mergeable because 
it has references");
+1321  return false;
+1322}
+1323
+1324return true;
+1325  }
+1326
+1327  public boolean areWritesEnabled() {
+1328synchronized(this.writestate) {
+1329  return 
this.writestate.writesEnabled;
+1330}
+1331  }
+1332
+1333  @VisibleForTesting
+1334  public MultiVersionConcurrencyControl 
getMVCC() {
+1335return mvcc;
 1336  }
 1337
-1338  public boolean areWritesEnabled() {
-1339synchronized(this.writestate) {
-1340  return 
this.writestate.writesEnabled;
-1341}
-1342  }
-1343
-1344  @VisibleForTesting
-1345  public MultiVersionConcurrencyControl 
getMVCC() {
-1346return mvcc;
-1347  }
-1348
-1349  @Override
-1350  public long getMaxFlushedSeqId() {
-1351return maxFlushedSeqId;
+1338  @Override
+1339  public long getMaxFlushedSeqId() {
+1340return maxFlushedSeqId;
+1341  }
+1342
+1343  /**
+1344   * @return readpoint considering given 
IsolationLevel. Pass {@code null} for default
+1345   */
+1346  public long 
getReadPoint(IsolationLevel isolationLevel) {
+1347if (isolationLevel != null 
 isolationLevel == IsolationLevel.READ_UNCOMMITTED) {
+1348  // This scan can read even 
uncommitted transactions
+1349  return Long.MAX_VALUE;
+1350}
+1351return mvcc.getReadPoint();
 1352  }
 1353
-1354  /**
-1355   * @return readpoint considering given 
IsolationLevel. Pass {@code null} for default
-1356   */
-1357  public long 
getReadPoint(IsolationLevel isolationLevel) {
-1358if (isolationLevel != null 
 isolationLevel == IsolationLevel.READ_UNCOMMITTED) {
-1359  // This scan can read even 
uncommitted transactions
-1360  return Long.MAX_VALUE;
-1361}
-1362return mvcc.getReadPoint();
-1363  }
-1364
-1365  public boolean 
isLoadingCfsOnDemandDefault() {
-1366return 
this.isLoadingCfsOnDemandDefault;
-1367  }
-1368
-1369  /**
-1370   * Close down this HRegion.  Flush the 
cache, shut down each HStore, don't
-1371   * service any more calls.
-1372   *
-1373   * pThis method could take 
some time to execute, so don't call it from a
-1374   * time-sensitive thread.
-1375   *
-1376   * @return Vector of all the storage 
files that the HRegion's component
-1377   * HStores make use of.  It's a list 
of all StoreFile objects. Returns empty
-1378   * vector if already closed and null 
if judged that it should not close.
-1379   *
-1380   * @throws IOException e
-1381   * @throws DroppedSnapshotException 
Thrown when replay of wal is required
-1382   * because a Snapshot was not properly 
persisted. The region is put in closing mode, and the
-1383   * caller MUST abort after this.
-1384   */
-1385  public Mapbyte[], 
ListHStoreFile close() throws IOException {
-1386return close(false);
-1387  }
-1388
-1389  private final Object closeLock = new 
Object();
-1390
-1391  /** Conf key for the periodic flush 
interval */

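Two behaviors are worth pulling out of the hunk above: the cleaned-up isSplittable() reduces to the same availability-and-no-references predicate that isMergeable() checks, and getReadPoint(IsolationLevel) collapses to Long.MAX_VALUE under READ_UNCOMMITTED so such scans see uncommitted writes. A condensed, self-contained sketch of that logic follows; RegionView is a hypothetical stand-in for HRegion, and the nested enum mirrors org.apache.hadoop.hbase.client.IsolationLevel.

public class RegionPredicatesSketch {
  enum IsolationLevel { READ_COMMITTED, READ_UNCOMMITTED } // mirrors the client enum

  interface RegionView {
    boolean isAvailable();    // open, not closing or closed
    boolean hasReferences();  // still holds reference files from a parent split
    long mvccReadPoint();     // stand-in for mvcc.getReadPoint()
  }

  // After the diff: the debug logging is gone, leaving the bare predicate.
  static boolean isSplittable(RegionView r) {
    return r.isAvailable() && !r.hasReferences();
  }

  // Same predicate, but the real code logs which condition failed.
  static boolean isMergeable(RegionView r) {
    return r.isAvailable() && !r.hasReferences();
  }

  // Pass null for the default isolation level, as in the javadoc above.
  static long getReadPoint(RegionView r, IsolationLevel level) {
    if (level == IsolationLevel.READ_UNCOMMITTED) {
      return Long.MAX_VALUE; // uncommitted writes become visible to the scan
    }
    return r.mvccReadPoint(); // otherwise visibility is bounded by MVCC
  }
}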
[14/51] [partial] hbase-site git commit: Published site at .

2017-11-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b1eb7453/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCell.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCell.html b/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCell.html
index 9696734..c0f5783 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCell.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCell.html
@@ -29,8 +29,8 @@
 021import java.io.OutputStream;
 022import java.nio.ByteBuffer;
 023
-024import org.apache.yetus.audience.InterfaceAudience;
-025import org.apache.hadoop.hbase.io.HeapSize;
+024import org.apache.hadoop.hbase.io.HeapSize;
+025import org.apache.yetus.audience.InterfaceAudience;
 026
 027/**
 028 * Extension to {@link Cell} with server side required functions. Server side Cell implementations
@@ -38,8 +38,8 @@
 030 * @see SettableSequenceId
 031 * @see SettableTimestamp
 032 */
-033@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
-034public interface ExtendedCell extends Cell, SettableSequenceId, SettableTimestamp, HeapSize,
+033@InterfaceAudience.Private
+034public interface ExtendedCell extends RawCell, SettableSequenceId, SettableTimestamp, HeapSize,
 035    Cloneable {
 036
 037  public static int CELL_NOT_BASED_ON_CHUNK = -1;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b1eb7453/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilder.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilder.html b/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilder.html
index b93bf76..8db1118 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilder.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilder.html
@@ -34,7 +34,7 @@
 026 * Use {@link ExtendedCellBuilderFactory} to get ExtendedCellBuilder instance.
 027 * TODO: ditto for ByteBufferCell?
 028 */
-029@InterfaceAudience.Private
+029@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 030public interface ExtendedCellBuilder extends CellBuilder {
 031  @Override
 032  ExtendedCellBuilder setRow(final byte[] row);
@@ -70,11 +70,18 @@
 062  @Override
 063  ExtendedCellBuilder clear();
 064
-065  ExtendedCellBuilder setTags(final byte[] tags);
-066  ExtendedCellBuilder setTags(final byte[] tags, int tagsOffset, int tagsLength);
-067
-068  ExtendedCellBuilder setSequenceId(final long seqId);
-069}
+065  // TODO : While creating RawCellBuilder allow 'Tag' to be passed instead of byte[]
+066  ExtendedCellBuilder setTags(final byte[] tags);
+067  // TODO : While creating RawCellBuilder allow 'Tag' to be passed instead of byte[]
+068  ExtendedCellBuilder setTags(final byte[] tags, int tagsOffset, int tagsLength);
+069
+070  /**
+071   * Internal usage. Be careful before you use this while building a cell
+072   * @param seqId set the seqId
+073   * @return the current ExtendedCellBuilder
+074   */
+075  ExtendedCellBuilder setSequenceId(final long seqId);
+076}
 
 
 

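The builder surface above is what server-side code uses to attach tags, and (internally) a sequence id, to a cell. A usage sketch, assuming the HBase 2.0 GA shape of the API: setType(Cell.Type) comes from the base CellBuilder, and ArrayBackedTag/TagUtil are internal helpers used here purely for illustration, since setTags still takes a raw byte[].

import java.util.Collections;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.ExtendedCellBuilder;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class TaggedCellSketch {
  public static void main(String[] args) {
    // Tag type 1 is arbitrary for the demo; real tag types are reserved constants.
    byte[] tags = TagUtil.fromList(Collections.<Tag>singletonList(
        new ArrayBackedTag((byte) 1, Bytes.toBytes("demo"))));

    ExtendedCellBuilder builder =
        ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY);
    Cell cell = builder
        .setRow(Bytes.toBytes("row1"))
        .setFamily(Bytes.toBytes("cf"))
        .setQualifier(Bytes.toBytes("q"))
        .setTimestamp(System.currentTimeMillis())
        .setType(Cell.Type.Put)
        .setValue(Bytes.toBytes("v1"))
        .setTags(tags)          // server-side-only tag payload
        .build();
    System.out.println(cell);
  }
}

Ordinary client code should stop before setSequenceId: the new javadoc above flags it as internal usage.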
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b1eb7453/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.html b/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.html
index 06f9ab0..a9787df 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.html
@@ -33,19 +33,32 @@
 025public final class ExtendedCellBuilderFactory {
 026
 027  public static ExtendedCellBuilder create(CellBuilderType type) {
-028    switch (type) {
-029      case SHALLOW_COPY:
-030        return new IndividualBytesFieldCellBuilder();
-031      case DEEP_COPY:
-032        return new KeyValueBuilder();
-033      default:
-034        throw new UnsupportedOperationException("The type:" + type + " is unsupported");
-035    }
-036  }
-037
-038  private ExtendedCellBuilderFactory(){
-039  }
-040}
+028    return create(type, true);
+029  }
+030
+031  /**
+032   * Allows creating a cell with the given CellBuilderType.
+033   * @param type the type of CellBuilder (DEEP_COPY or SHALLOW_COPY).
+034   * @param allowSeqIdUpdate if seqId can be updated. CPs are not allowed to update
+035   *          the seqId
+036   * @return the cell that is created
+037   */
+038  public static ExtendedCellBuilder create(CellBuilderType type, boolean allowSeqIdUpdate) {
+039    switch (type) {
+040      case SHALLOW_COPY:
+041        // CPs are not allowed to update seqID and they always use DEEP_COPY. So we are not
+042        // passing 'allowSeqIdUpdate' to IndividualBytesFieldCellBuilder
+043        return new

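In this snapshot the factory is what gates sequence-id mutation: the one-argument create delegates to create(type, true), while coprocessor-facing callers are meant to pass allowSeqIdUpdate = false. A sketch under those assumptions follows; the claim that a locked builder rejects setSequenceId with UnsupportedOperationException is an assumption about the implementation, not shown in the truncated hunk.

import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.ExtendedCellBuilder;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;

public class FactorySketch {
  public static void main(String[] args) {
    // One-arg form delegates to create(type, true): seqId updates allowed.
    ExtendedCellBuilder serverSide =
        ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY);
    serverSide.setSequenceId(42L); // fine for internal server-side callers

    // Two-arg form with false is what coprocessor-facing code would use.
    ExtendedCellBuilder cpFacing =
        ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY, false);
    try {
      cpFacing.setSequenceId(42L); // assumed to be rejected when locked
    } catch (UnsupportedOperationException expected) {
      System.out.println("seqId updates are locked for coprocessors");
    }
  }
}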
[14/51] [partial] hbase-site git commit: Published site at .

2017-11-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9118853f/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-reports.html
--
diff --git a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-reports.html b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-reports.html
index f8edf1b..0e93257 100644
--- a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-reports.html
+++ b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-reports.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype – Generated Reports
 
@@ -128,7 +128,7 @@
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
 All rights reserved.
 
-  Last Published: 2017-11-22
+  Last Published: 2017-11-23
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9118853f/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-summary.html
--
diff --git a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-summary.html b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-summary.html
index 1bd4072..8711043 100644
--- a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-summary.html
+++ b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-summary.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype – Project Summary
 
@@ -166,7 +166,7 @@
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
 All rights reserved.
 
-  Last Published: 2017-11-22
+  Last Published: 2017-11-23
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9118853f/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/source-repository.html
--
diff --git a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/source-repository.html b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/source-repository.html
index c8ccd44..b9dc3ca 100644
--- a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/source-repository.html
+++ b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/source-repository.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype – Source Code Management
 
@@ -134,7 +134,7 @@
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
 All rights reserved.
 
-  Last Published: 2017-11-22
+  Last Published: 2017-11-23
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9118853f/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/team-list.html
--
diff --git a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/team-list.html b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/team-list.html
index ba9d816..c48c6df 100644
--- a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/team-list.html
+++ b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/team-list.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype – Project Team
 
@@ -547,7 +547,7 @@
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
 All rights reserved.
 
-  Last Published: 2017-11-22
+  Last Published: 2017-11-23
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9118853f/hbase-build-configuration/hbase-archetypes/index.html
--
diff --git a/hbase-build-configuration/hbase-archetypes/index.html b/hbase-build-configuration/hbase-archetypes/index.html
index b538b04..5fb67f6 100644
--- a/hbase-build-configuration/hbase-archetypes/index.html
+++ b/hbase-build-configuration/hbase-archetypes/index.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetypes – About
 
@@ -135,7 +135,7 @@
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
 
