[12/51] [partial] hbase-site git commit: Published site at 620d70d6186fb800299bcc62ad7179fccfd1be41.

2019-01-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa3fb87f/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.html b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.html
index 574f317..f78671d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.html
@@ -39,403 +39,405 @@
 031import org.apache.hadoop.hbase.ServerName;
 032import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
 033import org.apache.hadoop.hbase.util.CancelableProgressable;
-034import org.apache.hadoop.hbase.util.FSUtils;
-035import org.apache.hadoop.hbase.util.LeaseNotRecoveredException;
-036import org.apache.hadoop.hbase.wal.WAL.Entry;
-037import org.apache.hadoop.hbase.wal.WAL.Reader;
-038import org.apache.hadoop.hbase.wal.WALFactory;
-039import org.apache.hadoop.ipc.RemoteException;
-040import org.apache.yetus.audience.InterfaceAudience;
-041import org.apache.yetus.audience.InterfaceStability;
-042import org.slf4j.Logger;
-043import org.slf4j.LoggerFactory;
-044
-045/**
-046 * Streaming access to WAL entries. This class is given a queue of WAL {@link Path}, and continually
-047 * iterates through all the WAL {@link Entry} in the queue. When it's done reading from a Path, it
-048 * dequeues it and starts reading from the next.
-049 */
-050@InterfaceAudience.Private
-051@InterfaceStability.Evolving
-052class WALEntryStream implements Closeable {
-053  private static final Logger LOG = LoggerFactory.getLogger(WALEntryStream.class);
-054
-055  private Reader reader;
-056  private Path currentPath;
-057  // cache of next entry for hasNext()
-058  private Entry currentEntry;
-059  // position for the current entry. As now we support peek, which means that the upper layer may
-060  // choose to return before reading the current entry, so it is not safe to return the value below
-061  // in getPosition.
-062  private long currentPositionOfEntry = 0;
-063  // position after reading current entry
-064  private long currentPositionOfReader = 0;
-065  private final PriorityBlockingQueue<Path> logQueue;
-066  private final FileSystem fs;
-067  private final Configuration conf;
-068  private final WALFileLengthProvider walFileLengthProvider;
-069  // which region server the WALs belong to
-070  private final ServerName serverName;
-071  private final MetricsSource metrics;
-072
-073  /**
-074   * Create an entry stream over the given queue at the given start position
-075   * @param logQueue the queue of WAL paths
-076   * @param fs {@link FileSystem} to use to create {@link Reader} for this stream
-077   * @param conf {@link Configuration} to use to create {@link Reader} for this stream
-078   * @param startPosition the position in the first WAL to start reading at
-079   * @param serverName the server name which all WALs belong to
-080   * @param metrics replication metrics
-081   * @throws IOException
-082   */
-083  public WALEntryStream(PriorityBlockingQueue<Path> logQueue, FileSystem fs, Configuration conf,
-084      long startPosition, WALFileLengthProvider walFileLengthProvider, ServerName serverName,
-085      MetricsSource metrics) throws IOException {
-086    this.logQueue = logQueue;
-087    this.fs = fs;
-088    this.conf = conf;
-089    this.currentPositionOfEntry = startPosition;
-090    this.walFileLengthProvider = walFileLengthProvider;
-091    this.serverName = serverName;
-092    this.metrics = metrics;
-093  }
-094
-095  /**
-096   * @return true if there is another WAL {@link Entry}
-097   */
-098  public boolean hasNext() throws IOException {
-099    if (currentEntry == null) {
-100      tryAdvanceEntry();
-101    }
-102    return currentEntry != null;
-103  }
-104
-105  /**
-106   * Returns the next WAL entry in this stream but does not advance.
-107   */
-108  public Entry peek() throws IOException {
-109    return hasNext() ? currentEntry : null;
-110  }
-111
-112  /**
-113   * Returns the next WAL entry in this stream and advances the stream.
-114   */
-115  public Entry next() throws IOException {
-116    Entry save = peek();
-117    currentPositionOfEntry = currentPositionOfReader;
-118    currentEntry = null;
-119    return save;
-120  }
-121
-122  /**
-123   * {@inheritDoc}
-124   */
-125  @Override
-126  public void close() throws IOException {
-127    closeReader();
-128  }
-129
-130  /**
-131   * @return the position of the last Entry returned by next()
-132   */
-133  public long getPosition() {
-134    return currentPositionOfEntry;
-135  }
-136
-137  /**
-138   * @return the {@link Path} of the current WAL
-139   */
-140  public Path getCurrentPath() {
-141    return currentPath;
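
A minimal consumption sketch for the class above, assuming a stream built with the constructor shown; shouldShip(...) and ship(...) are hypothetical caller-side helpers, not part of WALEntryStream:

try (WALEntryStream entryStream = new WALEntryStream(logQueue, fs, conf,
    startPosition, walFileLengthProvider, serverName, metrics)) {
  while (entryStream.hasNext()) {
    WAL.Entry next = entryStream.peek();   // inspect without consuming
    if (!shouldShip(next)) {
      break;                               // safe: getPosition() is unchanged
    }
    // next() consumes the entry and moves getPosition() past it
    ship(entryStream.next(), entryStream.getPosition());
  }
}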

[12/51] [partial] hbase-site git commit: Published site at 281d6429e55149cc4c05430dcc1d1dc136d8b245.

2019-01-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/901d593a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ProcedurePrepareLatch.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ProcedurePrepareLatch.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ProcedurePrepareLatch.html
index 7ca5dc1..1adf97b 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ProcedurePrepareLatch.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ProcedurePrepareLatch.html
@@ -191,6 +191,10 @@
 private ProcedurePrepareLatch
 AbstractStateMachineNamespaceProcedure.syncLatch
 
+(package private) ProcedurePrepareLatch
+SwitchRpcThrottleProcedure.syncLatch
+
 
@@ -302,6 +306,12 @@
 ProcedurePrepareLatch latch)
 
+SwitchRpcThrottleProcedure(RpcThrottleStorage rpcThrottleStorage,
+  boolean rpcThrottleEnabled,
+  ServerName serverName,
+  ProcedurePrepareLatch syncLatch)
+
 TruncateTableProcedure(MasterProcedureEnv env,
   TableName tableName,
   boolean preserveSplits,
   booleanpreserveSplits,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/901d593a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ServerProcedureInterface.ServerOperationType.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ServerProcedureInterface.ServerOperationType.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ServerProcedureInterface.ServerOperationType.html
index e51337a..bab4008 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ServerProcedureInterface.ServerOperationType.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ServerProcedureInterface.ServerOperationType.html
@@ -104,6 +104,14 @@
 
 
 ServerProcedureInterface.ServerOperationType
+SwitchRpcThrottleRemoteProcedure.getServerOperationType()
+
+ServerProcedureInterface.ServerOperationType
+SplitWALRemoteProcedure.getServerOperationType()
+
+ServerProcedureInterface.ServerOperationType
 ServerProcedureInterface.getServerOperationType()
 Given an operation type we can take decisions about what to do with pending operations.
 
@@ -113,6 +121,14 @@
 ServerCrashProcedure.getServerOperationType()
 
+ServerProcedureInterface.ServerOperationType
+SplitWALProcedure.getServerOperationType()
+
+ServerProcedureInterface.ServerOperationType
+SwitchRpcThrottleProcedure.getServerOperationType()
+
 static ServerProcedureInterface.ServerOperationType
 ServerProcedureInterface.ServerOperationType.valueOf(String name)
 Returns the enum constant of this type with the specified name.
name.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/901d593a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ServerProcedureInterface.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ServerProcedureInterface.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ServerProcedureInterface.html
index 1e5b723..59bc711 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ServerProcedureInterface.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ServerProcedureInterface.html
@@ -108,6 +108,30 @@
 Handle crashed server.
 
+class SplitWALProcedure
+The procedure is to split a WAL.
+
+class SplitWALRemoteProcedure
+A remote procedure which is used to send a split-WAL request to a region server.
+
+class SwitchRpcThrottleProcedure
+The procedure to switch rpc throttle.
+
+class SwitchRpcThrottleRemoteProcedure
+The procedure to switch rpc throttle on a region server.
+
 
 
 
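The new SwitchRpcThrottleProcedure pair above is driven from the client side; a hedged sketch, assuming an Admin API of this vintage exposes switchRpcThrottle (the exact client method is an assumption, not shown in this fragment):

try (Connection conn = ConnectionFactory.createConnection(conf);
     Admin admin = conn.getAdmin()) {
  // Triggers the master-side procedure, which fans out
  // SwitchRpcThrottleRemoteProcedure to every region server.
  boolean oldValue = admin.switchRpcThrottle(false); // disable quota throttling
  System.out.println("rpc throttle was previously " + oldValue);
}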

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/901d593a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/SplitWALProcedure.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/SplitWALProcedure.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/SplitWALProcedure.html
new file mode 100644
index 000..3c30d21
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/SplitWALProcedure.html
@@ -0,0 +1,125 @@
+Uses of Class org.apache.hadoop.hbase.master.procedure.SplitWALProcedure (Apache HBase 3.0.0-SNAPSHOT API)
+[standard generated-Javadoc scaffolding omitted: DOCTYPE, script variables, and the Overview/Package/Class/Use/Tree navigation bar]
+
+org.apache.hadoop.hbase
+Class ProcedureTestUtil
+
+java.lang.Object
+  org.apache.hadoop.hbase.ProcedureTestUtil
+
+public final class ProcedureTestUtil
+extends Object
+
+Field Summary
+private static org.slf4j.Logger LOG
+
+Constructor Summary
+private ProcedureTestUtil()
+
+Method Summary
+
+private static Optional<org.apache.hbase.thirdparty.com.google.gson.JsonObject>
+getProcedure(HBaseTestingUtility util,
+    Class<? extends org.apache.hadoop.hbase.procedure2.Procedure<?>> clazz,
+    org.apache.hbase.thirdparty.com.google.gson.JsonParser parser)
+
+static void
+waitUntilProcedureTimeoutIncrease(HBaseTestingUtility util,
+    Class<? extends org.apache.hadoop.hbase.procedure2.Procedure<?>> clazz,
+    int times)
+
+static void
+waitUntilProcedureWaitingTimeout(HBaseTestingUtility util,
+    Class<? extends org.apache.hadoop.hbase.procedure2.Procedure<?>> clazz,
+    long timeout)
+
+Methods inherited from class java.lang.Object
+clone, equals,
[12/51] [partial] hbase-site git commit: Published site at d7e08317d2f214e4cca7b67578aba0ed7a567d54.

2018-09-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37cf49a6/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
index 566f410..da040ad 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
@@ -341,8361 +341,8425 @@
 333  private final int rowLockWaitDuration;
 334  static final int DEFAULT_ROWLOCK_WAIT_DURATION = 30000;
 335
-336  // The internal wait duration to acquire a lock before read/update
-337  // from the region. It is not per row. The purpose of this wait time
-338  // is to avoid waiting a long time while the region is busy, so that
-339  // we can release the IPC handler soon enough to improve the
-340  // availability of the region server. It can be adjusted by
-341  // tuning configuration "hbase.busy.wait.duration".
-342  final long busyWaitDuration;
-343  static final long DEFAULT_BUSY_WAIT_DURATION = HConstants.DEFAULT_HBASE_RPC_TIMEOUT;
-344
-345  // If updating multiple rows in one call, wait longer,
-346  // i.e. waiting for busyWaitDuration * # of rows. However,
-347  // we can limit the max multiplier.
-348  final int maxBusyWaitMultiplier;
-349
-350  // Max busy wait duration. There is no point to wait longer than the RPC
-351  // purge timeout, when a RPC call will be terminated by the RPC engine.
-352  final long maxBusyWaitDuration;
-353
-354  // Max cell size. If nonzero, the maximum allowed size for any given cell
-355  // in bytes
-356  final long maxCellSize;
-357
-358  // Number of mutations for minibatch processing.
-359  private final int miniBatchSize;
+336  private Path regionDir;
+337  private FileSystem walFS;
+338
+339  // The internal wait duration to acquire a lock before read/update
+340  // from the region. It is not per row. The purpose of this wait time
+341  // is to avoid waiting a long time while the region is busy, so that
+342  // we can release the IPC handler soon enough to improve the
+343  // availability of the region server. It can be adjusted by
+344  // tuning configuration "hbase.busy.wait.duration".
+345  final long busyWaitDuration;
+346  static final long DEFAULT_BUSY_WAIT_DURATION = HConstants.DEFAULT_HBASE_RPC_TIMEOUT;
+347
+348  // If updating multiple rows in one call, wait longer,
+349  // i.e. waiting for busyWaitDuration * # of rows. However,
+350  // we can limit the max multiplier.
+351  final int maxBusyWaitMultiplier;
+352
+353  // Max busy wait duration. There is no point to wait longer than the RPC
+354  // purge timeout, when a RPC call will be terminated by the RPC engine.
+355  final long maxBusyWaitDuration;
+356
+357  // Max cell size. If nonzero, the maximum allowed size for any given cell
+358  // in bytes
+359  final long maxCellSize;
 360
-361  // negative number indicates infinite timeout
-362  static final long DEFAULT_ROW_PROCESSOR_TIMEOUT = 60 * 1000L;
-363  final ExecutorService rowProcessorExecutor = Executors.newCachedThreadPool();
-364
-365  private final ConcurrentHashMap<RegionScanner, Long> scannerReadPoints;
-366
-367  /**
-368   * The sequence ID that was encountered when this region was opened.
-369   */
-370  private long openSeqNum = HConstants.NO_SEQNUM;
-371
-372  /**
-373   * The default setting for whether to enable on-demand CF loading for
-374   * scan requests to this region. Requests can override it.
-375   */
-376  private boolean isLoadingCfsOnDemandDefault = false;
-377
-378  private final AtomicInteger majorInProgress = new AtomicInteger(0);
-379  private final AtomicInteger minorInProgress = new AtomicInteger(0);
+361  // Number of mutations for minibatch processing.
+362  private final int miniBatchSize;
+363
+364  // negative number indicates infinite timeout
+365  static final long DEFAULT_ROW_PROCESSOR_TIMEOUT = 60 * 1000L;
+366  final ExecutorService rowProcessorExecutor = Executors.newCachedThreadPool();
+367
+368  private final ConcurrentHashMap<RegionScanner, Long> scannerReadPoints;
+369
+370  /**
+371   * The sequence ID that was encountered when this region was opened.
+372   */
+373  private long openSeqNum = HConstants.NO_SEQNUM;
+374
+375  /**
+376   * The default setting for whether to enable on-demand CF loading for
+377   * scan requests to this region. Requests can override it.
+378   */
+379  private boolean isLoadingCfsOnDemandDefault = false;
 380
-381  //
-382  // Context: During replay we want to ensure that we do not lose any data. So, we
-383  // have to be conservative in how we replay wals. For each store, we calculate
-384  // the maxSeqId up to which the
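
A sketch of how the wait fields above combine, inferred only from the comments in this hunk (the real HRegion logic is not shown here):

// busyWaitDuration scales with the row count of a multi-row call, capped by
// maxBusyWaitMultiplier and by maxBusyWaitDuration (~ the RPC purge timeout).
static long busyWaitFor(long busyWaitDuration, int maxBusyWaitMultiplier,
    long maxBusyWaitDuration, int numRows) {
  long multiplier = Math.min(Math.max(numRows, 1), maxBusyWaitMultiplier);
  return Math.min(busyWaitDuration * multiplier, maxBusyWaitDuration);
}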

[12/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
index 25f458d..20e3eaa 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
@@ -28,3711 +28,3756 @@
 020import java.io.FileNotFoundException;
 021import java.io.IOException;
 022import java.io.InterruptedIOException;
-023import java.lang.reflect.InvocationTargetException;
-024import java.net.BindException;
-025import java.net.InetSocketAddress;
-026import java.net.UnknownHostException;
-027import java.nio.ByteBuffer;
-028import java.util.ArrayList;
-029import java.util.Arrays;
-030import java.util.Collections;
-031import java.util.HashMap;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Map;
-035import java.util.Map.Entry;
-036import java.util.NavigableMap;
-037import java.util.Set;
-038import java.util.TreeSet;
-039import java.util.concurrent.ConcurrentHashMap;
-040import java.util.concurrent.ConcurrentMap;
-041import java.util.concurrent.TimeUnit;
-042import java.util.concurrent.atomic.AtomicBoolean;
-043import java.util.concurrent.atomic.AtomicLong;
-044import java.util.concurrent.atomic.LongAdder;
-045import org.apache.commons.lang3.mutable.MutableObject;
-046import org.apache.hadoop.conf.Configuration;
-047import org.apache.hadoop.fs.Path;
-048import org.apache.hadoop.hbase.ByteBufferExtendedCell;
-049import org.apache.hadoop.hbase.CacheEvictionStats;
-050import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
-051import org.apache.hadoop.hbase.Cell;
-052import org.apache.hadoop.hbase.CellScannable;
-053import org.apache.hadoop.hbase.CellScanner;
-054import org.apache.hadoop.hbase.CellUtil;
-055import org.apache.hadoop.hbase.CompareOperator;
-056import org.apache.hadoop.hbase.DoNotRetryIOException;
-057import org.apache.hadoop.hbase.DroppedSnapshotException;
-058import org.apache.hadoop.hbase.HBaseIOException;
-059import org.apache.hadoop.hbase.HConstants;
-060import org.apache.hadoop.hbase.MultiActionResultTooLarge;
-061import org.apache.hadoop.hbase.NotServingRegionException;
-062import org.apache.hadoop.hbase.PrivateCellUtil;
-063import org.apache.hadoop.hbase.RegionTooBusyException;
-064import org.apache.hadoop.hbase.Server;
-065import org.apache.hadoop.hbase.ServerName;
-066import org.apache.hadoop.hbase.TableName;
-067import org.apache.hadoop.hbase.UnknownScannerException;
-068import org.apache.hadoop.hbase.client.Append;
-069import org.apache.hadoop.hbase.client.ConnectionUtils;
-070import org.apache.hadoop.hbase.client.Delete;
-071import org.apache.hadoop.hbase.client.Durability;
-072import org.apache.hadoop.hbase.client.Get;
-073import org.apache.hadoop.hbase.client.Increment;
-074import org.apache.hadoop.hbase.client.Mutation;
-075import org.apache.hadoop.hbase.client.Put;
-076import org.apache.hadoop.hbase.client.RegionInfo;
-077import org.apache.hadoop.hbase.client.RegionReplicaUtil;
-078import org.apache.hadoop.hbase.client.Result;
-079import org.apache.hadoop.hbase.client.Row;
-080import org.apache.hadoop.hbase.client.RowMutations;
-081import org.apache.hadoop.hbase.client.Scan;
-082import org.apache.hadoop.hbase.client.TableDescriptor;
-083import org.apache.hadoop.hbase.client.VersionInfoUtil;
-084import org.apache.hadoop.hbase.conf.ConfigurationObserver;
-085import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-086import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
-087import org.apache.hadoop.hbase.exceptions.ScannerResetException;
-088import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-089import org.apache.hadoop.hbase.filter.ByteArrayComparable;
-090import org.apache.hadoop.hbase.io.TimeRange;
-091import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
-092import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-093import org.apache.hadoop.hbase.ipc.PriorityFunction;
-094import org.apache.hadoop.hbase.ipc.QosPriority;
-095import org.apache.hadoop.hbase.ipc.RpcCallContext;
-096import org.apache.hadoop.hbase.ipc.RpcCallback;
-097import org.apache.hadoop.hbase.ipc.RpcScheduler;
-098import org.apache.hadoop.hbase.ipc.RpcServer;
-099import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
-100import org.apache.hadoop.hbase.ipc.RpcServerFactory;
-101import org.apache.hadoop.hbase.ipc.RpcServerInterface;
-102import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-103import org.apache.hadoop.hbase.ipc.ServerRpcController;
[12/51] [partial] hbase-site git commit: Published site at cd161d976ef47b84e904f2d54bac65d2f3417c2a.

2018-09-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/fa1bebf8/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
index 2c14c50..43c66a8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
@@ -46,2104 +46,2113 @@
 038import java.util.concurrent.atomic.AtomicLong;
 039import java.util.stream.Collectors;
 040import java.util.stream.Stream;
-041import org.apache.hadoop.conf.Configuration;
-042import org.apache.hadoop.hbase.HConstants;
-043import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
-044import org.apache.hadoop.hbase.log.HBaseMarkers;
-045import org.apache.hadoop.hbase.procedure2.Procedure.LockState;
-046import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
-047import org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator;
-048import org.apache.hadoop.hbase.procedure2.util.StringUtils;
-049import org.apache.hadoop.hbase.security.User;
-050import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-051import org.apache.hadoop.hbase.util.IdLock;
-052import org.apache.hadoop.hbase.util.NonceKey;
-053import org.apache.hadoop.hbase.util.Threads;
-054import org.apache.yetus.audience.InterfaceAudience;
-055import org.slf4j.Logger;
-056import org.slf4j.LoggerFactory;
-057
-058import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-059import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-060
-061import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;
-062
-063/**
-064 * Thread Pool that executes the submitted procedures.
-065 * The executor has a ProcedureStore associated.
-066 * Each operation is logged and on restart the pending procedures are resumed.
-067 *
-068 * Unless the Procedure code throws an error (e.g. invalid user input)
-069 * the procedure will complete (at some point in time). On restart the pending
-070 * procedures are resumed and the ones that failed will be rolled back.
-071 *
-072 * The user can add procedures to the executor via submitProcedure(proc),
-073 * check for the finished state via isFinished(procId),
-074 * and get the result via getResult(procId).
-075 */
-076@InterfaceAudience.Private
-077public class ProcedureExecutor<TEnvironment> {
-078  private static final Logger LOG = LoggerFactory.getLogger(ProcedureExecutor.class);
-079
-080  public static final String CHECK_OWNER_SET_CONF_KEY = "hbase.procedure.check.owner.set";
-081  private static final boolean DEFAULT_CHECK_OWNER_SET = false;
-082
-083  public static final String WORKER_KEEP_ALIVE_TIME_CONF_KEY =
-084      "hbase.procedure.worker.keep.alive.time.msec";
-085  private static final long DEFAULT_WORKER_KEEP_ALIVE_TIME = TimeUnit.MINUTES.toMillis(1);
-086
-087  /**
-088   * {@link #testing} is non-null when ProcedureExecutor is being tested. Tests will try to
-089   * break PE having it fail at various junctures. When non-null, testing is set to an instance of
-090   * the below internal {@link Testing} class with flags set for the particular test.
-091   */
-092  Testing testing = null;
-093
-094  /**
-095   * Class with parameters describing how to fail/die when in testing-context.
-096   */
-097  public static class Testing {
-098    protected boolean killIfHasParent = true;
-099    protected boolean killIfSuspended = false;
-100
-101    /**
-102     * Kill the PE BEFORE we store state to the WAL. Good for figuring out if a Procedure is
-103     * persisting all the state it needs to recover after a crash.
-104     */
-105    protected boolean killBeforeStoreUpdate = false;
-106    protected boolean toggleKillBeforeStoreUpdate = false;
-107
-108    /**
-109     * Set when we want to fail AFTER state has been stored into the WAL. Rarely used. HBASE-20978
-110     * is about a case where memory-state was being set after store to WAL where a crash could
-111     * cause us to get stuck. This flag allows killing at what was a vulnerable time.
-112     */
-113    protected boolean killAfterStoreUpdate = false;
-114    protected boolean toggleKillAfterStoreUpdate = false;
-115
-116    protected boolean shouldKillBeforeStoreUpdate() {
-117      final boolean kill = this.killBeforeStoreUpdate;
-118      if (this.toggleKillBeforeStoreUpdate) {
-119        this.killBeforeStoreUpdate = !kill;
-120        LOG.warn("Toggle KILL before store update to: " + this.killBeforeStoreUpdate);
-121      }
-122      return kill;
-123    }
-124
-125
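
A hedged sketch of how a crash-recovery test might use the Testing hooks above (writing the package-private testing field assumes test code living in the same package):

ProcedureExecutor.Testing testing = new ProcedureExecutor.Testing();
testing.killBeforeStoreUpdate = true;       // die before state reaches the WAL
testing.toggleKillBeforeStoreUpdate = true; // alternate kill/no-kill each step
procExecutor.testing = testing;             // next store update "crashes" the PE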

[12/51] [partial] hbase-site git commit: Published site at c6a65ba63fce85ac7c4b62b96ef2bbe6c35d2f00.

2018-09-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/293abb17/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
index c372545..af3b364 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
@@ -1279,322 +1279,339 @@
 1271    List<RegionInfo> lastFewRegions = new ArrayList<>();
 1272    // assign the remaining by going through the list and try to assign to servers one-by-one
 1273    int serverIdx = RANDOM.nextInt(numServers);
-1274    for (RegionInfo region : unassignedRegions) {
+1274    OUTER : for (RegionInfo region : unassignedRegions) {
 1275      boolean assigned = false;
-1276      for (int j = 0; j < numServers; j++) { // try all servers one by one
+1276      INNER : for (int j = 0; j < numServers; j++) { // try all servers one by one
 1277        ServerName serverName = servers.get((j + serverIdx) % numServers);
 1278        if (!cluster.wouldLowerAvailability(region, serverName)) {
 1279          List<RegionInfo> serverRegions =
 1280              assignments.computeIfAbsent(serverName, k -> new ArrayList<>());
-1281          serverRegions.add(region);
-1282          cluster.doAssignRegion(region, serverName);
-1283          serverIdx = (j + serverIdx + 1) % numServers; // remain from next server
-1284          assigned = true;
-1285          break;
-1286        }
-1287      }
-1288      if (!assigned) {
-1289        lastFewRegions.add(region);
-1290      }
-1291    }
-1292    // just sprinkle the rest of the regions on random regionservers. The balanceCluster will
-1293    // make it optimal later. we can end up with this if numReplicas > numServers.
-1294    for (RegionInfo region : lastFewRegions) {
-1295      int i = RANDOM.nextInt(numServers);
-1296      ServerName server = servers.get(i);
-1297      List<RegionInfo> serverRegions = assignments.computeIfAbsent(server, k -> new ArrayList<>());
-1298      serverRegions.add(region);
-1299      cluster.doAssignRegion(region, server);
-1300    }
-1301    return assignments;
-1302  }
-1303
-1304  protected Cluster createCluster(List<ServerName> servers, Collection<RegionInfo> regions) {
-1305    // Get the snapshot of the current assignments for the regions in question, and then create
-1306    // a cluster out of it. Note that we might have replicas already assigned to some servers
-1307    // earlier. So we want to get the snapshot to see those assignments, but this will only contain
-1308    // replicas of the regions that are passed (for performance).
-1309    Map<ServerName, List<RegionInfo>> clusterState = getRegionAssignmentsByServer(regions);
-1310
-1311    for (ServerName server : servers) {
-1312      if (!clusterState.containsKey(server)) {
-1313        clusterState.put(server, EMPTY_REGION_LIST);
-1314      }
-1315    }
-1316    return new Cluster(regions, clusterState, null, this.regionFinder,
-1317        rackManager);
-1318  }
-1319
-1320  private List<ServerName> findIdleServers(List<ServerName> servers) {
-1321    return this.services.getServerManager()
-1322        .getOnlineServersListWithPredicator(servers, IDLE_SERVER_PREDICATOR);
-1323  }
-1324
-1325  /**
-1326   * Used to assign a single region to a random server.
-1327   */
-1328  @Override
-1329  public ServerName randomAssignment(RegionInfo regionInfo, List<ServerName> servers)
-1330      throws HBaseIOException {
-1331    metricsBalancer.incrMiscInvocations();
-1332    if (servers != null && servers.contains(masterServerName)) {
-1333      if (shouldBeOnMaster(regionInfo)) {
-1334        return masterServerName;
-1335      }
-1336      if (!LoadBalancer.isTablesOnMaster(getConf())) {
-1337        // Guarantee we do not put any regions on master
-1338        servers = new ArrayList<>(servers);
-1339        servers.remove(masterServerName);
-1340      }
-1341    }
-1342
-1343    int numServers = servers == null ? 0 : servers.size();
-1344    if (numServers == 0) {
-1345      LOG.warn("Wanted to retain assignment but no servers to assign to");
-1346      return null;
-1347    }
-1348    if (numServers == 1) { // Only one server, nothing fancy we can do here
-1349      return servers.get(0);
-1350    }
-1351    List<ServerName> idleServers = findIdleServers(servers);
-1352    if (idleServers.size() == 1) {
-1353      return idleServers.get(0);
-1354    }
-1355    final List<ServerName> finalServers = idleServers.isEmpty() ?
-1356        servers : idleServers;
-1357    List<RegionInfo> regions = Lists.newArrayList(regionInfo);
-1358    Cluster cluster =
[12/51] [partial] hbase-site git commit: Published site at 7c1fad4992a169a35b4457e6f4afcb30d04406e9.

2018-08-31 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/74f60271/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureRetainer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureRetainer.html b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureRetainer.html
index d11176a..2c14c50 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureRetainer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureRetainer.html
@@ -982,1050 +982,1168 @@
 974  }
 975
 976  /**
-977   * Add a new root-procedure to the executor.
-978   * @param proc the new procedure to execute.
-979   * @param nonceKey the registered unique identifier for this operation from the client or process.
-980   * @return the procedure id, that can be used to monitor the operation
-981   */
-982  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NP_NULL_ON_SOME_PATH",
-983      justification = "FindBugs is blind to the check-for-null")
-984  public long submitProcedure(Procedure<TEnvironment> proc, NonceKey nonceKey) {
-985    Preconditions.checkArgument(lastProcId.get() >= 0);
-986
-987    prepareProcedure(proc);
-988
-989    final Long currentProcId;
-990    if (nonceKey != null) {
-991      currentProcId = nonceKeysToProcIdsMap.get(nonceKey);
-992      Preconditions.checkArgument(currentProcId != null,
-993        "Expected nonceKey=" + nonceKey + " to be reserved, use registerNonce(); proc=" + proc);
-994    } else {
-995      currentProcId = nextProcId();
-996    }
-997
-998    // Initialize the procedure
-999    proc.setNonceKey(nonceKey);
-1000    proc.setProcId(currentProcId.longValue());
-1001
-1002    // Commit the transaction
-1003    store.insert(proc, null);
-1004    LOG.debug("Stored {}", proc);
-1005
-1006    // Add the procedure to the executor
-1007    return pushProcedure(proc);
-1008  }
-1009
-1010  /**
-1011   * Add a set of new root-procedures to the executor.
-1012   * @param procs the new procedures to execute.
-1013   */
-1014  // TODO: Do we need to take nonces here?
-1015  public void submitProcedures(Procedure<TEnvironment>[] procs) {
-1016    Preconditions.checkArgument(lastProcId.get() >= 0);
-1017    if (procs == null || procs.length <= 0) {
-1018      return;
-1019    }
-1020
-1021    // Prepare procedures
-1022    for (int i = 0; i < procs.length; ++i) {
-1023      prepareProcedure(procs[i]).setProcId(nextProcId());
-1024    }
-1025
-1026    // Commit the transaction
-1027    store.insert(procs);
-1028    if (LOG.isDebugEnabled()) {
-1029      LOG.debug("Stored " + Arrays.toString(procs));
-1030    }
-1031
-1032    // Add the procedures to the executor
-1033    for (int i = 0; i < procs.length; ++i) {
-1034      pushProcedure(procs[i]);
-1035    }
-1036  }
-1037
-1038  private Procedure<TEnvironment> prepareProcedure(Procedure<TEnvironment> proc) {
-1039    Preconditions.checkArgument(proc.getState() == ProcedureState.INITIALIZING);
-1040    Preconditions.checkArgument(!proc.hasParent(), "unexpected parent", proc);
-1041    if (this.checkOwnerSet) {
-1042      Preconditions.checkArgument(proc.hasOwner(), "missing owner");
-1043    }
-1044    return proc;
-1045  }
-1046
-1047  private long pushProcedure(Procedure<TEnvironment> proc) {
-1048    final long currentProcId = proc.getProcId();
+977   * Bypass a procedure. If the procedure is set to bypass, all the logic in
+978   * execute/rollback will be ignored and it will return success, whatever.
+979   * It is used to recover buggy stuck procedures, releasing the lock resources
+980   * and letting other procedures run. Bypassing one procedure (its ancestors will
+981   * be bypassed automatically) may leave the cluster in a middle state, e.g. region
+982   * not assigned, or some hdfs files left behind. After getting rid of those stuck procedures,
+983   * the operators may have to do some clean up on hdfs or schedule some assign procedures
+984   * to bring regions online. DO AT YOUR OWN RISK.
+985   * <p>
+986   * A procedure can be bypassed only if
+987   * 1. The procedure is in state of RUNNABLE, WAITING, WAITING_TIMEOUT,
+988   * or it is a root procedure without any child.
+989   * 2. No other worker thread is executing it
+990   * 3. No child procedure has been submitted
+991   *
+992   * <p>
+993   * If all the requirements are met, the procedure and its ancestors will be
+994   * bypassed and persisted to WAL.
+995   *
+996   * <p>
+997   * If the procedure is in WAITING state, this will set it to RUNNABLE and add it to the run queue.
+998   * TODO: What about WAITING_TIMEOUT?
+999   * @param id the procedure id
+1000   * @param lockWait time to wait lock
+1001   * @param force if force set to true, we will bypass the procedure even if
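
The precondition in submitProcedure above ("use registerNonce()") implies a two-step client pattern; a hedged sketch, assuming registerNonce returns a negative value when the nonce is newly reserved (and an existing procedure id when that operation was already submitted):

NonceKey nonceKey = new NonceKey(nonceGroup, nonce); // caller-chosen identifiers
long procId = executor.registerNonce(nonceKey);
if (procId < 0) {
  // Nonce newly reserved: this caller owns the submission.
  procId = executor.submitProcedure(proc, nonceKey);
}
// Either way, procId can be polled via isFinished(procId)/getResult(procId).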

[12/51] [partial] hbase-site git commit: Published site at 3afe9fb7e6ebfa71187cbe131558a83fae61cecd.

2018-08-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/424d7e41/testdevapidocs/org/apache/hadoop/hbase/IntegrationTestingUtility.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/IntegrationTestingUtility.html b/testdevapidocs/org/apache/hadoop/hbase/IntegrationTestingUtility.html
index 094e374..88c97d8 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/IntegrationTestingUtility.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/IntegrationTestingUtility.html
@@ -285,7 +285,7 @@ extends 
 
 Methods inherited from class org.apache.hadoop.hbase.HBaseTestingUtility
-assertKVListsEqual, assertRegionOnlyOnServer, assertRegionOnServer, assignRegion, available, checksumRows, cleanupDataTestDirOnTestFS, closeRegionAndWAL, compact, countRows, createLocalHRegion, createLocalHRegionWithInMemoryFlags, createLocalHTU, createMockRegionServerService, createMultiRegionsInMeta, createMultiRegionTable, createPreSplitLoadTestTable, createRandomTable, createRegionAndWAL, createRootDir, createTable, createTableDescriptor, createTestRegion, createWal, createWALRootDir, deleteNumericRows, deleteTable, deleteTableData, deleteTableIfAny, enableDebug, ensureSomeNonStoppedRegionServersAvailable, ensureSomeRegionServersAvailable, expireMasterSession, expireRegionServerSession, expireSession, explainTableAvailability, explainTableState, findLastTableState, flush, generateColumnDescriptors, getAdmin, getAllOnlineRegions, getClosestRowBefore, getClusterKey, getConfiguration, getConnection, getDataTestDirOnTestFS, getDefaultRootDirPath, getDFSCluster, getDifferentUser, getFromStoreFile, getHBaseAdmin, getHBaseCluster, getHBaseClusterInterface, getMetaRSPort, getMetaTableDescriptor, getMetaTableDescriptorBuilder, getMetaTableRows, getMiniHBaseCluster, getNumHFiles, getNumHFilesForRS, getOtherRegionServer, getRegionSplitStartKeys, getRSForFirstRegionInTable, getSplittableRegion, getSupportedCompressionAlgorithms, getTestFileSystem, isReadShortCircuitOn, loadNumericRows, loadRandomRows, loadRegion, loadTable, memStoreTSTagsAndOffheapCombination, modifyTableSync, moveRegionAndWait, predicateNoRegionsInTransition, predicateTableAvailable, predicateTableDisabled, predicateTableEnabled, randomFreePort, randomMultiCastAddress, restartHBaseCluster, safeGetAsStr, setDFSCluster, setFileSystemURI, setHBaseCluster, setMaxRecoveryErrorCount, setReplicas, setupDataTestDir, setupMiniKdc, shutdownMiniCluster, shutdownMiniDFSCluster, shutdownMiniHBaseCluster, shutdownMiniMapReduceCluster, startMiniCluster, startMiniDFSCluster,

[12/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/filter/RandomRowFilter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/RandomRowFilter.html b/devapidocs/org/apache/hadoop/hbase/filter/RandomRowFilter.html
index ee6b5ad..919255b 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/RandomRowFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/RandomRowFilter.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":42,"i4":10,"i5":10,"i6":10,"i7":10,"i8":9,"i9":10,"i10":10,"i11":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":42,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":10,"i12":10,"i13":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -120,7 +120,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
-public class RandomRowFilter
+public class RandomRowFilter
 extends FilterBase
 A filter that includes rows based on a chance.
 
@@ -217,24 +217,28 @@ extends
 
 boolean
+equals(Object obj)
+
+boolean
 filterAllRemaining()
 Filters that never filter all remaining can inherit this implementation that never stops the filter early.
 
 Filter.ReturnCode
 filterCell(Cell c)
 A way to filter based on the column family, column qualifier and/or the column value.
 
 Filter.ReturnCode
 filterKeyValue(Cell c)
 Deprecated.
 
 boolean
 filterRow()
 Filters that never filter by rows based on previously gathered state from
@@ -242,41 +246,45 @@ extends
 
 boolean
 filterRowKey(Cell firstRowCell)
 Filters a row based on the row key.
 
 float
 getChance()
 
 boolean
 hasFilterRow()
 Filters that never filter by modifying the returned List of Cells can inherit this implementation that does nothing.
 
+int
+hashCode()
+
 static RandomRowFilter
 parseFrom(byte[] pbBytes)
 
 void
 reset()
 Filters that are purely stateless and do nothing in their reset() methods can inherit this null/empty implementation.
 
 void
 setChance(float chance)
 Set the chance that a row is included.
 
 byte[]
 toByteArray()
 Return length 0 byte array for Filters that don't require special serialization
@@ -302,7 +310,7 @@ extends
 
 Methods inherited from class java.lang.Object
-clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait
+clone, finalize, getClass, notify, notifyAll,
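
Client-side usage sketch for the filter documented above; the one-float-argument constructor matches getChance()/setChance(), and the table setup is assumed boilerplate:

Scan scan = new Scan();
scan.setFilter(new RandomRowFilter(0.01f)); // keep each row with probability 0.01
try (ResultScanner scanner = table.getScanner(scan)) {
  for (Result r : scanner) {
    // roughly a 1% sample of the table's rows
  }
}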

[12/51] [partial] hbase-site git commit: Published site at 6a5b4f2a5c188f8eef4f2250b8b7db7dd1e750e4.

2018-08-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1ff05a18/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.PostOpenDeployTasksThread.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.PostOpenDeployTasksThread.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.PostOpenDeployTasksThread.html
index 2709ea3..4a11f27 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.PostOpenDeployTasksThread.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.PostOpenDeployTasksThread.html
@@ -37,309 +37,299 @@
 029import org.apache.hadoop.hbase.executor.EventType;
 030import org.apache.hadoop.hbase.regionserver.HRegion;
 031import org.apache.hadoop.hbase.regionserver.Region;
-032import org.apache.hadoop.hbase.regionserver.RegionServerAccounting;
-033import org.apache.hadoop.hbase.regionserver.RegionServerServices;
-034import org.apache.hadoop.hbase.regionserver.RegionServerServices.PostOpenDeployContext;
-035import org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTransitionContext;
-036import org.apache.hadoop.hbase.util.CancelableProgressable;
-037import org.apache.yetus.audience.InterfaceAudience;
-038import org.slf4j.Logger;
-039import org.slf4j.LoggerFactory;
-040import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
-041/**
-042 * Handles opening of a region on a region server.
-043 * <p>
-044 * This is executed after receiving an OPEN RPC from the master or client.
-045 */
-046@InterfaceAudience.Private
-047public class OpenRegionHandler extends EventHandler {
-048  private static final Logger LOG = LoggerFactory.getLogger(OpenRegionHandler.class);
-049
-050  protected final RegionServerServices rsServices;
-051
-052  private final RegionInfo regionInfo;
-053  private final TableDescriptor htd;
-054  private final long masterSystemTime;
-055
-056  public OpenRegionHandler(final Server server,
-057      final RegionServerServices rsServices, RegionInfo regionInfo,
-058      TableDescriptor htd, long masterSystemTime) {
-059    this(server, rsServices, regionInfo, htd, masterSystemTime, EventType.M_RS_OPEN_REGION);
-060  }
-061
-062  protected OpenRegionHandler(final Server server,
-063      final RegionServerServices rsServices, final RegionInfo regionInfo,
-064      final TableDescriptor htd, long masterSystemTime, EventType eventType) {
-065    super(server, eventType);
-066    this.rsServices = rsServices;
-067    this.regionInfo = regionInfo;
-068    this.htd = htd;
-069    this.masterSystemTime = masterSystemTime;
-070  }
-071
-072  public RegionInfo getRegionInfo() {
-073    return regionInfo;
-074  }
-075
-076  @Override
-077  public void process() throws IOException {
-078    boolean openSuccessful = false;
-079    final String regionName = regionInfo.getRegionNameAsString();
-080    HRegion region = null;
-081
-082    try {
-083      if (this.server.isStopped() || this.rsServices.isStopping()) {
-084        return;
-085      }
-086      final String encodedName = regionInfo.getEncodedName();
-087
-088      // 2 different difficult situations can occur
-089      // 1) The opening was cancelled. This is an expected situation
-090      // 2) The region is now marked as online while we're supposed to open. This would be a bug.
-091
-092      // Check that this region is not already online
-093      if (this.rsServices.getRegion(encodedName) != null) {
-094        LOG.error("Region " + encodedName +
-095            " was already online when we started processing the opening. " +
-096            "Marking this new attempt as failed");
-097        return;
-098      }
-099
-100      // Check that we're still supposed to open the region.
-101      // If it fails, just return. Someone stole the region from under us.
-102      if (!isRegionStillOpening()) {
-103        LOG.error("Region " + encodedName + " opening cancelled");
-104        return;
-105      }
-106
-107      // Open region. After a successful open, failures in subsequent
-108      // processing need to do a close as part of cleanup.
-109      region = openRegion();
-110      if (region == null) {
-111        return;
-112      }
-113
-114      if (!updateMeta(region, masterSystemTime) || this.server.isStopped() ||
-115          this.rsServices.isStopping()) {
-116        return;
-117      }
-118
-119      if (!isRegionStillOpening()) {
-120        return;
-121      }
-122
-123      // Successful region open, and add it to MutableOnlineRegions
-124      this.rsServices.addRegion(region);
-125      openSuccessful = true;
-126
-127      // Done! Successful region open
-128      LOG.debug("Opened " +
[12/51] [partial] hbase-site git commit: Published site at 63f2d3cbdc8151f5f61f33e0a078c51b9ac076a5.

2018-08-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7ae6a80c/devapidocs/org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.html
index 9e5ca1f..416b70e 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.html
@@ -101,7 +101,7 @@ var activeTableTab = "activeTableTab";
 
 
 All Known Implementing Classes:
-AbstractStateMachineNamespaceProcedure, AbstractStateMachineRegionProcedure, AbstractStateMachineTableProcedure, AssignProcedure, CloneSnapshotProcedure, CreateNamespaceProcedure, CreateTableProcedure, DeleteNamespaceProcedure, DeleteTableProcedure, DisableTableProcedure, EnableTableProcedure, GCMergedRegionsProcedure, GCRegionProcedure, InitMetaProcedure, LockProcedure, MergeTableRegionsProcedure, ModifyNamespaceProcedure, ModifyTableProcedure, MoveRegionProcedure, RegionTransitionProcedure, ReopenTableRegionsProcedure, RestoreSnapshotProcedure, SplitTableRegionProcedure, TruncateTableProcedure, UnassignProcedure
+AbstractStateMachineNamespaceProcedure, AbstractStateMachineRegionProcedure, AbstractStateMachineTableProcedure, AssignProcedure, CloneSnapshotProcedure, CloseRegionProcedure, CreateNamespaceProcedure, CreateTableProcedure, DeleteNamespaceProcedure, DeleteTableProcedure, DisableTableProcedure, EnableTableProcedure, GCMergedRegionsProcedure, GCRegionProcedure, InitMetaProcedure, LockProcedure, MergeTableRegionsProcedure, ModifyNamespaceProcedure, ModifyTableProcedure, MoveRegionProcedure, OpenRegionProcedure, RegionRemoteProcedureBase, RegionTransitionProcedure, ReopenTableRegionsProcedure, RestoreSnapshotProcedure, SplitTableRegionProcedure, TransitRegionStateProcedure, TruncateTableProcedure, UnassignProcedure
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7ae6a80c/devapidocs/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.html
index bb7d09a..7c4a28d 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":9,"i14":10,"i15":10,"i16":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":9,"i15":10,"i16":10,"i17":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -309,42 +309,48 @@ extends
 protected boolean
+holdLock(MasterProcedureEnv env)
+Used to keep the procedure lock even when the procedure is yielding or suspended.
+
+protected boolean
 isRollbackSupported(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableState state)
 Used by the default implementation of abort() to know if the current state can be aborted and rollback can be triggered.
 
 private void
 postTruncate(MasterProcedureEnv env)
 
 private boolean
 prepareTruncate(MasterProcedureEnv env)
 
 private boolean
 preTruncate(MasterProcedureEnv env)
 
 private static List<RegionInfo>
 recreateRegionInfo(List<RegionInfo> regions)
 
 protected void
 rollbackState(MasterProcedureEnv env,
     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableState state)
 called to perform the rollback of the specified state
 
 protected void
 serializeStateData(ProcedureStateSerializer serializer)
 The user-level code of the procedure may have some state to persist (e.g.
 
 void
 toStringClassDetails(StringBuilder sb)
 Extend the toString() information with the procedure details
@@ -371,7 +377,7 @@ extends Procedure
-addStackIndex, afterReplay, beforeReplay, compareTo, doExecute, doRollback, elapsedTime, getChildrenLatch, getException, getLastUpdate, getNonceKey, getOwner,
 

[12/51] [partial] hbase-site git commit: Published site at 092efb42749bf7fc6ad338c96aae8e7b9d3a2c74.

2018-08-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f3d62514/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.MutationReplay.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.MutationReplay.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.MutationReplay.html
index 63e4b46..514f830 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.MutationReplay.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.MutationReplay.html
@@ -468,15 +468,15 @@
 460   * creating it if necessary.
 461   * @param logEntry
 462   * @param fileNameBeingSplit the file 
being split currently. Used to generate tmp file name.
-463   * @param conf
-464   * @return Path to file into which to 
dump split log edits.
-465   * @throws IOException
-466   */
-467  @SuppressWarnings("deprecation")
-468  @VisibleForTesting
-469  static Path 
getRegionSplitEditsPath(final Entry logEntry, String fileNameBeingSplit,
-470  Configuration conf)
-471  throws IOException {
+463   * @param tmpDirName of the directory 
used to sideline old recovered edits file
+464   * @param conf
+465   * @return Path to file into which to 
dump split log edits.
+466   * @throws IOException
+467   */
+468  @SuppressWarnings("deprecation")
+469  @VisibleForTesting
+470  static Path 
getRegionSplitEditsPath(final Entry logEntry, String fileNameBeingSplit,
+471  String tmpDirName, Configuration 
conf) throws IOException {
 472FileSystem fs = 
FileSystem.get(conf);
 473Path rootDir = 
FSUtils.getRootDir(conf);
 474Path tableDir = 
FSUtils.getTableDir(rootDir, logEntry.getKey().getTableName());
@@ -491,7 +491,7 @@
 483  return null;
 484}
 485if (fs.exists(dir) && fs.isFile(dir)) {
-486  Path tmp = new Path("/tmp");
+486  Path tmp = new Path(tmpDirName);
 487  if (!fs.exists(tmp)) {
 488fs.mkdirs(tmp);
 489  }
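
The hunk above is the behavioral core of this change: when the recovered.edits path turns out to be a file, the splitter now sidelines it under the caller-supplied tmpDirName instead of a hardcoded "/tmp". A hedged sketch of just that step, using the real Hadoop FileSystem API inside a hypothetical holder class:

    import java.io.IOException;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    final class SidelineDirs { // hypothetical holder class, not WALSplitter itself
      static Path ensureSidelineDir(FileSystem fs, String tmpDirName) throws IOException {
        Path tmp = new Path(tmpDirName); // was: new Path("/tmp")
        if (!fs.exists(tmp)) {
          fs.mkdirs(tmp); // create the sideline directory on first use
        }
        return tmp;
      }
    }
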
@@ -1520,411 +1520,413 @@
 1512 * @return a path with a write for 
that path. caller should close.
 1513 */
 1514WriterAndPath createWAP(byte[] 
region, Entry entry) throws IOException {
-1515  Path regionedits = 
getRegionSplitEditsPath(entry,
-1516  
fileBeingSplit.getPath().getName(), conf);
-1517  if (regionedits == null) {
-1518return null;
-1519  }
-1520  FileSystem rootFs = 
FileSystem.get(conf);
-1521  if (rootFs.exists(regionedits)) 
{
-1522LOG.warn("Found old edits file. 
It could be the "
-1523+ "result of a previous 
failed split attempt. Deleting " + regionedits + ", length="
-1524+ 
rootFs.getFileStatus(regionedits).getLen());
-1525if (!rootFs.delete(regionedits, 
false)) {
-1526  LOG.warn("Failed delete of old 
{}", regionedits);
-1527}
-1528  }
-1529  Writer w = 
createWriter(regionedits);
-1530  LOG.debug("Creating writer 
path={}", regionedits);
-1531  return new 
WriterAndPath(regionedits, w, entry.getKey().getSequenceId());
-1532}
-1533
-1534void filterCellByStore(Entry 
logEntry) {
-1535  Map<byte[], Long> maxSeqIdInStores =
-1536  
regionMaxSeqIdInStores.get(Bytes.toString(logEntry.getKey().getEncodedRegionName()));
-1537  if 
(MapUtils.isEmpty(maxSeqIdInStores)) {
-1538return;
-1539  }
-1540  // Create the array list for the 
cells that aren't filtered.
-1541  // We make the assumption that 
most cells will be kept.
-1542  ArrayList<Cell> keptCells = new ArrayList<>(logEntry.getEdit().getCells().size());
-1543  for (Cell cell : 
logEntry.getEdit().getCells()) {
-1544if 
(CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) {
-1545  keptCells.add(cell);
-1546} else {
-1547  byte[] family = 
CellUtil.cloneFamily(cell);
-1548  Long maxSeqId = 
maxSeqIdInStores.get(family);
-1549  // Do not skip cell even if 
maxSeqId is null. Maybe we are in a rolling upgrade,
-1550  // or the master was crashed 
before and we can not get the information.
-1551  if (maxSeqId == null || 
maxSeqId.longValue() < logEntry.getKey().getSequenceId()) {
-1552keptCells.add(cell);
-1553  }
-1554}
-1555  }
-1556
-1557  // Anything in the keptCells array 
list is still live.
-1558  // So rather than removing the 
cells from the array list
-1559  // which would be an O(n^2) 
operation, we just replace the list
-1560  
logEntry.getEdit().setCells(keptCells);
-1561}
-1562
-1563@Override
-1564public void append(RegionEntryBuffer 
buffer) throws IOException {
-1565  appendBuffer(buffer, true);
-1566}
-1567
-1568WriterAndPath appendBuffer(RegionEntryBuffer buffer, boolean reusable) throws IOException {
-1569  List<Entry> entries = buffer.entryBuffer;
-1570  if (entries.isEmpty()) {
-1571LOG.warn("got an empty buffer, 
skipping");
-1572
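
Two ideas in the deleted block above are worth pulling out. createWAP deletes a leftover edits file from a previous failed split before opening a new writer, and filterCellByStore drops cells whose store has already flushed past the entry's sequence id, building a fresh "kept" list rather than removing in place. A standalone distillation of the filtering step (types simplified: the real code keys on column-family bytes and keeps METAFAMILY cells unconditionally):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    final class SeqIdFilter { // stand-in, not WALSplitter itself
      static <C> List<C> keepUnflushed(List<C> cells, Map<C, Long> flushedUpTo, long entrySeqId) {
        // Assume most cells survive, so size the new list like the old one.
        List<C> kept = new ArrayList<>(cells.size());
        for (C cell : cells) {
          Long maxSeqId = flushedUpTo.get(cell);
          // No flush point recorded (rolling upgrade, or the master crashed)
          // or the entry is newer than the flush point: keep it for replay.
          if (maxSeqId == null || maxSeqId < entrySeqId) {
            kept.add(cell);
          }
        }
        // Swapping in the new list is O(n); removing from the old one in place
        // would be an O(n^2) operation.
        return kept;
      }
    }
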

[12/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

2018-08-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
index f2fd195..b293714 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
@@ -619,1696 +619,1698 @@
 611try {
 612  long procId =
 613  
master.createTable(tableDescriptor, splitKeys, req.getNonceGroup(), 
req.getNonce());
-614  return 
CreateTableResponse.newBuilder().setProcId(procId).build();
-615} catch (IOException ioe) {
-616  throw new ServiceException(ioe);
-617}
-618  }
-619
-620  @Override
-621  public DeleteColumnResponse 
deleteColumn(RpcController controller,
-622  DeleteColumnRequest req) throws 
ServiceException {
-623try {
-624  long procId = 
master.deleteColumn(
-625
ProtobufUtil.toTableName(req.getTableName()),
-626
req.getColumnName().toByteArray(),
-627req.getNonceGroup(),
-628req.getNonce());
-629  if (procId == -1) {
-630// This mean operation was not 
performed in server, so do not set any procId
-631return 
DeleteColumnResponse.newBuilder().build();
-632  } else {
-633return 
DeleteColumnResponse.newBuilder().setProcId(procId).build();
-634  }
-635} catch (IOException ioe) {
-636  throw new ServiceException(ioe);
-637}
-638  }
-639
-640  @Override
-641  public DeleteNamespaceResponse 
deleteNamespace(RpcController controller,
-642  DeleteNamespaceRequest request) 
throws ServiceException {
-643try {
-644  long procId = 
master.deleteNamespace(
-645request.getNamespaceName(),
-646request.getNonceGroup(),
-647request.getNonce());
-648  return 
DeleteNamespaceResponse.newBuilder().setProcId(procId).build();
-649} catch (IOException e) {
-650  throw new ServiceException(e);
-651}
-652  }
-653
-654  /**
-655   * Execute Delete Snapshot operation.
-656   * @return DeleteSnapshotResponse (a 
protobuf wrapped void) if the snapshot existed and was
-657   *deleted properly.
-658   * @throws ServiceException wrapping 
SnapshotDoesNotExistException if specified snapshot did not
-659   *exist.
-660   */
-661  @Override
-662  public DeleteSnapshotResponse 
deleteSnapshot(RpcController controller,
-663  DeleteSnapshotRequest request) 
throws ServiceException {
-664try {
-665  master.checkInitialized();
-666  
master.snapshotManager.checkSnapshotSupport();
-667
-668  
LOG.info(master.getClientIdAuditPrefix() + " delete " + 
request.getSnapshot());
-669  
master.snapshotManager.deleteSnapshot(request.getSnapshot());
-670  return 
DeleteSnapshotResponse.newBuilder().build();
-671} catch (IOException e) {
-672  throw new ServiceException(e);
-673}
-674  }
-675
-676  @Override
-677  public DeleteTableResponse 
deleteTable(RpcController controller,
-678  DeleteTableRequest request) throws 
ServiceException {
-679try {
-680  long procId = 
master.deleteTable(ProtobufUtil.toTableName(
-681  request.getTableName()), 
request.getNonceGroup(), request.getNonce());
-682  return 
DeleteTableResponse.newBuilder().setProcId(procId).build();
-683} catch (IOException ioe) {
-684  throw new ServiceException(ioe);
-685}
-686  }
-687
-688  @Override
-689  public TruncateTableResponse 
truncateTable(RpcController controller, TruncateTableRequest request)
-690  throws ServiceException {
-691try {
-692  long procId = 
master.truncateTable(
-693
ProtobufUtil.toTableName(request.getTableName()),
-694request.getPreserveSplits(),
-695request.getNonceGroup(),
-696request.getNonce());
-697  return 
TruncateTableResponse.newBuilder().setProcId(procId).build();
-698} catch (IOException ioe) {
-699  throw new ServiceException(ioe);
-700}
-701  }
-702
-703  @Override
-704  public DisableTableResponse 
disableTable(RpcController controller,
-705  DisableTableRequest request) throws 
ServiceException {
-706try {
-707  long procId = 
master.disableTable(
-708
ProtobufUtil.toTableName(request.getTableName()),
-709request.getNonceGroup(),
-710request.getNonce());
-711  return 
DisableTableResponse.newBuilder().setProcId(procId).build();
-712} catch (IOException ioe) {
-713  throw new ServiceException(ioe);
-714}
-715  }
-716
-717  @Override
-718  public EnableCatalogJanitorResponse 
enableCatalogJanitor(RpcController c,
-719  EnableCatalogJanitorRequest req) 
throws ServiceException {
-720
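
Every RPC method deleted above follows one template: run the master operation, wrap the returned procedure id in the protobuf response, and convert any IOException into the RPC-layer ServiceException, with procId == -1 meaning no operation was started. A self-contained distillation under stand-in types (Master and ServiceException here are toys, not the HBase or protobuf classes):

    import java.io.IOException;

    final class RpcTemplate {
      interface Master { long deleteColumn() throws IOException; }

      static class ServiceException extends Exception {
        ServiceException(Throwable cause) { super(cause); }
      }

      static String deleteColumn(Master master) throws ServiceException {
        try {
          long procId = master.deleteColumn();
          // -1 signals the server performed nothing, so no procId is set.
          return procId == -1
              ? "DeleteColumnResponse{}"
              : "DeleteColumnResponse{procId=" + procId + "}";
        } catch (IOException ioe) {
          throw new ServiceException(ioe);
        }
      }
    }
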

[12/51] [partial] hbase-site git commit: Published site at ba5d1c1f28301adc99019d9d6c4a04fac98ae511.

2018-07-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/804782f0/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index c586a97..398cbf4 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -514,14 +514,14 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">Enum<E> (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">Comparable<T>, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.util.PoolMap.PoolType
 org.apache.hadoop.hbase.util.Bytes.LexicographicalComparerHolder.PureJavaComparer
 (implements org.apache.hadoop.hbase.util.Bytes.Comparer<T>)
-org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType
+org.apache.hadoop.hbase.util.PrettyPrinter.Unit
 org.apache.hadoop.hbase.util.Order
+org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType
 org.apache.hadoop.hbase.util.Bytes.LexicographicalComparerHolder.UnsafeComparer
 (implements org.apache.hadoop.hbase.util.Bytes.Comparer<T>)
 org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE
+org.apache.hadoop.hbase.util.PoolMap.PoolType
 org.apache.hadoop.hbase.util.ChecksumType
-org.apache.hadoop.hbase.util.PrettyPrinter.Unit
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/804782f0/devapidocs/overview-tree.html
--
diff --git a/devapidocs/overview-tree.html b/devapidocs/overview-tree.html
index e89778a..2f9ca85 100644
--- a/devapidocs/overview-tree.html
+++ b/devapidocs/overview-tree.html
@@ -340,6 +340,7 @@
 org.apache.hadoop.hbase.backup.mapreduce.MapReduceBackupCopyJob.SnapshotCopy
 
 
+org.apache.hadoop.hbase.tool.HFileContentValidator
 org.apache.hadoop.hbase.util.RegionMover
 org.apache.hadoop.hbase.backup.RestoreDriver
 org.apache.hadoop.hbase.snapshot.SnapshotInfo
@@ -1442,7 +1443,7 @@
 
 org.apache.hadoop.hbase.procedure2.util.DelayedUtil.DelayedContainerWithTimestamp<T>
 
-org.apache.hadoop.hbase.procedure2.DelayedProcedure
+org.apache.hadoop.hbase.procedure2.DelayedProcedure<TEnvironment>
 org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.BufferNode 
(implements org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteNode<TEnv,TRemote>)
 org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.DelayedTask
 
@@ -2735,7 +2736,7 @@
 
 
 org.apache.hadoop.hbase.procedure2.ProcedureExecutor<TEnvironment>
-org.apache.hadoop.hbase.procedure2.ProcedureExecutor.CompletedProcedureRetainer
+org.apache.hadoop.hbase.procedure2.ProcedureExecutor.CompletedProcedureRetainer<TEnvironment>
 org.apache.hadoop.hbase.procedure2.ProcedureExecutor.Testing
 org.apache.hadoop.hbase.procedure.ProcedureManager
 
@@ -3178,7 +3179,7 @@
 org.apache.hadoop.hbase.client.RetryingTimeTracker
 org.apache.hadoop.hbase.regionserver.wal.RingBufferTruck
 org.apache.hadoop.hbase.util.RollingStatCalculator
-org.apache.hadoop.hbase.procedure2.RootProcedureState
+org.apache.hadoop.hbase.procedure2.RootProcedureState<TEnvironment>
 org.apache.hadoop.hbase.mapred.RowCounter.RowCounterMapper (implements 
org.apache.hadoop.hbase.mapred.TableMap<K,V>)
 org.apache.hadoop.hbase.io.encoding.RowIndexEncoderV1
 org.apache.hadoop.hbase.io.encoding.RowIndexSeekerV1.SeekerState
@@ -3782,7 +3783,7 @@
 org.apache.hadoop.hbase.procedure2.ProcedureExecutor.KeepAliveWorkerThread
 
 
-org.apache.hadoop.hbase.procedure2.TimeoutExecutorThread
+org.apache.hadoop.hbase.procedure2.TimeoutExecutorThread<TEnvironment>
 
 
 org.apache.hadoop.hbase.wal.WALSplitter.WriterThread
@@ -3862,6 +3863,7 @@
 org.apache.hadoop.hbase.ipc.CellScannerButNoCodecException
 org.apache.hadoop.hbase.master.ClusterSchemaException
 org.apache.hadoop.hbase.codec.CodecException
+org.apache.hadoop.hbase.exceptions.ConnectionClosedException
 org.apache.hadoop.hbase.procedure2.store.wal.CorruptedWALProcedureStoreException
 org.apache.hadoop.hbase.regionserver.wal.DamagedWALException
 org.apache.hadoop.hbase.DoNotRetryIOException

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/804782f0/devapidocs/serialized-form.html
--
diff --git a/devapidocs/serialized-form.html b/devapidocs/serialized-form.html
index 1202851..698a2f9 100644
--- a/devapidocs/serialized-form.html
+++ b/devapidocs/serialized-form.html
@@ -560,6 +560,15 @@
 
 Packageorg.apache.hadoop.hbase.exceptions
 
+
+
+
+Class org.apache.hadoop.hbase.exceptions.ConnectionClosedException
 extends HBaseIOException implements 
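
The serialized-form entry above pins down the new exception's shape. A sketch of the class as the diff records it; only the name, package, and "extends HBaseIOException" come from the diff, so the constructor below is an assumption:

    package org.apache.hadoop.hbase.exceptions;

    import org.apache.hadoop.hbase.HBaseIOException;

    // Presumably thrown when an operation is attempted on a closed connection.
    public class ConnectionClosedException extends HBaseIOException {
      public ConnectionClosedException(String message) {
        super(message); // constructor set is assumed, not shown in the diff
      }
    }
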

[12/51] [partial] hbase-site git commit: Published site at b4759ce6e72f50ccd9d410bd5917dc5a515414f1.

2018-07-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/df8fd1d3/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RegionReplicaRackCandidateGenerator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RegionReplicaRackCandidateGenerator.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RegionReplicaRackCandidateGenerator.html
index 233dba3..91b9055 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RegionReplicaRackCandidateGenerator.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RegionReplicaRackCandidateGenerator.html
@@ -540,1205 +540,1204 @@
 532  
sm.getRegionMetrics().forEach((byte[] regionName, RegionMetrics rm) -> {
 533Deque<BalancerRegionLoad> rLoads = oldLoads.get(Bytes.toString(regionName));
 534if (rLoads == null) {
-535  // There was nothing there
-536  rLoads = new ArrayDeque<>();
-537} else if (rLoads.size() >= numRegionLoadsToRemember) {
-538  rLoads.remove();
-539}
-540rLoads.add(new 
BalancerRegionLoad(rm));
-541
loads.put(Bytes.toString(regionName), rLoads);
-542  });
-543});
-544
-545for(CostFromRegionLoadFunction cost : 
regionLoadFunctions) {
-546  cost.setLoads(loads);
-547}
-548  }
-549
-550  protected void initCosts(Cluster 
cluster) {
-551for (CostFunction c:costFunctions) 
{
-552  c.init(cluster);
-553}
-554  }
-555
-556  protected void 
updateCostsWithAction(Cluster cluster, Action action) {
-557for (CostFunction c : costFunctions) 
{
-558  c.postAction(action);
-559}
-560  }
-561
-562  /**
-563   * Get the names of the cost 
functions
-564   */
-565  public String[] getCostFunctionNames() 
{
-566if (costFunctions == null) return 
null;
-567String[] ret = new 
String[costFunctions.length];
-568for (int i = 0; i < costFunctions.length; i++) {
-569  CostFunction c = 
costFunctions[i];
-570  ret[i] = 
c.getClass().getSimpleName();
-571}
-572
-573return ret;
-574  }
-575
-576  /**
-577   * This is the main cost function.  It 
will compute a cost associated with a proposed cluster
-578   * state.  All different costs will be 
combined with their multipliers to produce a double cost.
-579   *
-580   * @param cluster The state of the 
cluster
-581   * @param previousCost the previous 
cost. This is used as an early out.
-582   * @return a double of a cost 
associated with the proposed cluster state.  This cost is an
-583   * aggregate of all individual 
cost functions.
-584   */
-585  protected double computeCost(Cluster 
cluster, double previousCost) {
-586double total = 0;
-587
-588for (int i = 0; i < costFunctions.length; i++) {
-589  CostFunction c = 
costFunctions[i];
-590  this.tempFunctionCosts[i] = 0.0;
-591
-592  if (c.getMultiplier() <= 0) {
-593continue;
-594  }
-595
-596  Float multiplier = 
c.getMultiplier();
-597  Double cost = c.cost();
-598
-599  this.tempFunctionCosts[i] = 
multiplier*cost;
-600  total += 
this.tempFunctionCosts[i];
-601
-602  if (total > previousCost) {
-603break;
-604  }
-605}
-606
-607return total;
-608  }
-609
-610  /** Generates a candidate action to be 
applied to the cluster for cost function search */
-611  abstract static class 
CandidateGenerator {
-612abstract Cluster.Action 
generate(Cluster cluster);
-613
-614/**
-615 * From a list of regions pick a 
random one. Null can be returned which
-616 * {@link 
StochasticLoadBalancer#balanceCluster(Map)} recognize as signal to try a region 
move
-617 * rather than swap.
-618 *
-619 * @param clusterThe state of 
the cluster
-620 * @param server index of the 
server
-621 * @param chanceOfNoSwap Chance that 
this will decide to try a move rather
-622 *   than a 
swap.
-623 * @return a random {@link 
RegionInfo} or null if an asymmetrical move is
-624 * suggested.
-625 */
-626protected int 
pickRandomRegion(Cluster cluster, int server, double chanceOfNoSwap) {
-627  // Check to see if this is just a 
move.
-628  if (cluster.regionsPerServer[server].length == 0 || RANDOM.nextFloat() < chanceOfNoSwap) {
-629// signal a move only.
-630return -1;
-631  }
-632  int rand = 
RANDOM.nextInt(cluster.regionsPerServer[server].length);
-633  return 
cluster.regionsPerServer[server][rand];
-634
-635}
-636protected int 
pickRandomServer(Cluster cluster) {
-637  if (cluster.numServers < 1) {
-638return -1;
-639  }
-640
-641  return 
RANDOM.nextInt(cluster.numServers);
-642}
-643
-644protected int pickRandomRack(Cluster 
cluster) {
-645  
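
The deleted computeCost body above is the heart of the balancer: a weighted sum over cost functions, skipping non-positive multipliers, with an early exit once the running total already exceeds the previous cost. A standalone restatement (toy interface, not the HBase CostFunction class):

    final class CostSum {
      interface CostFunction {
        float multiplier();
        double cost();
      }

      static double computeCost(CostFunction[] fns, double previousCost) {
        double total = 0;
        for (CostFunction c : fns) {
          if (c.multiplier() <= 0) {
            continue; // disabled cost function contributes nothing
          }
          total += c.multiplier() * c.cost();
          if (total > previousCost) {
            break; // early out: this candidate state can no longer win
          }
        }
        return total;
      }
    }
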

[12/51] [partial] hbase-site git commit: Published site at e66a6603e36ecd67237ca16acd5e2de03f0d372d.

2018-07-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0c6f447e/apidocs/org/apache/hadoop/hbase/UnknownRegionException.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/UnknownRegionException.html 
b/apidocs/org/apache/hadoop/hbase/UnknownRegionException.html
index 63a5803..215bfca 100644
--- a/apidocs/org/apache/hadoop/hbase/UnknownRegionException.html
+++ b/apidocs/org/apache/hadoop/hbase/UnknownRegionException.html
@@ -1,6 +1,6 @@
 http://www.w3.org/TR/html4/loose.dtd;>
 
-
+
 
 
 
@@ -20,38 +20,38 @@
 //-->
 
 
-您的浏览器已禁用 JavaScript。
+JavaScript is disabled on your browser.
 
 
 
 
 
-跳过导航链接
+Skip navigation links
 
 
 
-
-概览
-程序包
+类
-使用
-树
-已过时
-索引
-帮助
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
 
 
 
 
-上一个类
-下一个类
+PrevClass
+NextClass
 
 
-框架
-无框架
+Frames
+NoFrames
 
 
-所有类
+AllClasses
 
 
 
 
org.apache.hadoop.hbase
-

类 UnknownRegionException

+

Class UnknownRegionException

    -
  • https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="java.lang中的类或接口">java.lang.Object
  • +
  • https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true"; title="class or interface in java.lang">java.lang.Object
    • -
    • https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true"; title="java.lang中的类或接口">java.lang.Throwable
    • +
    • https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true"; title="class or interface in java.lang">java.lang.Throwable
      • -
      • https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true"; title="java.lang中的类或接口">java.lang.Exception
      • +
      • https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true"; title="class or interface in java.lang">java.lang.Exception
        • -
        • https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="java.io中的类或接口">java.io.IOException
        • +
        • https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true"; title="class or interface in java.io">java.io.IOException
          • -
          • org.apache.hadoop.hbase.HBaseIOException
          • +
          • [12/51] [partial] hbase-site git commit: Published site at e66a6603e36ecd67237ca16acd5e2de03f0d372d.
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5427a45e/apidocs/org/apache/hadoop/hbase/TableNotDisabledException.html
            --
            diff --git a/apidocs/org/apache/hadoop/hbase/TableNotDisabledException.html 
            b/apidocs/org/apache/hadoop/hbase/TableNotDisabledException.html
            index 4c5a7f0..0a51c6a 100644
            --- a/apidocs/org/apache/hadoop/hbase/TableNotDisabledException.html
            +++ b/apidocs/org/apache/hadoop/hbase/TableNotDisabledException.html
            @@ -1,6 +1,6 @@
             http://www.w3.org/TR/html4/loose.dtd;>
             
            -
            +
             
             
             
            @@ -20,38 +20,38 @@
             //-->
             
             
            -JavaScript is disabled on your browser.
            +您的浏览器已禁用 JavaScript。
             
             
             
             
             
            -Skip navigation links
            +跳过导航链接
             
             
             
            -
            -Overview
            -Package
            -Class
            -Use
            -Tree
            -Deprecated
            -Index
            -Help
            +
            +概览
            +程序包
+类
            +使用
            +树
            +已过时
            +索引
            +帮助
             
             
             
             
            -PrevClass
            -NextClass
            +上一个类
            +下一个类
             
             
            -Frames
            -NoFrames
            +框架
            +无框架
             
             
            -AllClasses
            +所有类
             
             
             
             
            org.apache.hadoop.hbase
            -

            Class TableNotDisabledException

            +

类 TableNotDisabledException


            [12/51] [partial] hbase-site git commit: Published site at 0f23784182ab88649de340d75804e0ff20dcd0fc.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bcb555af/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html
            --
            diff --git 
            a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html
             
            b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html
            index 05e032c..40ef9f4 100644
            --- 
            a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html
            +++ 
            b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html
            @@ -25,767 +25,805 @@
             017 */
             018package 
            org.apache.hadoop.hbase.io.asyncfs;
             019
            -020import static 
            org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleState.READER_IDLE;
            -021import static 
            org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY;
            +020import static 
            org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY;
            +021import static 
            org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleState.READER_IDLE;
             022
            -023import 
            org.apache.hbase.thirdparty.com.google.common.base.Charsets;
            -024import 
            org.apache.hbase.thirdparty.com.google.common.base.Throwables;
            -025import 
            org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
            -026import 
            org.apache.hbase.thirdparty.com.google.common.collect.Maps;
            -027import 
            com.google.protobuf.CodedOutputStream;
            -028
            -029import 
            org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;
            -030import 
            org.apache.hbase.thirdparty.io.netty.buffer.ByteBufOutputStream;
            -031import 
            org.apache.hbase.thirdparty.io.netty.buffer.CompositeByteBuf;
            -032import 
            org.apache.hbase.thirdparty.io.netty.buffer.Unpooled;
            -033import 
            org.apache.hbase.thirdparty.io.netty.channel.Channel;
            -034import 
            org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler;
            -035import 
            org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext;
            -036import 
            org.apache.hbase.thirdparty.io.netty.channel.ChannelOutboundHandlerAdapter;
            -037import 
            org.apache.hbase.thirdparty.io.netty.channel.ChannelPipeline;
            -038import 
            org.apache.hbase.thirdparty.io.netty.channel.ChannelPromise;
            -039import 
            org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler;
            -040import 
            org.apache.hbase.thirdparty.io.netty.handler.codec.LengthFieldBasedFrameDecoder;
            -041import 
            org.apache.hbase.thirdparty.io.netty.handler.codec.MessageToByteEncoder;
            -042import 
            org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufDecoder;
            -043import 
            org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
            -044import 
            org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateEvent;
            -045import 
            org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateHandler;
            -046import 
            org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise;
            -047
            -048import java.io.IOException;
            -049import java.lang.reflect.Field;
            -050import 
            java.lang.reflect.InvocationTargetException;
            -051import java.lang.reflect.Method;
            -052import java.net.InetAddress;
            -053import java.net.InetSocketAddress;
            -054import java.nio.ByteBuffer;
            -055import 
            java.security.GeneralSecurityException;
            -056import java.util.Arrays;
            -057import java.util.Collections;
            -058import java.util.List;
            -059import java.util.Map;
            -060import java.util.Set;
            -061import java.util.concurrent.TimeUnit;
            -062import 
            java.util.concurrent.atomic.AtomicBoolean;
            -063
            -064import 
            javax.security.auth.callback.Callback;
            -065import 
            javax.security.auth.callback.CallbackHandler;
            -066import 
            javax.security.auth.callback.NameCallback;
            -067import 
            javax.security.auth.callback.PasswordCallback;
            -068import 
            javax.security.auth.callback.UnsupportedCallbackException;
            -069import 
            javax.security.sasl.RealmCallback;
            -070import 
            javax.security.sasl.RealmChoiceCallback;
            -071import javax.security.sasl.Sasl;
            -072import javax.security.sasl.SaslClient;
            -073import 
            javax.security.sasl.SaslException;
            -074
            -075import 
            org.apache.commons.codec.binary.Base64;
            -076import 
            org.apache.commons.lang3.StringUtils;
            -077import 
            org.apache.hadoop.conf.Configuration;
            -078import 
            org.apache.hadoop.crypto.CipherOption;
            -079import 
            org.apache.hadoop.crypto.CipherSuite;
            -080import 
            org.apache.hadoop.crypto.CryptoCodec;
            -081import 
            org.apache.hadoop.crypto.Decryptor;
            -082import 
            org.apache.hadoop.crypto.Encryptor;
            -083import 
            org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
            -084import 
            org.apache.hadoop.fs.FileEncryptionInfo;
            -085import 
            org.apache.yetus.audience.InterfaceAudience;
            -086import org.slf4j.Logger;
            -087import org.slf4j.LoggerFactory;
            -088
            -089import com.google.protobuf.ByteString;
            -090import 
            org.apache.hadoop.hdfs.DFSClient;
            -091import 
            org.apache.hadoop.hdfs.protocol.DatanodeInfo;
            -092import 
            

            [12/51] [partial] hbase-site git commit: Published site at 85b41f36e01214b6485c9352875c84ebf877dab3.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a5c66de0/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
            --
            diff --git 
            a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
             
            b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
            index c10cfbf..a3e2f4a 100644
            --- 
            a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
            +++ 
            b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
            @@ -3371,7 +3371,7 @@
             3363private V result = null;
             3364
             3365private final HBaseAdmin admin;
            -3366private final Long procId;
            +3366protected final Long procId;
             3367
             3368public ProcedureFuture(final 
            HBaseAdmin admin, final Long procId) {
             3369  this.admin = admin;
            @@ -3653,653 +3653,651 @@
             3645 * @return a description of the 
            operation
             3646 */
             3647protected String getDescription() 
            {
            -3648  return "Operation: " + 
            getOperationType() + ", "
            -3649  + "Table Name: " + 
            tableName.getNameWithNamespaceInclAsString();
            -3650
            -3651}
            -3652
            -3653protected abstract class 
            TableWaitForStateCallable implements WaitForStateCallable {
            -3654  @Override
            -3655  public void 
            throwInterruptedException() throws InterruptedIOException {
            -3656throw new 
            InterruptedIOException("Interrupted while waiting for operation: "
            -3657+ getOperationType() + " on 
            table: " + tableName.getNameWithNamespaceInclAsString());
            -3658  }
            -3659
            -3660  @Override
            -3661  public void 
            throwTimeoutException(long elapsedTime) throws TimeoutException {
            -3662throw new TimeoutException("The 
            operation: " + getOperationType() + " on table: " +
            -3663tableName.getNameAsString() 
            + " has not completed after " + elapsedTime + "ms");
            -3664  }
            -3665}
            -3666
            -3667@Override
            -3668protected V 
            postOperationResult(final V result, final long deadlineTs)
            -3669throws IOException, 
            TimeoutException {
            -3670  LOG.info(getDescription() + " 
            completed");
            -3671  return 
            super.postOperationResult(result, deadlineTs);
            -3672}
            -3673
            -3674@Override
            -3675protected V 
            postOperationFailure(final IOException exception, final long deadlineTs)
            -3676throws IOException, 
            TimeoutException {
            -3677  LOG.info(getDescription() + " 
            failed with " + exception.getMessage());
            -3678  return 
            super.postOperationFailure(exception, deadlineTs);
            -3679}
            -3680
            -3681protected void 
            waitForTableEnabled(final long deadlineTs)
            -3682throws IOException, 
            TimeoutException {
            -3683  waitForState(deadlineTs, new 
            TableWaitForStateCallable() {
            -3684@Override
            -3685public boolean checkState(int 
            tries) throws IOException {
            -3686  try {
            -3687if 
            (getAdmin().isTableAvailable(tableName)) {
            -3688  return true;
            -3689}
            -3690  } catch 
            (TableNotFoundException tnfe) {
            -3691LOG.debug("Table " + 
            tableName.getNameWithNamespaceInclAsString()
            -3692+ " was not enabled, 
            sleeping. tries=" + tries);
            -3693  }
            -3694  return false;
            -3695}
            -3696  });
            -3697}
            -3698
            -3699protected void 
            waitForTableDisabled(final long deadlineTs)
            -3700throws IOException, 
            TimeoutException {
            -3701  waitForState(deadlineTs, new 
            TableWaitForStateCallable() {
            -3702@Override
            -3703public boolean checkState(int 
            tries) throws IOException {
            -3704  return 
            getAdmin().isTableDisabled(tableName);
            -3705}
            -3706  });
            -3707}
            -3708
            -3709protected void 
            waitTableNotFound(final long deadlineTs)
            -3710throws IOException, 
            TimeoutException {
            -3711  waitForState(deadlineTs, new 
            TableWaitForStateCallable() {
            -3712@Override
            -3713public boolean checkState(int 
            tries) throws IOException {
            -3714  return 
            !getAdmin().tableExists(tableName);
            -3715}
            -3716  });
            -3717}
            -3718
            -3719protected void 
            waitForSchemaUpdate(final long deadlineTs)
            -3720throws IOException, 
            TimeoutException {
            -3721  waitForState(deadlineTs, new 
            TableWaitForStateCallable() {
            -3722@Override
            -3723public boolean checkState(int 
            tries) throws IOException {
            -3724  return 
            getAdmin().getAlterStatus(tableName).getFirst() == 0;
            -3725}
            -3726  });
            -3727}
            -3728
            -3729protected void 
            waitForAllRegionsOnline(final long deadlineTs, final byte[][] splitKeys)
            -3730throws IOException, 
            TimeoutException {
            -3731  final TableDescriptor desc = 
            getTableDescriptor();
            -3732  final AtomicInteger actualRegCount 
            = new AtomicInteger(0);
            -3733  final MetaTableAccessor.Visitor 
            visitor = new MetaTableAccessor.Visitor() {
            -3734@Override
            -3735public boolean visit(Result 
            rowResult) throws IOException {
            

            [12/51] [partial] hbase-site git commit: Published site at 6198e1fc7dfa85c3bc6b2855f9a5fb5f4b2354ff.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb5d2c62/devapidocs/org/apache/hadoop/hbase/master/MasterFileSystem.html
            --
            diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterFileSystem.html 
            b/devapidocs/org/apache/hadoop/hbase/master/MasterFileSystem.html
            index 3cf70b5..a0eff96 100644
            --- a/devapidocs/org/apache/hadoop/hbase/master/MasterFileSystem.html
            +++ b/devapidocs/org/apache/hadoop/hbase/master/MasterFileSystem.html
            @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
             
             
             @InterfaceAudience.Private
            -public class MasterFileSystem
            +public class MasterFileSystem
             extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
             title="class or interface in java.lang">Object
             This class abstracts a bunch of operations the HMaster 
            needs to interact with
              the underlying file system like creating the initial layout, checking file
            @@ -301,7 +301,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             getTempDir()
             
             
            -protected 
            org.apache.hadoop.fs.FileSystem
            +org.apache.hadoop.fs.FileSystem
             getWALFileSystem()
             
             
            @@ -351,7 +351,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             
             
             LOG
            -private static finalorg.slf4j.Logger LOG
            +private static finalorg.slf4j.Logger LOG
             
             
             
            @@ -360,7 +360,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             
             
             HBASE_DIR_PERMS
            -public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
             title="class or interface in java.lang">String HBASE_DIR_PERMS
            +public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
             title="class or interface in java.lang">String HBASE_DIR_PERMS
             Parameter name for HBase instance root directory 
            permission
             
             See Also:
            @@ -374,7 +374,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             
             
             HBASE_WAL_DIR_PERMS
            -public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
             title="class or interface in java.lang">String HBASE_WAL_DIR_PERMS
            +public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
             title="class or interface in java.lang">String HBASE_WAL_DIR_PERMS
             Parameter name for HBase WAL directory permission
             
             See Also:
            @@ -388,7 +388,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             
             
             conf
            -private finalorg.apache.hadoop.conf.Configuration conf
            +private finalorg.apache.hadoop.conf.Configuration conf
             
             
             
            @@ -397,7 +397,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             
             
             clusterId
            -privateClusterId clusterId
            +privateClusterId clusterId
             
             
             
            @@ -406,7 +406,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             
             
             fs
            -private finalorg.apache.hadoop.fs.FileSystem fs
            +private finalorg.apache.hadoop.fs.FileSystem fs
             
             
             
            @@ -415,7 +415,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             
             
             walFs
            -private finalorg.apache.hadoop.fs.FileSystem walFs
            +private finalorg.apache.hadoop.fs.FileSystem walFs
             
             
             
            @@ -424,7 +424,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             
             
             rootdir
            -private finalorg.apache.hadoop.fs.Path rootdir
            +private finalorg.apache.hadoop.fs.Path rootdir
             
             
             
            @@ -433,7 +433,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             
             
             tempdir
            -private finalorg.apache.hadoop.fs.Path tempdir
            +private finalorg.apache.hadoop.fs.Path tempdir
             
             
             
            @@ -442,7 +442,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             
             
             walRootDir
            -private finalorg.apache.hadoop.fs.Path walRootDir
            +private finalorg.apache.hadoop.fs.Path walRootDir
             
             
             
            @@ -451,7 +451,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             
             
             secureRootSubDirPerms
            -private finalorg.apache.hadoop.fs.permission.FsPermission secureRootSubDirPerms
            +private finalorg.apache.hadoop.fs.permission.FsPermission secureRootSubDirPerms
             
             
             
            @@ -460,7 +460,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             
             
             secureRootFilePerms
            -private finalorg.apache.hadoop.fs.permission.FsPermission secureRootFilePerms
            +private finalorg.apache.hadoop.fs.permission.FsPermission secureRootFilePerms
             
             
             
            @@ -469,7 +469,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             
             
             HiddenDirPerms
            -private finalorg.apache.hadoop.fs.permission.FsPermission HiddenDirPerms
            +private finalorg.apache.hadoop.fs.permission.FsPermission HiddenDirPerms
             
             
             
            @@ -478,7 +478,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             
             
             isSecurityEnabled
            -privateboolean isSecurityEnabled
            +privateboolean isSecurityEnabled
             
             
             
            @@ -495,7 +495,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             
             
             MasterFileSystem
            

            [12/51] [partial] hbase-site git commit: Published site at 14087cc919da9f2e0b1a68f701f6365ad9d1d71f.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55ce8d97/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.html
            --
            diff --git 
            a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.html
             
            b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.html
            index 4d28f9d..ca71c62 100644
            --- 
            a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.html
            +++ 
            b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.html
            @@ -18,7 +18,7 @@
             catch(err) {
             }
             //-->
            -var methods = 
            {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":6,"i68":6,"i69":6,"i70":6,"i71":6,"i72":6,"i73":6,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6};
            +var methods = 
            {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":6,"i68":6,"i69":6,"i70":6,"i71":6,"i72":6,"i73":6,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":6};
             var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
            Methods"],4:["t3","Abstract Methods"]};
             var altColor = "altColor";
             var rowColor = "rowColor";
            @@ -295,420 +295,426 @@ public interface 
             long
            -getDataHitCount()
            +getCpRequestsCount()
            +Get the number of coprocessor requests to regions hosted on 
            this region server.
            +
             
             
             long
            +getDataHitCount()
            +
            +
            +long
             getDataInMemoryWithoutWAL()
Amount of data in the memstore but not in the WAL because 
            mutations explicitly had their
              WAL turned off.
             
             
            -
            +
             long
             getDataMissCount()
             
            -
            +
             long
             getDeleteFamilyBloomHitCount()
             
            -
            +
             long
             getDeleteFamilyBloomMissCount()
             
            -
            +
             long
             getFileInfoHitCount()
             
            -
            +
             long
             getFileInfoMissCount()
             
            -
            +
             long
             getFilteredReadRequestsCount()
             Get the number of filtered read requests to regions hosted 
            on this region server.
             
             
            -
            +
             long
             getFlushedCellsCount()
             Get the number of cells flushed to disk.
             
             
            -
            +
             long
             getFlushedCellsSize()
             Get the total amount of data flushed to disk, in 
            bytes.
             
             
            -
            +
             int
             getFlushQueueSize()
             Get the size of the flush queue.
             
             
            -
            +
             long
             getGeneralBloomMetaHitCount()
             
            -
            +
             long
             getGeneralBloomMetaMissCount()
             
            -
            +
             long
             getHedgedReadOps()
             
            -
            +
             long
             getHedgedReadWins()
             
            -
            +
             long
             getIntermediateIndexHitCount()
             
            -
            +
             long
             getIntermediateIndexMissCount()
             
            -
            +
             long
             getL1CacheHitCount()
             Hit count of L1 cache.
             
             
            -
            +
             double
             getL1CacheHitRatio()
             Hit ratio of L1 cache.
             
             
            -
            +
             long
             getL1CacheMissCount()
             Miss count of L1 cache.
             
             
            -
            +
             double
             getL1CacheMissRatio()
             Miss ratio of L1 cache.
             
             
            -
            +
             long
             getL2CacheHitCount()
             Hit count of L2 cache.
             
             
            -
            +
             double
             getL2CacheHitRatio()
             Hit ratio of L2 cache.
             
             
            -
            +
             long
             getL2CacheMissCount()
             Miss count of L2 cache.
             
             
            -
            +
             double
             getL2CacheMissRatio()
             Miss ratio of L2 cache.
             
             
            -
            +
             int
             getLargeCompactionQueueSize()
             
            -
            +
             long
             getLeafIndexHitCount()
             
            -
            +
             long
             getLeafIndexMissCount()
             
            -
            +
             long
             getMajorCompactedCellsCount()
             Get the number of cells processed during major 
            compactions.
             
             
            -
            +
             long
             getMajorCompactedCellsSize()
             Get the total amount of data processed during major 
            compactions, in bytes.
             
             
            -
            +
             long
             getMaxStoreFileAge()
             
            -
            +
             long
             getMemStoreLimit()
             
            -
            +
             long
             getMemStoreSize()
             Get the size of the memstore on this region server.
             
             
            -
            +
             long
             getMetaHitCount()
             
            -
            +
             long
             getMetaMissCount()
             
            -
            +
             long
             getMinStoreFileAge()
             
            -
            +
             long
             getMobFileCacheAccessCount()
             Gets the count of accesses to the mob file cache.
             
             
            -
            +
             long
             getMobFileCacheCount()
             Gets the count of cached mob files.
             
             
            -
            +
             

            [12/51] [partial] hbase-site git commit: Published site at 72784c2d836a4b977667449d3adec5e8d15453f5.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2b11656f/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.html
            --
            diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.html 
            b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.html
            index b6e7636..592c2cc 100644
            --- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.html
            +++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.html
            @@ -356,3901 +356,3924 @@
 348  public Future<Void> modifyTableAsync(TableDescriptor td) throws IOException {
             349ModifyTableResponse response = 
            executeCallable(
             350  new 
MasterCallable<ModifyTableResponse>(getConnection(), 
            getRpcControllerFactory()) {
            -351@Override
            -352protected ModifyTableResponse 
            rpcCall() throws Exception {
            -353  
            setPriority(td.getTableName());
            -354  ModifyTableRequest request = 
            RequestConverter.buildModifyTableRequest(
            -355td.getTableName(), td, 
            ng.getNonceGroup(), ng.newNonce());
            -356  return 
            master.modifyTable(getRpcController(), request);
            -357}
            -358  });
            -359return new ModifyTableFuture(this, 
            td.getTableName(), response);
            -360  }
            -361
            -362  @Override
-363  public List<TableDescriptor> listTableDescriptorsByNamespace(byte[] name) throws IOException {
            -364return executeCallable(new 
MasterCallable<List<TableDescriptor>>(getConnection(),
            -365getRpcControllerFactory()) {
            -366  @Override
            -367  protected 
List<TableDescriptor> rpcCall() throws Exception {
            -368return 
            master.listTableDescriptorsByNamespace(getRpcController(),
            -369
            ListTableDescriptorsByNamespaceRequest.newBuilder()
            -370  
            .setNamespaceName(Bytes.toString(name)).build())
            -371.getTableSchemaList()
            -372.stream()
            -373
            .map(ProtobufUtil::toTableDescriptor)
            -374
            .collect(Collectors.toList());
            -375  }
            -376});
            -377  }
            -378
            -379  @Override
-380  public List<TableDescriptor> listTableDescriptors(List<TableName> tableNames) throws IOException {
            -381return executeCallable(new 
MasterCallable<List<TableDescriptor>>(getConnection(),
            -382getRpcControllerFactory()) {
            -383  @Override
            -384  protected 
List<TableDescriptor> rpcCall() throws Exception {
            -385GetTableDescriptorsRequest req 
            =
            -386
            RequestConverter.buildGetTableDescriptorsRequest(tableNames);
            -387  return 
            ProtobufUtil.toTableDescriptorList(master.getTableDescriptors(getRpcController(),
            -388  req));
            -389  }
            -390});
            -391  }
            -392
            -393  @Override
-394  public List<RegionInfo> getRegions(final ServerName sn) throws IOException {
            -395AdminService.BlockingInterface admin 
            = this.connection.getAdmin(sn);
            -396// TODO: There is no timeout on this 
            controller. Set one!
            -397HBaseRpcController controller = 
            rpcControllerFactory.newController();
            -398return 
            ProtobufUtil.getOnlineRegions(controller, admin);
            -399  }
            -400
            -401  @Override
-402  public List<RegionInfo> getRegions(TableName tableName) throws IOException {
            -403if 
            (TableName.isMetaTableName(tableName)) {
            -404  return 
            Arrays.asList(RegionInfoBuilder.FIRST_META_REGIONINFO);
            -405} else {
            -406  return 
            MetaTableAccessor.getTableRegions(connection, tableName, true);
            -407}
            -408  }
            -409
            -410  private static class 
AbortProcedureFuture extends ProcedureFuture<Boolean> {
            -411private boolean isAbortInProgress;
            -412
            -413public AbortProcedureFuture(
            -414final HBaseAdmin admin,
            -415final Long procId,
            -416final Boolean abortProcResponse) 
            {
            -417  super(admin, procId);
            -418  this.isAbortInProgress = 
            abortProcResponse;
            -419}
            -420
            -421@Override
            -422public Boolean get(long timeout, 
            TimeUnit unit)
            -423throws InterruptedException, 
            ExecutionException, TimeoutException {
            -424  if (!this.isAbortInProgress) {
            -425return false;
            -426  }
            -427  super.get(timeout, unit);
            -428  return true;
            -429}
            -430  }
            -431
            -432  /** @return Connection used by this 
            object. */
            -433  @Override
            -434  public Connection getConnection() {
            -435return connection;
            -436  }
            -437
            -438  @Override
            -439  public boolean tableExists(final 
            TableName tableName) throws IOException {
            -440return executeCallable(new 
            RpcRetryingCallableBoolean() {
            -441  @Override
            -442  protected Boolean rpcCall(int 
            callTimeout) throws Exception {
            -443return 
            MetaTableAccessor.tableExists(connection, tableName);
            -444  }
            -445});
            -446  }
            -447
            -448  @Override
            -449  public HTableDescriptor[] listTables() 
            throws IOException {
            -450return listTables((Pattern)null, 
            false);
            -451  }
            -452
            -453  @Override
            -454  public HTableDescriptor[] 
            listTables(Pattern pattern) throws IOException {
            -455return listTables(pattern, false);
            -456  }
            -457
            -458  @Override
            -459  public 
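For orientation, a minimal client-side sketch of how the Admin methods in this fragment are typically reached; the namespace and table names below are placeholders, not values taken from the diff. Each call funnels through executeCallable(), so the retrying MasterCallable pattern shown above applies uniformly.

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.util.Bytes;

public class ListDescriptorsExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      // Goes through the MasterCallable/executeCallable retry path above.
      List<TableDescriptor> descriptors =
          admin.listTableDescriptorsByNamespace(Bytes.toBytes("default"));
      for (TableDescriptor td : descriptors) {
        System.out.println(td.getTableName());
      }
      // tableExists() consults meta via a RpcRetryingCallable, as in the diff.
      boolean exists = admin.tableExists(TableName.valueOf("default:example"));
      System.out.println("exists=" + exists);
    }
  }
}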

[12/51] [partial] hbase-site git commit: Published site at 9101fc246f86445006bfbcdfda5cc495016dc280.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/65565d77/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
index eb16038..74bacd8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
@@ -6,7 +6,7 @@
 
 
 
-001/*
+001/**
 002 * Licensed to the Apache Software Foundation (ASF) under one
 003 * or more contributor license agreements.  See the NOTICE file
 004 * distributed with this work for additional information
@@ -48,692 +48,692 @@
 040import java.util.Map;
 041import java.util.Map.Entry;
 042import java.util.Objects;
-043import java.util.Set;
-044import java.util.concurrent.ExecutionException;
-045import java.util.concurrent.Future;
-046import java.util.concurrent.TimeUnit;
-047import java.util.concurrent.TimeoutException;
-048import java.util.concurrent.atomic.AtomicInteger;
-049import java.util.concurrent.atomic.AtomicReference;
-050import java.util.function.Function;
-051import java.util.regex.Pattern;
-052import java.util.stream.Collectors;
-053import javax.servlet.ServletException;
-054import javax.servlet.http.HttpServlet;
-055import javax.servlet.http.HttpServletRequest;
-056import javax.servlet.http.HttpServletResponse;
-057import org.apache.commons.lang3.StringUtils;
-058import org.apache.hadoop.conf.Configuration;
-059import org.apache.hadoop.fs.Path;
-060import org.apache.hadoop.hbase.ClusterId;
-061import org.apache.hadoop.hbase.ClusterMetrics;
-062import org.apache.hadoop.hbase.ClusterMetrics.Option;
-063import org.apache.hadoop.hbase.ClusterMetricsBuilder;
-064import org.apache.hadoop.hbase.DoNotRetryIOException;
-065import org.apache.hadoop.hbase.HBaseIOException;
-066import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-067import org.apache.hadoop.hbase.HConstants;
-068import org.apache.hadoop.hbase.InvalidFamilyOperationException;
-069import org.apache.hadoop.hbase.MasterNotRunningException;
-070import org.apache.hadoop.hbase.MetaTableAccessor;
-071import org.apache.hadoop.hbase.NamespaceDescriptor;
-072import org.apache.hadoop.hbase.PleaseHoldException;
-073import org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
-074import org.apache.hadoop.hbase.ScheduledChore;
-075import org.apache.hadoop.hbase.ServerName;
-076import org.apache.hadoop.hbase.TableDescriptors;
-077import org.apache.hadoop.hbase.TableName;
-078import org.apache.hadoop.hbase.TableNotDisabledException;
-079import org.apache.hadoop.hbase.TableNotFoundException;
-080import org.apache.hadoop.hbase.UnknownRegionException;
-081import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-082import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-083import org.apache.hadoop.hbase.client.MasterSwitchType;
-084import org.apache.hadoop.hbase.client.RegionInfo;
-085import org.apache.hadoop.hbase.client.Result;
-086import org.apache.hadoop.hbase.client.TableDescriptor;
-087import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-088import org.apache.hadoop.hbase.client.TableState;
-089import org.apache.hadoop.hbase.client.VersionInfoUtil;
-090import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-091import org.apache.hadoop.hbase.exceptions.DeserializationException;
-092import org.apache.hadoop.hbase.exceptions.MergeRegionException;
-093import org.apache.hadoop.hbase.executor.ExecutorType;
-094import org.apache.hadoop.hbase.favored.FavoredNodesManager;
-095import org.apache.hadoop.hbase.favored.FavoredNodesPromoter;
-096import org.apache.hadoop.hbase.http.InfoServer;
-097import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-098import org.apache.hadoop.hbase.ipc.RpcServer;
-099import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-100import org.apache.hadoop.hbase.log.HBaseMarkers;
-101import org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode;
-102import org.apache.hadoop.hbase.master.assignment.AssignmentManager;
-103import org.apache.hadoop.hbase.master.assignment.MergeTableRegionsProcedure;
-104import org.apache.hadoop.hbase.master.assignment.RegionStates;
-105import org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
-106import org.apache.hadoop.hbase.master.balancer.BalancerChore;
-107import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer;
-108import org.apache.hadoop.hbase.master.balancer.ClusterStatusChore;
-109import org.apache.hadoop.hbase.master.balancer.LoadBalancerFactory;
-110import org.apache.hadoop.hbase.master.cleaner.CleanerChore;
-111import org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
-112import 

[12/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringUncompressor.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringUncompressor.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringUncompressor.html
index 83c17c0..9df0225 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringUncompressor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringUncompressor.html
@@ -54,323 +54,362 @@
 046import org.apache.hadoop.io.IOUtils;
 047
 048import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
-049
+049import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
 050
-051/**
-052 * Compression in this class is lifted off Compressor/KeyValueCompression.
-053 * This is a pure coincidence... they are independent and don't have to be compatible.
-054 *
-055 * This codec is used at server side for writing cells to WAL as well as for sending edits
-056 * as part of the distributed splitting process.
-057 */
-058@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
-059  HBaseInterfaceAudience.PHOENIX, HBaseInterfaceAudience.CONFIG})
-060public class WALCellCodec implements Codec {
-061  /** Configuration key for the class to use when encoding cells in the WAL */
-062  public static final String WAL_CELL_CODEC_CLASS_KEY = "hbase.regionserver.wal.codec";
-063
-064  protected final CompressionContext compression;
-065  protected final ByteStringUncompressor statelessUncompressor = new ByteStringUncompressor() {
-066    @Override
-067    public byte[] uncompress(ByteString data, Dictionary dict) throws IOException {
-068      return WALCellCodec.uncompressByteString(data, dict);
-069    }
-070  };
-071
-072  /**
-073   * <b>All subclasses must implement a no argument constructor</b>
-074   */
-075  public WALCellCodec() {
-076    this.compression = null;
-077  }
-078
-079  /**
-080   * Default constructor - <b>all subclasses must implement a constructor with this signature </b>
-081   * if they are to be dynamically loaded from the {@link Configuration}.
-082   * @param conf configuration to configure <tt>this</tt>
-083   * @param compression compression the codec should support, can be <tt>null</tt> to indicate no
-084   *          compression
-085   */
-086  public WALCellCodec(Configuration conf, CompressionContext compression) {
-087    this.compression = compression;
-088  }
-089
-090  public static String getWALCellCodecClass(Configuration conf) {
-091    return conf.get(WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
-092  }
-093
-094  /**
-095   * Create and setup a {@link WALCellCodec} from the {@code cellCodecClsName} and
-096   * CompressionContext, if {@code cellCodecClsName} is specified.
-097   * Otherwise Cell Codec classname is read from {@link Configuration}.
-098   * Fully prepares the codec for use.
-099   * @param conf {@link Configuration} to read for the user-specified codec. If none is specified,
-100   *          uses a {@link WALCellCodec}.
-101   * @param cellCodecClsName name of codec
-102   * @param compression compression the codec should use
-103   * @return a {@link WALCellCodec} ready for use.
-104   * @throws UnsupportedOperationException if the codec cannot be instantiated
-105   */
-106
-107  public static WALCellCodec create(Configuration conf, String cellCodecClsName,
-108      CompressionContext compression) throws UnsupportedOperationException {
-109    if (cellCodecClsName == null) {
-110      cellCodecClsName = getWALCellCodecClass(conf);
-111    }
-112    return ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-113        { Configuration.class, CompressionContext.class }, new Object[] { conf, compression });
-114  }
-115
-116  /**
-117   * Create and setup a {@link WALCellCodec} from the
-118   * CompressionContext.
-119   * Cell Codec classname is read from {@link Configuration}.
-120   * Fully prepares the codec for use.
-121   * @param conf {@link Configuration} to read for the user-specified codec. If none is specified,
-122   *          uses a {@link WALCellCodec}.
-123   * @param compression compression the codec should use
-124   * @return a {@link WALCellCodec} ready for use.
-125   * @throws UnsupportedOperationException if the codec cannot be instantiated
-126   */
-127  public static WALCellCodec create(Configuration conf,
-128      CompressionContext compression) throws UnsupportedOperationException {
-129    String cellCodecClsName = getWALCellCodecClass(conf);
-130    return ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-131        { Configuration.class, CompressionContext.class }, new Object[] { conf, 
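The javadoc above spells out the contract for dynamically loaded codecs: a no-argument constructor plus a (Configuration, CompressionContext) constructor, wired up via the hbase.regionserver.wal.codec key. A minimal sketch of a conforming subclass follows; MyWALCellCodec is a hypothetical name, and the class adds no behavior of its own.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.regionserver.wal.CompressionContext;
import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;

public class MyWALCellCodec extends WALCellCodec {
  // Required: no-argument constructor (see the javadoc above).
  public MyWALCellCodec() {
    super();
  }

  // Required signature for dynamic loading from the Configuration;
  // create(...) instantiates the class reflectively with exactly these types.
  public MyWALCellCodec(Configuration conf, CompressionContext compression) {
    super(conf, compression);
  }
}

Wiring it in would then look like (a sketch, not taken from the diff):

  conf.set(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, MyWALCellCodec.class.getName());
  WALCellCodec codec = WALCellCodec.create(conf, null);  // null = no compression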

[12/51] [partial] hbase-site git commit: Published site at 7d3750bd9fc9747623549c242cc4171e224b3eaf.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3469cbc0/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.html
index 5420d82..6ea3672 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.html
@@ -316,687 +316,728 @@
 308    }
 309  }
 310
-311  public enum ServerState { ONLINE, SPLITTING, OFFLINE }
-312  public static class ServerStateNode implements Comparable<ServerStateNode> {
-313    private final ServerReportEvent reportEvent;
-314
-315    private final Set<RegionStateNode> regions;
-316    private final ServerName serverName;
-317
-318    private volatile ServerState state = ServerState.ONLINE;
-319    private volatile int versionNumber = 0;
-320
-321    public ServerStateNode(final ServerName serverName) {
-322      this.serverName = serverName;
-323      this.regions = ConcurrentHashMap.newKeySet();
-324      this.reportEvent = new ServerReportEvent(serverName);
-325    }
-326
-327    public ServerName getServerName() {
-328      return serverName;
-329    }
+311  /**
+312   * Server State.
+313   */
+314  public enum ServerState {
+315    /**
+316     * Initial state. Available.
+317     */
+318    ONLINE,
+319
+320    /**
+321     * Server expired/crashed. Currently undergoing WAL splitting.
+322     */
+323    SPLITTING,
+324
+325    /**
+326     * WAL splitting done.
+327     */
+328    OFFLINE
+329  }
 330
-331    public ServerState getState() {
-332      return state;
-333    }
-334
-335    public int getVersionNumber() {
-336      return versionNumber;
-337    }
-338
-339    public ProcedureEvent<?> getReportEvent() {
-340      return reportEvent;
-341    }
+331  /**
+332   * State of Server; list of hosted regions, etc.
+333   */
+334  public static class ServerStateNode implements Comparable<ServerStateNode> {
+335    private final ServerReportEvent reportEvent;
+336
+337    private final Set<RegionStateNode> regions;
+338    private final ServerName serverName;
+339
+340    private volatile ServerState state = ServerState.ONLINE;
+341    private volatile int versionNumber = 0;
 342
-343    public boolean isInState(final ServerState... expected) {
-344      boolean expectedState = false;
-345      if (expected != null) {
-346        for (int i = 0; i < expected.length; ++i) {
-347          expectedState |= (state == expected[i]);
-348        }
-349      }
-350      return expectedState;
+343    public ServerStateNode(final ServerName serverName) {
+344      this.serverName = serverName;
+345      this.regions = ConcurrentHashMap.newKeySet();
+346      this.reportEvent = new ServerReportEvent(serverName);
+347    }
+348
+349    public ServerName getServerName() {
+350      return serverName;
 351    }
 352
-353    public void setState(final ServerState state) {
-354      this.state = state;
+353    public ServerState getState() {
+354      return state;
 355    }
 356
-357    public void setVersionNumber(final int versionNumber) {
-358      this.versionNumber = versionNumber;
+357    public int getVersionNumber() {
+358      return versionNumber;
 359    }
 360
-361    public Set<RegionStateNode> getRegions() {
-362      return regions;
+361    public ProcedureEvent<?> getReportEvent() {
+362      return reportEvent;
 363    }
 364
-365    public int getRegionCount() {
-366      return regions.size();
+365    public boolean isOffline() {
+366      return this.state.equals(ServerState.OFFLINE);
 367    }
 368
-369    public ArrayList<RegionInfo> getRegionInfoList() {
-370      ArrayList<RegionInfo> hris = new ArrayList<RegionInfo>(regions.size());
-371      for (RegionStateNode region: regions) {
-372        hris.add(region.getRegionInfo());
-373      }
-374      return hris;
-375    }
-376
-377    public void addRegion(final RegionStateNode regionNode) {
-378      this.regions.add(regionNode);
-379    }
-380
-381    public void removeRegion(final RegionStateNode regionNode) {
-382      this.regions.remove(regionNode);
-383    }
-384
-385    @Override
-386    public int compareTo(final ServerStateNode other) {
-387      return getServerName().compareTo(other.getServerName());
-388    }
-389
-390    @Override
-391    public int hashCode() {
-392      return getServerName().hashCode();
+369    public boolean isInState(final ServerState... expected) {
+370      boolean expectedState = false;
+371      if (expected != null) {
+372        for (int i = 0; i < expected.length; ++i) {
+373          expectedState |= (state == expected[i]);
+374        }
+375      }
+376      return expectedState;
+377    }
+378
+379    public void setState(final ServerState state) {
+380      
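The new javadoc documents a simple lifecycle: a server starts ONLINE, moves to SPLITTING when it expires or crashes and its WALs are being replayed, and ends OFFLINE once splitting completes. A standalone sketch of that lifecycle and of the varargs membership test follows; it mirrors the enum and isInState(...) above rather than importing the master-internal inner classes.

public class ServerStateDemo {
  enum ServerState { ONLINE, SPLITTING, OFFLINE }  // mirrors RegionStates.ServerState

  private volatile ServerState state = ServerState.ONLINE;

  // Same varargs membership test as ServerStateNode.isInState(...).
  boolean isInState(ServerState... expected) {
    for (ServerState s : expected) {
      if (state == s) {
        return true;
      }
    }
    return false;
  }

  public static void main(String[] args) {
    ServerStateDemo node = new ServerStateDemo();
    node.state = ServerState.SPLITTING;  // server crashed, WALs being split
    System.out.println(node.isInState(ServerState.ONLINE, ServerState.SPLITTING)); // true
    node.state = ServerState.OFFLINE;    // WAL splitting done
    System.out.println(node.isInState(ServerState.ONLINE));                        // false
  }
}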

[12/51] [partial] hbase-site git commit: Published site at 997747076d8ec0b4346d7cb99c4b0667a7c14905.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4df09ed9/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.html b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.html
index 168462e..67da347 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.html
@@ -213,330 +213,337 @@
 205        if (master.getMasterCoprocessorHost() != null) {
 206          master.getMasterCoprocessorHost().preMoveServers(hostPorts, request.getTargetGroup());
 207        }
-208        groupAdminServer.moveServers(hostPorts, request.getTargetGroup());
-209        if (master.getMasterCoprocessorHost() != null) {
-210          master.getMasterCoprocessorHost().postMoveServers(hostPorts, request.getTargetGroup());
-211        }
-212      } catch (IOException e) {
-213        CoprocessorRpcUtils.setControllerException(controller, e);
-214      }
-215      done.run(builder.build());
-216    }
-217
-218    @Override
-219    public void moveTables(RpcController controller, MoveTablesRequest request,
-220        RpcCallback<MoveTablesResponse> done) {
-221      MoveTablesResponse.Builder builder = MoveTablesResponse.newBuilder();
-222      Set<TableName> tables = new HashSet<>(request.getTableNameList().size());
-223      for (HBaseProtos.TableName tableName : request.getTableNameList()) {
-224        tables.add(ProtobufUtil.toTableName(tableName));
-225      }
-226      LOG.info(master.getClientIdAuditPrefix() + " move tables " + tables +" to rsgroup "
-227          + request.getTargetGroup());
-228      try {
-229        if (master.getMasterCoprocessorHost() != null) {
-230          master.getMasterCoprocessorHost().preMoveTables(tables, request.getTargetGroup());
-231        }
-232        groupAdminServer.moveTables(tables, request.getTargetGroup());
-233        if (master.getMasterCoprocessorHost() != null) {
-234          master.getMasterCoprocessorHost().postMoveTables(tables, request.getTargetGroup());
-235        }
-236      } catch (IOException e) {
-237        CoprocessorRpcUtils.setControllerException(controller, e);
-238      }
-239      done.run(builder.build());
-240    }
-241
-242    @Override
-243    public void addRSGroup(RpcController controller, AddRSGroupRequest request,
-244        RpcCallback<AddRSGroupResponse> done) {
-245      AddRSGroupResponse.Builder builder = AddRSGroupResponse.newBuilder();
-246      LOG.info(master.getClientIdAuditPrefix() + " add rsgroup " + request.getRSGroupName());
-247      try {
-248        if (master.getMasterCoprocessorHost() != null) {
-249          master.getMasterCoprocessorHost().preAddRSGroup(request.getRSGroupName());
-250        }
-251        groupAdminServer.addRSGroup(request.getRSGroupName());
-252        if (master.getMasterCoprocessorHost() != null) {
-253          master.getMasterCoprocessorHost().postAddRSGroup(request.getRSGroupName());
-254        }
-255      } catch (IOException e) {
-256        CoprocessorRpcUtils.setControllerException(controller, e);
-257      }
-258      done.run(builder.build());
-259    }
-260
-261    @Override
-262    public void removeRSGroup(RpcController controller,
-263        RemoveRSGroupRequest request, RpcCallback<RemoveRSGroupResponse> done) {
-264      RemoveRSGroupResponse.Builder builder =
-265          RemoveRSGroupResponse.newBuilder();
-266      LOG.info(master.getClientIdAuditPrefix() + " remove rsgroup " + request.getRSGroupName());
-267      try {
-268        if (master.getMasterCoprocessorHost() != null) {
-269          master.getMasterCoprocessorHost().preRemoveRSGroup(request.getRSGroupName());
-270        }
-271        groupAdminServer.removeRSGroup(request.getRSGroupName());
-272        if (master.getMasterCoprocessorHost() != null) {
-273          master.getMasterCoprocessorHost().postRemoveRSGroup(request.getRSGroupName());
-274        }
-275      } catch (IOException e) {
-276        CoprocessorRpcUtils.setControllerException(controller, e);
-277      }
-278      done.run(builder.build());
-279    }
-280
-281    @Override
-282    public void balanceRSGroup(RpcController controller,
-283        BalanceRSGroupRequest request, RpcCallback<BalanceRSGroupResponse> done) {
-284      BalanceRSGroupResponse.Builder builder = BalanceRSGroupResponse.newBuilder();
-285      LOG.info(master.getClientIdAuditPrefix() + " balance rsgroup, group=" +
-286          request.getRSGroupName());
-287      try {
-288        if (master.getMasterCoprocessorHost() != null) {
-289          master.getMasterCoprocessorHost().preBalanceRSGroup(request.getRSGroupName());
-290        }
-291        boolean balancerRan = groupAdminServer.balanceRSGroup(request.getRSGroupName());
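Every RPC in this endpoint follows the same shape: optional pre-hook on the master coprocessor host, the actual mutation on groupAdminServer, optional post-hook, with IOExceptions routed back through the controller. A reduced sketch of that pattern; Hooks and the operation body are hypothetical stand-ins for MasterCoprocessorHost and RSGroupAdminServer, not the real interfaces.

import java.io.IOException;

public class HookedOperation {
  interface Hooks {
    void pre(String group) throws IOException;   // e.g. preAddRSGroup(...)
    void post(String group) throws IOException;  // e.g. postAddRSGroup(...)
  }

  static void addGroup(Hooks hooks, String group) {
    try {
      if (hooks != null) {
        hooks.pre(group);
      }
      // ... the actual mutation, e.g. groupAdminServer.addRSGroup(group) ...
      if (hooks != null) {
        hooks.post(group);
      }
    } catch (IOException e) {
      // In the real endpoint this becomes
      // CoprocessorRpcUtils.setControllerException(controller, e);
      System.err.println("operation failed: " + e.getMessage());
    }
    // done.run(builder.build()) runs unconditionally in the real code.
  }
}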
            

[12/51] [partial] hbase-site git commit: Published site at f3d1c021de2264301f68eadb9ef126ff83d7ef53.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/883dde2f/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
index 54b1f96..ed95cbf 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
@@ -31,922 +31,906 @@
 023import java.io.ByteArrayInputStream;
 024import java.io.IOException;
 025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.Collections;
-028import java.util.HashMap;
-029import java.util.HashSet;
-030import java.util.LinkedList;
-031import java.util.List;
-032import java.util.Map;
-033import java.util.NavigableSet;
-034import java.util.Set;
-035import java.util.SortedSet;
-036import java.util.TreeSet;
-037import java.util.concurrent.atomic.AtomicBoolean;
-038
-039import org.apache.hadoop.conf.Configuration;
-040import org.apache.hadoop.hbase.Cell;
-041import org.apache.hadoop.hbase.CellUtil;
-042import org.apache.hadoop.hbase.Coprocessor;
-043import org.apache.hadoop.hbase.DoNotRetryIOException;
-044import org.apache.hadoop.hbase.HColumnDescriptor;
-045import org.apache.hadoop.hbase.HConstants;
-046import org.apache.hadoop.hbase.HTableDescriptor;
-047import org.apache.hadoop.hbase.MetaTableAccessor;
-048import org.apache.hadoop.hbase.MetaTableAccessor.DefaultVisitorBase;
-049import org.apache.hadoop.hbase.ServerName;
-050import org.apache.hadoop.hbase.TableName;
-051import org.apache.hadoop.hbase.client.ClusterConnection;
-052import org.apache.hadoop.hbase.client.Delete;
-053import org.apache.hadoop.hbase.client.Get;
-054import org.apache.hadoop.hbase.client.Mutation;
-055import org.apache.hadoop.hbase.client.Put;
-056import org.apache.hadoop.hbase.client.RegionInfo;
-057import org.apache.hadoop.hbase.client.Result;
-058import org.apache.hadoop.hbase.client.Scan;
-059import org.apache.hadoop.hbase.client.Table;
-060import org.apache.hadoop.hbase.client.TableState;
-061import org.apache.hadoop.hbase.constraint.ConstraintException;
-062import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
-063import org.apache.hadoop.hbase.exceptions.DeserializationException;
-064import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-065import org.apache.hadoop.hbase.master.MasterServices;
-066import org.apache.hadoop.hbase.master.ServerListener;
-067import org.apache.hadoop.hbase.master.TableStateManager;
-068import org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
-069import org.apache.hadoop.hbase.net.Address;
-070import org.apache.hadoop.hbase.procedure2.Procedure;
-071import org.apache.hadoop.hbase.protobuf.ProtobufMagic;
-072import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-073import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
-074import org.apache.hadoop.hbase.protobuf.generated.RSGroupProtos;
-075import org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-076import org.apache.hadoop.hbase.quotas.QuotaUtil;
-077import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
-078import org.apache.hadoop.hbase.security.access.AccessControlLists;
-079import org.apache.hadoop.hbase.util.Bytes;
-080import org.apache.hadoop.hbase.zookeeper.ZKUtil;
-081import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-082import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-083import org.apache.yetus.audience.InterfaceAudience;
-084import org.apache.zookeeper.KeeperException;
-085import org.slf4j.Logger;
-086import org.slf4j.LoggerFactory;
-087
-088import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-089import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
-090import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
-091import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-092import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-093
-094/**
-095 * This is an implementation of {@link RSGroupInfoManager} which makes
-096 * use of an HBase table as the persistence store for the group information.
-097 * It also makes use of zookeeper to store group information needed
-098 * for bootstrapping during offline mode.
-099 *
-100 * <h2>Concurrency</h2>
-101 * RSGroup state is kept locally in Maps. There is a rsgroup name to cached
-102 * RSGroupInfo Map at {@link #rsGroupMap} and a Map of tables to the name of the
-103 * rsgroup they belong too (in {@link #tableMap}). These Maps are persisted to the
-104 * hbase:rsgroup table (and cached in zk) on each modification.
-105 *
-106 * <p>Mutations on state are 
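The class javadoc describes a write-through scheme: group state lives in in-memory maps, and every mutation persists the maps to the hbase:rsgroup table and caches them in zookeeper. A toy sketch of that scheme, assuming nothing beyond the javadoc; Persistence and GroupStateSketch are hypothetical simplifications, not the real RSGroupInfoManagerImpl types.

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

public class GroupStateSketch {
  interface Persistence {
    void save(Map<String, String> tableToGroup) throws IOException;
  }

  private final Map<String, String> tableToGroup = new HashMap<>(); // like tableMap
  private final Persistence hbaseStore; // stands in for the hbase:rsgroup table
  private final Persistence zkCache;    // stands in for the zookeeper copy

  GroupStateSketch(Persistence hbaseStore, Persistence zkCache) {
    this.hbaseStore = hbaseStore;
    this.zkCache = zkCache;
  }

  // Each modification writes through both stores, mirroring "persisted to
  // the hbase:rsgroup table (and cached in zk) on each modification".
  synchronized void moveTable(String table, String group) throws IOException {
    tableToGroup.put(table, group);
    hbaseStore.save(tableToGroup);
    zkCache.save(tableToGroup);
  }
}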
            

[12/51] [partial] hbase-site git commit: Published site at cf529f18a9959589fa635f78df4840472526ea2c.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7bcc960d/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncRandomWriteTest.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncRandomWriteTest.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncRandomWriteTest.html
index 3f8844b..cdb9398 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncRandomWriteTest.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncRandomWriteTest.html
@@ -140,2712 +140,2713 @@
 132public class PerformanceEvaluation extends Configured implements Tool {
 133  static final String RANDOM_SEEK_SCAN = "randomSeekScan";
 134  static final String RANDOM_READ = "randomRead";
-135  private static final Logger LOG = LoggerFactory.getLogger(PerformanceEvaluation.class.getName());
-136  private static final ObjectMapper MAPPER = new ObjectMapper();
-137  static {
-138    MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
-139  }
-140
-141  public static final String TABLE_NAME = "TestTable";
-142  public static final String FAMILY_NAME_BASE = "info";
-143  public static final byte[] FAMILY_ZERO = Bytes.toBytes("info0");
-144  public static final byte[] COLUMN_ZERO = Bytes.toBytes("" + 0);
-145  public static final int DEFAULT_VALUE_LENGTH = 1000;
-146  public static final int ROW_LENGTH = 26;
-147
-148  private static final int ONE_GB = 1024 * 1024 * 1000;
-149  private static final int DEFAULT_ROWS_PER_GB = ONE_GB / DEFAULT_VALUE_LENGTH;
-150  // TODO : should we make this configurable
-151  private static final int TAG_LENGTH = 256;
-152  private static final DecimalFormat FMT = new DecimalFormat("0.##");
-153  private static final MathContext CXT = MathContext.DECIMAL64;
-154  private static final BigDecimal MS_PER_SEC = BigDecimal.valueOf(1000);
-155  private static final BigDecimal BYTES_PER_MB = BigDecimal.valueOf(1024 * 1024);
-156  private static final TestOptions DEFAULT_OPTS = new TestOptions();
-157
-158  private static Map<String, CmdDescriptor> COMMANDS = new TreeMap<>();
-159  private static final Path PERF_EVAL_DIR = new Path("performance_evaluation");
-160
-161  static {
-162    addCommandDescriptor(AsyncRandomReadTest.class, "asyncRandomRead",
-163        "Run async random read test");
-164    addCommandDescriptor(AsyncRandomWriteTest.class, "asyncRandomWrite",
-165        "Run async random write test");
-166    addCommandDescriptor(AsyncSequentialReadTest.class, "asyncSequentialRead",
-167        "Run async sequential read test");
-168    addCommandDescriptor(AsyncSequentialWriteTest.class, "asyncSequentialWrite",
-169        "Run async sequential write test");
-170    addCommandDescriptor(AsyncScanTest.class, "asyncScan",
-171        "Run async scan test (read every row)");
-172    addCommandDescriptor(RandomReadTest.class, RANDOM_READ,
-173      "Run random read test");
-174    addCommandDescriptor(RandomSeekScanTest.class, RANDOM_SEEK_SCAN,
-175      "Run random seek and scan 100 test");
-176    addCommandDescriptor(RandomScanWithRange10Test.class, "scanRange10",
-177      "Run random seek scan with both start and stop row (max 10 rows)");
-178    addCommandDescriptor(RandomScanWithRange100Test.class, "scanRange100",
-179      "Run random seek scan with both start and stop row (max 100 rows)");
-180    addCommandDescriptor(RandomScanWithRange1000Test.class, "scanRange1000",
-181      "Run random seek scan with both start and stop row (max 1000 rows)");
-182    addCommandDescriptor(RandomScanWithRange1Test.class, "scanRange1",
-183      "Run random seek scan with both start and stop row (max 1 rows)");
-184    addCommandDescriptor(RandomWriteTest.class, "randomWrite",
-185      "Run random write test");
-186    addCommandDescriptor(SequentialReadTest.class, "sequentialRead",
-187      "Run sequential read test");
-188    addCommandDescriptor(SequentialWriteTest.class, "sequentialWrite",
-189      "Run sequential write test");
-190    addCommandDescriptor(ScanTest.class, "scan",
-191      "Run scan test (read every row)");
-192    addCommandDescriptor(FilteredScanTest.class, "filterScan",
-193      "Run scan test using a filter to find a specific row based on it's value " +
-194      "(make sure to use --rows=20)");
-195    addCommandDescriptor(IncrementTest.class, "increment",
-196      "Increment on each row; clients overlap on keyspace so some concurrent operations");
-197    addCommandDescriptor(AppendTest.class, "append",
-198      "Append on each row; clients overlap on keyspace so some concurrent operations");
-199    addCommandDescriptor(CheckAndMutateTest.class, "checkAndMutate",
-200      "CheckAndMutate on each row; clients overlap on keyspace so some concurrent operations");
-201
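The static block above registers each test under a command name in a TreeMap, so usage output comes out sorted and lookup by name is trivial. A reduced, self-contained sketch of that registration pattern; CmdDescriptor here is a simplified stand-in for the real nested class.

import java.util.Map;
import java.util.TreeMap;

public class CommandRegistry {
  static final class CmdDescriptor {
    final Class<?> cmdClass;
    final String description;
    CmdDescriptor(Class<?> cmdClass, String description) {
      this.cmdClass = cmdClass;
      this.description = description;
    }
  }

  // TreeMap keeps commands sorted by name for help/usage output.
  private static final Map<String, CmdDescriptor> COMMANDS = new TreeMap<>();

  static void addCommandDescriptor(Class<?> cmdClass, String name, String description) {
    COMMANDS.put(name, new CmdDescriptor(cmdClass, description));
  }

  static {
    // Placeholder class; the real code registers test classes like RandomReadTest.
    addCommandDescriptor(Runnable.class, "randomRead", "Run random read test");
  }

  public static void main(String[] args) {
    COMMANDS.forEach((name, d) -> System.out.println(name + ": " + d.description));
  }
}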
            

[12/51] [partial] hbase-site git commit: Published site at 021f66d11d2cbb7308308093e29e69d6e7661ee9.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/92a26cfb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
index cca21a9..2f8a48b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
@@ -2280,7 +2280,7 @@
 2272      this.cacheFlushCount = snapshot.getCellsCount();
 2273      this.cacheFlushSize = snapshot.getDataSize();
 2274      committedFiles = new ArrayList<>(1);
-2275      return new MemStoreSize(snapshot.getMemStoreSize());
+2275      return snapshot.getMemStoreSize();
 2276    }
 2277
 2278    @Override

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/92a26cfb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
index cca21a9..2f8a48b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
@@ -2280,7 +2280,7 @@
 2272      this.cacheFlushCount = snapshot.getCellsCount();
 2273      this.cacheFlushSize = snapshot.getDataSize();
 2274      committedFiles = new ArrayList<>(1);
-2275      return new MemStoreSize(snapshot.getMemStoreSize());
+2275      return snapshot.getMemStoreSize();
 2276    }
 2277
 2278    @Override
            
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/92a26cfb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStore.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStore.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStore.html
index 541a093..651511e 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStore.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStore.html
@@ -58,96 +58,95 @@
 050  void clearSnapshot(long id) throws UnexpectedStateException;
 051
 052  /**
-053   * On flush, how much memory we will clear.
-054   * Flush will first clear out the data in snapshot if any (It will take a second flush
-055   * invocation to clear the current Cell set). If snapshot is empty, current
-056   * Cell set will be flushed.
-057   *
-058   * @return size of data that is going to be flushed
-059   */
-060  MemStoreSize getFlushableSize();
-061
-062  /**
-063   * Return the size of the snapshot(s) if any
-064   * @return size of the memstore snapshot
-065   */
-066  MemStoreSize getSnapshotSize();
-067
-068  /**
-069   * Write an update
-070   * @param cell
-071   * @param memstoreSizing The delta in memstore size will be passed back via this.
-072   *        This will include both data size and heap overhead delta.
-073   */
-074  void add(final Cell cell, MemStoreSizing memstoreSizing);
-075
-076  /**
-077   * Write the updates
-078   * @param cells
-079   * @param memstoreSizing The delta in memstore size will be passed back via this.
-080   *        This will include both data size and heap overhead delta.
-081   */
-082  void add(Iterable<Cell> cells, MemStoreSizing memstoreSizing);
-083
-084  /**
-085   * @return Oldest timestamp of all the Cells in the MemStore
-086   */
-087  long timeOfOldestEdit();
-088
-089  /**
-090   * Update or insert the specified cells.
-091   * <p>
-092   * For each Cell, insert into MemStore. This will atomically upsert the value for that
-093   * row/family/qualifier. If a Cell did already exist, it will then be removed.
-094   * <p>
-095   * Currently the memstoreTS is kept at 0 so as each insert happens, it will be immediately
-096   * visible. May want to change this so it is atomic across all KeyValues.
-097   * <p>
-098   * This is called under row lock, so Get operations will still see updates atomically. Scans will
-099   * only see each KeyValue update as atomic.
-100   * @param cells
-101   * @param readpoint readpoint below which we can safely remove duplicate Cells.
-102   * @param memstoreSizing The delta in memstore size will be passed back via this.
-103   *        This will include both data size and heap overhead delta.
-104   */
-105  void upsert(Iterable<Cell> cells, long readpoint, MemStoreSizing memstoreSizing);
-106
-107  /**
-108   * @return scanner over the memstore. This might include scanner over the snapshot when one is
-109   *     present.
-110   */
-111  List<KeyValueScanner> getScanners(long readPt) throws IOException;
-112
-113  /**
-114   * @return 
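The add(...) javadoc above documents an out-parameter contract: the caller passes a MemStoreSizing and the memstore reports the delta it added, covering both data size and heap overhead. An illustration of that contract under stated assumptions; SizeAccumulator and the fixed 64-byte per-cell overhead are hypothetical stand-ins, not the real MemStoreSizing accounting.

public class MemStoreSizingSketch {
  static final class SizeAccumulator {   // stand-in for MemStoreSizing
    long dataSize;
    long heapSize;
    void inc(long data, long heap) { dataSize += data; heapSize += heap; }
  }

  // Stand-in for MemStore.add(Cell, MemStoreSizing): apply the write, then
  // report the delta (data size plus heap overhead) back through the sizing.
  static void add(byte[] cell, SizeAccumulator sizing) {
    long dataDelta = cell.length;
    long heapDelta = dataDelta + 64;  // assumed fixed per-cell overhead
    sizing.inc(dataDelta, heapDelta);
  }

  public static void main(String[] args) {
    SizeAccumulator sizing = new SizeAccumulator();
    add(new byte[100], sizing);
    add(new byte[50], sizing);
    // The region uses the accumulated delta to decide when to flush.
    System.out.println("data=" + sizing.dataSize + " heap=" + sizing.heapSize);
  }
}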

[12/51] [partial] hbase-site git commit: Published site at acd0d1e446c164d9c54bfb461b2d449c8d717c07.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f2065178/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange1000Test.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange1000Test.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange1000Test.html
index 2510283..418c60c 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange1000Test.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange1000Test.html
@@ -77,77 +77,77 @@
 069import org.apache.hadoop.hbase.client.RowMutations;
 070import org.apache.hadoop.hbase.client.Scan;
 071import org.apache.hadoop.hbase.client.Table;
-072import org.apache.hadoop.hbase.filter.BinaryComparator;
-073import org.apache.hadoop.hbase.filter.Filter;
-074import org.apache.hadoop.hbase.filter.FilterAllFilter;
-075import org.apache.hadoop.hbase.filter.FilterList;
-076import org.apache.hadoop.hbase.filter.PageFilter;
-077import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
-078import org.apache.hadoop.hbase.filter.WhileMatchFilter;
-079import org.apache.hadoop.hbase.io.compress.Compression;
-080import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-081import org.apache.hadoop.hbase.io.hfile.RandomDistribution;
-082import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
-083import org.apache.hadoop.hbase.regionserver.BloomType;
-084import org.apache.hadoop.hbase.regionserver.CompactingMemStore;
-085import org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
-086import org.apache.hadoop.hbase.trace.SpanReceiverHost;
-087import org.apache.hadoop.hbase.trace.TraceUtil;
-088import org.apache.hadoop.hbase.util.ByteArrayHashKey;
-089import org.apache.hadoop.hbase.util.Bytes;
-090import org.apache.hadoop.hbase.util.Hash;
-091import org.apache.hadoop.hbase.util.MurmurHash;
-092import org.apache.hadoop.hbase.util.Pair;
-093import org.apache.hadoop.hbase.util.YammerHistogramUtils;
-094import org.apache.hadoop.io.LongWritable;
-095import org.apache.hadoop.io.Text;
-096import org.apache.hadoop.mapreduce.Job;
-097import org.apache.hadoop.mapreduce.Mapper;
-098import org.apache.hadoop.mapreduce.lib.input.NLineInputFormat;
-099import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-100import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
-101import org.apache.hadoop.util.Tool;
-102import org.apache.hadoop.util.ToolRunner;
-103import org.apache.htrace.core.ProbabilitySampler;
-104import org.apache.htrace.core.Sampler;
-105import org.apache.htrace.core.TraceScope;
-106import org.apache.yetus.audience.InterfaceAudience;
-107import org.slf4j.Logger;
-108import org.slf4j.LoggerFactory;
-109import org.apache.hbase.thirdparty.com.google.common.base.MoreObjects;
-110import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-111
-112/**
-113 * Script used evaluating HBase performance and scalability.  Runs a HBase
-114 * client that steps through one of a set of hardcoded tests or 'experiments'
-115 * (e.g. a random reads test, a random writes test, etc.). Pass on the
-116 * command-line which test to run and how many clients are participating in
-117 * this experiment. Run {@code PerformanceEvaluation --help} to obtain usage.
-118 *
-119 * <p>This class sets up and runs the evaluation programs described in
-120 * Section 7, <i>Performance Evaluation</i>, of the <a
-121 * href="http://labs.google.com/papers/bigtable.html">Bigtable</a>
-122 * paper, pages 8-10.
-123 *
-124 * <p>By default, runs as a mapreduce job where each mapper runs a single test
-125 * client. Can also run as a non-mapreduce, multithreaded application by
-126 * specifying {@code --nomapred}. Each client does about 1GB of data, unless
-127 * specified otherwise.
-128 */
-129@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-130public class PerformanceEvaluation extends Configured implements Tool {
-131  static final String RANDOM_SEEK_SCAN = "randomSeekScan";
-132  static final String RANDOM_READ = "randomRead";
-133  private static final Logger LOG = LoggerFactory.getLogger(PerformanceEvaluation.class.getName());
-134  private static final ObjectMapper MAPPER = new ObjectMapper();
-135  static {
-136    MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
-137  }
-138
-139  public static final String TABLE_NAME = "TestTable";
-140  public static final byte[] FAMILY_NAME = Bytes.toBytes("info");
-141  public static final byte [] COLUMN_ZERO = Bytes.toBytes("" + 0);
-142  public static final byte [] QUALIFIER_NAME = COLUMN_ZERO;
+072import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+073import org.apache.hadoop.hbase.filter.BinaryComparator;
+074import 
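The class javadoc above notes the tool implements Hadoop's Tool interface and can run either as a mapreduce job or, with --nomapred, as local threads. A sketch of launching it programmatically through ToolRunner; it assumes the public PerformanceEvaluation(Configuration) constructor visible in this class, and the option values are placeholders.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.PerformanceEvaluation;
import org.apache.hadoop.util.ToolRunner;

public class RunPE {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // --nomapred runs the test clients as local threads rather than mappers;
    // trailing "1" is the number of clients, per the usage described above.
    String[] peArgs = { "--nomapred", "--rows=1000", "randomRead", "1" };
    int exit = ToolRunner.run(conf, new PerformanceEvaluation(conf), peArgs);
    System.exit(exit);
  }
}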
            

[12/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index 3168ee3..e159b3f 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -145,8 +145,8 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
             
            -org.apache.hadoop.hbase.backup.TestIncrementalBackupMergeWithFailures.FailurePhase
             org.apache.hadoop.hbase.backup.TestBackupDeleteWithFailures.Failure
            +org.apache.hadoop.hbase.backup.TestIncrementalBackupMergeWithFailures.FailurePhase
             
             
             
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
            --
            diff --git 
            a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html 
            b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
            index fc45e57..d865969 100644
            --- a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
            +++ b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
            @@ -5307,18 +5307,22 @@
             
             
             static HBaseClassTestRule
            -TestWALEntrySinkFilter.CLASS_RULE
            +TestSerialReplicationEndpoint.CLASS_RULE
             
             
             static HBaseClassTestRule
            -TestRegionReplicaReplicationEndpoint.CLASS_RULE
            +TestWALEntrySinkFilter.CLASS_RULE
             
             
             static HBaseClassTestRule
            -TestReplicator.CLASS_RULE
            +TestRegionReplicaReplicationEndpoint.CLASS_RULE
             
             
             static HBaseClassTestRule
            +TestReplicator.CLASS_RULE
            +
            +
            +static HBaseClassTestRule
             TestRegionReplicaReplicationEndpointNoMaster.CLASS_RULE
             
             
            @@ -6225,42 +6229,46 @@
             
             
             static HBaseClassTestRule
            -TestCoprocessorScanPolicy.CLASS_RULE
            +TestHBaseFsckCleanReplicationBarriers.CLASS_RULE
             
             
             static HBaseClassTestRule
            -TestMiniClusterLoadSequential.CLASS_RULE
            +TestCoprocessorScanPolicy.CLASS_RULE
             
             
             static HBaseClassTestRule
            -TestFromClientSide3WoUnsafe.CLASS_RULE
            +TestMiniClusterLoadSequential.CLASS_RULE
             
             
             static HBaseClassTestRule
            -TestEncryptionTest.CLASS_RULE
            +TestFromClientSide3WoUnsafe.CLASS_RULE
             
             
             static HBaseClassTestRule
            -TestCompressionTest.CLASS_RULE
            +TestEncryptionTest.CLASS_RULE
             
             
             static HBaseClassTestRule
            -TestIdReadWriteLock.CLASS_RULE
            +TestCompressionTest.CLASS_RULE
             
             
             static HBaseClassTestRule
            -TestBoundedPriorityBlockingQueue.CLASS_RULE
            +TestIdReadWriteLock.CLASS_RULE
             
             
             static HBaseClassTestRule
            -TestMiniClusterLoadParallel.CLASS_RULE
            +TestBoundedPriorityBlockingQueue.CLASS_RULE
             
             
             static HBaseClassTestRule
            -TestRegionSplitCalculator.CLASS_RULE
            +TestMiniClusterLoadParallel.CLASS_RULE
             
             
             static HBaseClassTestRule
            +TestRegionSplitCalculator.CLASS_RULE
            +
            +
            +static HBaseClassTestRule
             TestIncrementingEnvironmentEdge.CLASS_RULE
             
             
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html
            --
            diff --git 
            a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html 
            b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html
            index 48f4ec2..bcd3437 100644
            --- a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html
            +++ b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html
            @@ -3358,14 +3358,18 @@
             TestSerialReplicationChecker.UTIL
             
             
            +private static HBaseTestingUtility
            +TestSerialReplicationEndpoint.UTIL
            +
            +
             protected static HBaseTestingUtility
             TestReplicationSourceManager.utility
             
            -
            +
             private static HBaseTestingUtility
             TestGlobalThrottler.utility1
             
            -
            +
             private static HBaseTestingUtility
             TestGlobalThrottler.utility2
             
            @@ -4307,6 +4311,10 @@
             private static HBaseTestingUtility
             TestConnectionCache.UTIL
             
            +
            +private static HBaseTestingUtility
            +TestHBaseFsckCleanReplicationBarriers.UTIL
            +
             
             
             
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.TestUpdatableReplicationEndpoint.html
            --
            diff --git 
            a/testdevapidocs/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.TestUpdatableReplicationEndpoint.html
             
            

            [12/51] [partial] hbase-site git commit: Published site at 2912c953551bedbfbf30c32c156ed7bb187d54c3.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d220bc5e/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
            --
            diff --git 
            a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html 
            b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
            index 8302e28..c370eb9 100644
            --- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
            +++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
            @@ -2113,3031 +2113,3033 @@
             2105
            errors.reportError(ERROR_CODE.ORPHAN_TABLE_STATE,
             2106tableName + " unable to 
            delete dangling table state " + tableState);
             2107  }
            -2108} else {
            -2109  
            errors.reportError(ERROR_CODE.ORPHAN_TABLE_STATE,
            -2110  tableName + " has dangling 
            table state " + tableState);
            -2111}
            -2112  }
            -2113}
            -2114// check that all tables have 
            states
            -2115for (TableName tableName : 
            tablesInfo.keySet()) {
             -2116  if (isTableIncluded(tableName) 
             && !tableStates.containsKey(tableName)) {
            -2117if (fixMeta) {
            -2118  
            MetaTableAccessor.updateTableState(connection, tableName, 
            TableState.State.ENABLED);
            -2119  TableState newState = 
            MetaTableAccessor.getTableState(connection, tableName);
            -2120  if (newState == null) {
            -2121
            errors.reportError(ERROR_CODE.NO_TABLE_STATE,
            -2122"Unable to change state 
            for table " + tableName + " in meta ");
            -2123  }
            -2124} else {
            -2125  
            errors.reportError(ERROR_CODE.NO_TABLE_STATE,
            -2126  tableName + " has no state 
            in meta ");
            -2127}
            -2128  }
            -2129}
            -2130  }
            -2131
            -2132  private void preCheckPermission() 
            throws IOException, AccessDeniedException {
            -2133if 
            (shouldIgnorePreCheckPermission()) {
            -2134  return;
            -2135}
            -2136
            -2137Path hbaseDir = 
            FSUtils.getRootDir(getConf());
            -2138FileSystem fs = 
            hbaseDir.getFileSystem(getConf());
            -2139UserProvider userProvider = 
            UserProvider.instantiate(getConf());
            -2140UserGroupInformation ugi = 
            userProvider.getCurrent().getUGI();
            -2141FileStatus[] files = 
            fs.listStatus(hbaseDir);
            -2142for (FileStatus file : files) {
            -2143  try {
            -2144FSUtils.checkAccess(ugi, file, 
            FsAction.WRITE);
            -2145  } catch (AccessDeniedException 
            ace) {
            -2146LOG.warn("Got 
            AccessDeniedException when preCheckPermission ", ace);
            -2147
            errors.reportError(ERROR_CODE.WRONG_USAGE, "Current user " + 
            ugi.getUserName()
            -2148  + " does not have write perms 
            to " + file.getPath()
            -2149  + ". Please rerun hbck as hdfs 
            user " + file.getOwner());
            -2150throw ace;
            -2151  }
            -2152}
            -2153  }
            -2154
            -2155  /**
            -2156   * Deletes region from meta table
            -2157   */
            -2158  private void deleteMetaRegion(HbckInfo 
            hi) throws IOException {
            -2159
            deleteMetaRegion(hi.metaEntry.getRegionName());
            -2160  }
            -2161
            -2162  /**
            -2163   * Deletes region from meta table
            -2164   */
            -2165  private void deleteMetaRegion(byte[] 
            metaKey) throws IOException {
            -2166Delete d = new Delete(metaKey);
            -2167meta.delete(d);
            -2168LOG.info("Deleted " + 
            Bytes.toString(metaKey) + " from META" );
            -2169  }
            -2170
            -2171  /**
            -2172   * Reset the split parent region info 
            in meta table
            -2173   */
            -2174  private void resetSplitParent(HbckInfo 
            hi) throws IOException {
            -2175RowMutations mutations = new 
            RowMutations(hi.metaEntry.getRegionName());
            -2176Delete d = new 
            Delete(hi.metaEntry.getRegionName());
            -2177
            d.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITA_QUALIFIER);
            -2178
            d.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITB_QUALIFIER);
            -2179mutations.add(d);
            -2180
            -2181RegionInfo hri = 
            RegionInfoBuilder.newBuilder(hi.metaEntry)
            -2182.setOffline(false)
            -2183.setSplit(false)
            -2184.build();
            -2185Put p = 
            MetaTableAccessor.makePutFromRegionInfo(hri, 
            EnvironmentEdgeManager.currentTime());
            -2186mutations.add(p);
            -2187
            -2188meta.mutateRow(mutations);
            -2189LOG.info("Reset split parent " + 
            hi.metaEntry.getRegionNameAsString() + " in META" );
            -2190  }
            -2191
            -2192  /**
            -2193   * This backwards-compatibility 
            wrapper for permanently offlining a region
            -2194   * that should not be alive.  If the 
            region server does not support the
            -2195   * "offline" method, it will use the 
            closest unassign method instead.  This
            -2196   * will basically work until one 
            attempts to disable or delete the affected
            -2197   * table.  The problem has to do with 
            in-memory only master state, so
            -2198   * restarting the HMaster or failing 
            over to another should fix this.
            -2199   */
            -2200  private void offline(byte[] 
            regionName) throws IOException {
            -2201String regionString = 
            Bytes.toStringBinary(regionName);
            -2202if (!rsSupportsOffline) {
            -2203  LOG.warn("Using unassign region " 
            + 
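
             The preCheckPermission body above reduces to: list every file under the
             HBase root directory and fail fast if the current user cannot write one
             of them. A standalone sketch of the same check, using Hadoop's public
             FileSystem.access call in place of the internal FSUtils.checkAccess
             helper (rootDir stands for whatever FSUtils.getRootDir returns):

                 import java.io.IOException;
                 import org.apache.hadoop.conf.Configuration;
                 import org.apache.hadoop.fs.FileStatus;
                 import org.apache.hadoop.fs.FileSystem;
                 import org.apache.hadoop.fs.Path;
                 import org.apache.hadoop.fs.permission.FsAction;

                 public class RootDirWriteCheck {
                   /** Throws AccessControlException at the first file the user cannot write. */
                   static void checkWritable(Configuration conf, Path rootDir) throws IOException {
                     FileSystem fs = rootDir.getFileSystem(conf);
                     for (FileStatus file : fs.listStatus(rootDir)) {
                       // FileSystem.access consults the filesystem's permission checker,
                       // mirroring the FSUtils.checkAccess call in the excerpt above.
                       fs.access(file.getPath(), FsAction.WRITE);
                     }
                   }
                 }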

            [12/51] [partial] hbase-site git commit: Published site at 2a2258656b2fcd92b967131b6c1f037363553bc4.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e0fb1fde/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
            --
            diff --git 
            a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
             
            b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
            index 50caf18..61bf913 100644
            --- 
            a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
            +++ 
            b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
            @@ -45,773 +45,774 @@
             037import java.util.TimeZone;
             038import java.util.concurrent.TimeUnit;
             039
            -040import 
            org.apache.commons.cli.CommandLine;
            -041import 
            org.apache.commons.cli.CommandLineParser;
            -042import 
            org.apache.commons.cli.HelpFormatter;
            -043import org.apache.commons.cli.Option;
            -044import 
            org.apache.commons.cli.OptionGroup;
            -045import org.apache.commons.cli.Options;
            -046import 
            org.apache.commons.cli.ParseException;
            -047import 
            org.apache.commons.cli.PosixParser;
            -048import 
            org.apache.commons.lang3.StringUtils;
            -049import 
            org.apache.hadoop.conf.Configuration;
            -050import 
            org.apache.hadoop.conf.Configured;
            -051import org.apache.hadoop.fs.FileSystem;
            -052import org.apache.hadoop.fs.Path;
            -053import org.apache.hadoop.hbase.Cell;
            -054import 
            org.apache.hadoop.hbase.CellComparator;
            -055import 
            org.apache.hadoop.hbase.CellUtil;
            -056import 
            org.apache.hadoop.hbase.HBaseConfiguration;
            -057import 
            org.apache.hadoop.hbase.HBaseInterfaceAudience;
            -058import 
            org.apache.hadoop.hbase.HConstants;
            -059import 
            org.apache.hadoop.hbase.HRegionInfo;
            -060import 
            org.apache.hadoop.hbase.KeyValue;
            -061import 
            org.apache.hadoop.hbase.KeyValueUtil;
            -062import 
            org.apache.hadoop.hbase.PrivateCellUtil;
            -063import 
            org.apache.hadoop.hbase.TableName;
            -064import org.apache.hadoop.hbase.Tag;
            -065import 
            org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
            -066import 
            org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
            -067import 
            org.apache.hadoop.hbase.mob.MobUtils;
            -068import 
            org.apache.hadoop.hbase.regionserver.HStoreFile;
            -069import 
            org.apache.hadoop.hbase.regionserver.TimeRangeTracker;
            -070import 
            org.apache.hadoop.hbase.util.BloomFilter;
            -071import 
            org.apache.hadoop.hbase.util.BloomFilterFactory;
            -072import 
            org.apache.hadoop.hbase.util.BloomFilterUtil;
            -073import 
            org.apache.hadoop.hbase.util.Bytes;
            -074import 
            org.apache.hadoop.hbase.util.FSUtils;
            -075import 
            org.apache.hadoop.hbase.util.HFileArchiveUtil;
            -076import org.apache.hadoop.util.Tool;
            -077import 
            org.apache.hadoop.util.ToolRunner;
            -078import 
            org.apache.yetus.audience.InterfaceAudience;
            -079import 
            org.apache.yetus.audience.InterfaceStability;
            -080import org.slf4j.Logger;
            -081import org.slf4j.LoggerFactory;
            -082
            -083import 
            com.codahale.metrics.ConsoleReporter;
            -084import com.codahale.metrics.Counter;
            -085import com.codahale.metrics.Gauge;
            -086import com.codahale.metrics.Histogram;
            -087import com.codahale.metrics.Meter;
            -088import 
            com.codahale.metrics.MetricFilter;
            -089import 
            com.codahale.metrics.MetricRegistry;
            -090import 
            com.codahale.metrics.ScheduledReporter;
            -091import com.codahale.metrics.Snapshot;
            -092import com.codahale.metrics.Timer;
            -093
            -094/**
            -095 * Implements pretty-printing 
            functionality for {@link HFile}s.
            -096 */
            -097@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
            -098@InterfaceStability.Evolving
            -099public class HFilePrettyPrinter extends 
            Configured implements Tool {
            -100
            -101  private static final Logger LOG = 
            LoggerFactory.getLogger(HFilePrettyPrinter.class);
            -102
            -103  private Options options = new 
            Options();
            -104
            -105  private boolean verbose;
            -106  private boolean printValue;
            -107  private boolean printKey;
            -108  private boolean shouldPrintMeta;
            -109  private boolean printBlockIndex;
            -110  private boolean printBlockHeaders;
            -111  private boolean printStats;
            -112  private boolean checkRow;
            -113  private boolean checkFamily;
            -114  private boolean isSeekToRow = false;
            -115  private boolean checkMobIntegrity = 
            false;
             -116  private Map<String, 
             List<Path>> mobFileLocations;
            -117  private static final int 
            FOUND_MOB_FILES_CACHE_CAPACITY = 50;
            -118  private static final int 
            MISSING_MOB_FILES_CACHE_CAPACITY = 20;
            -119  private PrintStream out = System.out;
            -120  private PrintStream err = System.err;
            -121
            -122  /**
            -123   * The row which the user wants to 
            specify and print all the KeyValues for.
            -124   */
            -125  private byte[] row = null;
            -126
             -127  private List<Path> files = new 
             ArrayList<>();
             -128  private int count;
             -129
             -130  private static final String FOUR_SPACES 
             = "    ";
            -131
            -132  public HFilePrettyPrinter() {
            -133super();
            -134init();
            -135  }
            -136
            -137  public HFilePrettyPrinter(Configuration 
            conf) {
            -138super(conf);
            -139init();
            -140  }
            -141
            -142  private void init() {
            -143options.addOption("v", "verbose", 
            false,
            -144"Verbose output; 

            [12/51] [partial] hbase-site git commit: Published site at e468b4022f76688851b3e0c34722f01a56bd624f.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16541468/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
            --
            diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
            b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
            index d4296e6..d91cb65 100644
            --- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
            +++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
            @@ -363,14 +363,14 @@ service.
             
             
             private TableName
            -MetaTableAccessor.TableVisitorBase.tableName
            -
            -
            -private TableName
             HRegionInfo.tableName
             Deprecated.
             
             
            +
            +private TableName
            +MetaTableAccessor.TableVisitorBase.tableName
            +
             
             
             
            @@ -2065,59 +2065,51 @@ service.
             
             
             private TableName
            -HRegionLocator.tableName
            +AsyncClientScanner.tableName
             
             
            -private TableName
            -ScannerCallableWithReplicas.tableName
            -
            -
             protected TableName
            -ClientScanner.tableName
            -
            -
            -private TableName
            -AsyncClientScanner.tableName
            +RpcRetryingCallerWithReadReplicas.tableName
             
             
             private TableName
            -AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName
            +AsyncProcessTask.tableName
             
             
             private TableName
            -AsyncRpcRetryingCallerFactory.BatchCallerBuilder.tableName
            +AsyncProcessTask.Builder.tableName
             
             
             private TableName
            -RegionInfoBuilder.tableName
            +RegionServerCallable.tableName
             
             
             private TableName
            -RegionInfoBuilder.MutableRegionInfo.tableName
            +AsyncSingleRequestRpcRetryingCaller.tableName
             
             
            -private TableName
            -RawAsyncTableImpl.tableName
            +protected TableName
            +RawAsyncHBaseAdmin.TableProcedureBiConsumer.tableName
             
             
             private TableName
            -RegionCoprocessorRpcChannelImpl.tableName
            +TableState.tableName
             
             
            -private TableName
            -AsyncTableRegionLocatorImpl.tableName
            +protected TableName
            +TableBuilderBase.tableName
             
             
             protected TableName
            -RegionAdminServiceCallable.tableName
            +ClientScanner.tableName
             
             
            -private TableName
            -HTable.tableName
            +protected TableName
            +RegionAdminServiceCallable.tableName
             
             
             private TableName
            -BufferedMutatorImpl.tableName
            +HTable.tableName
             
             
             private TableName
            @@ -2129,47 +2121,55 @@ service.
             
             
             private TableName
            -HBaseAdmin.TableFuture.tableName
            +AsyncTableRegionLocatorImpl.tableName
             
             
             private TableName
            -AsyncRequestFutureImpl.tableName
            +HRegionLocator.tableName
             
             
             private TableName
            -AsyncProcessTask.tableName
            +BufferedMutatorImpl.tableName
             
             
             private TableName
            -AsyncProcessTask.Builder.tableName
            +RawAsyncTableImpl.tableName
             
             
             protected TableName
            -RawAsyncHBaseAdmin.TableProcedureBiConsumer.tableName
            +AsyncTableBuilderBase.tableName
             
             
             private TableName
            -RegionServerCallable.tableName
            +RegionCoprocessorRpcChannelImpl.tableName
             
             
             private TableName
            -AsyncSingleRequestRpcRetryingCaller.tableName
            +ScannerCallableWithReplicas.tableName
             
             
            -protected TableName
            -TableBuilderBase.tableName
            +private TableName
            +HBaseAdmin.TableFuture.tableName
             
             
            -protected TableName
            -RpcRetryingCallerWithReadReplicas.tableName
            +private TableName
            +AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName
             
             
            -protected TableName
            -AsyncTableBuilderBase.tableName
            +private TableName
            +AsyncRpcRetryingCallerFactory.BatchCallerBuilder.tableName
             
             
             private TableName
            -TableState.tableName
            +RegionInfoBuilder.tableName
            +
            +
            +private TableName
            +RegionInfoBuilder.MutableRegionInfo.tableName
            +
            +
            +private TableName
            +AsyncRequestFutureImpl.tableName
             
             
             
            @@ -2211,9 +2211,7 @@ service.
             
             
             TableName
            -AsyncTable.getName()
            -Gets the fully qualified table name instance of this 
            table.
            -
            +AsyncTableImpl.getName()
             
             
             TableName
            @@ -2223,21 +2221,26 @@ service.
             
             
             TableName
            -HRegionLocator.getName()
            +BufferedMutator.getName()
            +Gets the fully qualified table name instance of the table 
            that this BufferedMutator writes to.
            +
             
             
             TableName
            -AsyncTableRegionLocator.getName()
            -Gets the fully qualified table name instance of the table 
            whose region we want to locate.
            +AsyncBufferedMutator.getName()
            +Gets the fully qualified table name instance of the table 
            that this
            + AsyncBufferedMutator writes to.
             
             
             
             TableName
            -AsyncTableImpl.getName()
            +HTable.getName()
             
             
             TableName
            -RawAsyncTableImpl.getName()
            +AsyncTable.getName()
            +Gets the fully qualified table name instance of this 
            table.
            +
             
             
             TableName
            @@ -2245,34 +2248,31 @@ service.
             
             
             TableName
            -BufferedMutator.getName()
            -Gets the fully qualified table name instance of the table 
            that this BufferedMutator writes to.
            -
            +HRegionLocator.getName()
             
             
             TableName
            -RegionLocator.getName()
            -Gets the fully qualified table name instance of this 
            table.
            -
            +BufferedMutatorImpl.getName()
             
             
             TableName
            -AsyncBufferedMutatorImpl.getName()
            +RawAsyncTableImpl.getName()
             
             
             TableName
            -HTable.getName()
            +RegionLocator.getName()
            +Gets the fully qualified table name instance of this 
            table.
            +
             
             
             TableName
            -BufferedMutatorImpl.getName()
            +AsyncTableRegionLocator.getName()
            +Gets the fully qualified table name instance of the table 
            whose region we want to locate.
            +
             
             
             TableName
            

            [12/51] [partial] hbase-site git commit: Published site at 64061f896fe21512504e3886a400759e88b519da.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f1ebf5b6/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionObserver.html
            --
            diff --git 
            a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionObserver.html 
            b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionObserver.html
            index 180e58b..267f485 100644
            --- 
            a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionObserver.html
            +++ 
            b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionObserver.html
            @@ -263,19 +263,19 @@
             
             
              Optional<RegionObserver>
             -ScanModifyingObserver.getRegionObserver()
             +WriteHeavyIncrementObserver.getRegionObserver()
              
              
              Optional<RegionObserver>
             -WriteHeavyIncrementObserver.getRegionObserver()
             +ScanModifyingObserver.getRegionObserver()
              
              
              Optional<RegionObserver>
             -ZooKeeperScanPolicyObserver.getRegionObserver()
             +ValueRewritingObserver.getRegionObserver()
              
              
              Optional<RegionObserver>
             -ValueRewritingObserver.getRegionObserver()
             +ZooKeeperScanPolicyObserver.getRegionObserver()
              
              
              Optional<RegionObserver>
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f1ebf5b6/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
            --
            diff --git 
            a/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
             
            b/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
            index b487fc4..f0fc745 100644
            --- 
            a/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
            +++ 
            b/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
            @@ -339,6 +339,14 @@
             
             
             void
             +ProcedureCoordinatorRpcs.sendAbortToMembers(Procedure procName,
             +  ForeignException cause)
            +Notify the members that the coordinator has aborted the 
            procedure and that it should release
            + barrier resources.
            +
            +
            +
            +void
              ZKProcedureCoordinator.sendAbortToMembers(Procedure proc,
                ForeignException ee)
             This is the abort message being sent by the coordinator to 
            member
            @@ -347,14 +355,6 @@
              coordinator.
             
             
            -
            -void
             -ProcedureCoordinatorRpcs.sendAbortToMembers(Procedure procName,
             -  ForeignException cause)
            -Notify the members that the coordinator has aborted the 
            procedure and that it should release
            - barrier resources.
            -
            -
             
             void
              ProcedureMemberRpcs.sendMemberAborted(Subprocedure sub,
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f1ebf5b6/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
            --
            diff --git 
            a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
             
            b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
            index 49b5557..338b7a4 100644
            --- 
            a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
            +++ 
            b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
            @@ -144,17 +144,15 @@
             
             
             
             -static HColumnDescriptor
             -HColumnDescriptor.parseFrom(byte[] bytes)
             -Deprecated.
             -
             -
             -
              static HTableDescriptor
              HTableDescriptor.parseFrom(byte[] bytes)
              Deprecated.
              
              
             +
             +static ClusterId
             +ClusterId.parseFrom(byte[] bytes)
             +
              
              static HRegionInfo
              HRegionInfo.parseFrom(byte[] bytes)
             @@ -165,8 +163,10 @@
              
              
              
             -static ClusterId
             -ClusterId.parseFrom(byte[] bytes)
             +static HColumnDescriptor
             +HColumnDescriptor.parseFrom(byte[] bytes)
             +Deprecated.
             +
             
             
             static SplitLogTask
            @@ -220,17 +220,17 @@
              TableDescriptorBuilder.ModifyableTableDescriptor.parseFrom(byte[] bytes)
              
              
             +static RegionInfo
             +RegionInfo.parseFrom(byte[] bytes)
             +
             +
              static ColumnFamilyDescriptor
              ColumnFamilyDescriptorBuilder.parseFrom(byte[] pbBytes)
              
             -
             +
              private static ColumnFamilyDescriptor
              ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.parseFrom(byte[] bytes)
              
             -
             -static RegionInfo
             -RegionInfo.parseFrom(byte[] bytes)
             -
              
              static RegionInfo
              RegionInfo.parseFrom(byte[] bytes,
             @@ -305,153 +305,153 @@
              ByteArrayComparable.parseFrom(byte[] pbBytes)
              
             -static ColumnPrefixFilter
             -ColumnPrefixFilter.parseFrom(byte[] pbBytes)
             +static SingleColumnValueExcludeFilter
             +SingleColumnValueExcludeFilter.parseFrom(byte[] pbBytes)
            +SingleColumnValueExcludeFilter.parseFrom(byte[]pbBytes)
             
             
            -static 

            [12/51] [partial] hbase-site git commit: Published site at 4cb40e6d846ce1f28ffb40d388c9efb753197813.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4dc2a2e8/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
            --
            diff --git 
            a/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
             
            b/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
            index a6c6bcc..65d4b29 100644
            --- 
            a/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
            +++ 
            b/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
            @@ -113,17 +113,17 @@
             
             
             
             -private Batch.Callback<CResult>
             -AsyncRequestFutureImpl.callback
             -
             -
              private Batch.Callback<T>
              AsyncProcessTask.callback
              
             -
             +
              private Batch.Callback<T>
              AsyncProcessTask.Builder.callback
              
             +
             +private Batch.Callback<CResult>
             +AsyncRequestFutureImpl.callback
             +
             
             
             
            @@ -148,42 +148,50 @@
             
             
              <R> void
             -Table.batchCallback(List<? extends Row> actions,
             +HTable.batchCallback(List<? extends Row> actions,
               Object[] results,
             - Batch.Callback<R> callback)
             -Same as Table.batch(List, Object[]), but with a callback.
             -
             + Batch.Callback<R> callback)
              
              
              <R> void
             -HTable.batchCallback(List<? extends Row> actions,
             +Table.batchCallback(List<? extends Row> actions,
               Object[] results,
             - Batch.Callback<R> callback)
             + Batch.Callback<R> callback)
             +Same as Table.batch(List, Object[]), but with a callback.
             +
              
              
              <R extends com.google.protobuf.Message> void
             -Table.batchCoprocessorService(com.google.protobuf.Descriptors.MethodDescriptor methodDescriptor,
             +HTable.batchCoprocessorService(com.google.protobuf.Descriptors.MethodDescriptor methodDescriptor,
              com.google.protobuf.Message request,
              byte[] startKey,
              byte[] endKey,
              R responsePrototype,
             -   Batch.Callback<R> callback)
             -Creates an instance of the given Service subclass for each table
             - region spanning the range from the startKey row to endKey row (inclusive), all
             - the invocations to the same region server will be batched into one call.
             -
             +   Batch.Callback<R> callback)
              
              
              <R extends com.google.protobuf.Message> void
             -HTable.batchCoprocessorService(com.google.protobuf.Descriptors.MethodDescriptor methodDescriptor,
             +Table.batchCoprocessorService(com.google.protobuf.Descriptors.MethodDescriptor methodDescriptor,
              com.google.protobuf.Message request,
              byte[] startKey,
              byte[] endKey,
              R responsePrototype,
             -   Batch.Callback<R> callback)
             +   Batch.Callback<R> callback)
             +Creates an instance of the given Service subclass for each table
             + region spanning the range from the startKey row to endKey row (inclusive), all
             + the invocations to the same region server will be batched into one call.
             +
              
              
              <T extends com.google.protobuf.Service, R> void
             +HTable.coprocessorService(Class<T> service,
             +  byte[] startKey,
             +  byte[] endKey,
             +  Batch.Call<T,R> callable,
             +  Batch.Callback<R> callback)
             +
             +
             +<T extends com.google.protobuf.Service, R> void
              Table.coprocessorService(Class<T> service,
                byte[] startKey,
                byte[] endKey,
             @@ -195,14 +203,6 @@
               with each Service instance.
              
              
             -
             -<T extends com.google.protobuf.Service, R> void
             -HTable.coprocessorService(Class<T> service,
             -  byte[] startKey,
             -  byte[] endKey,
             -  Batch.Call<T,R> callable,
             -  Batch.Callback<R> callback)
             -
              
              static <R> void
              HTable.doBatchWithCallback(List<? extends Row> actions,
            
            

            [12/51] [partial] hbase-site git commit: Published site at 8ab7b20f48951d77945181024f5e15842bc253c4.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6eb695c8/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.RingBufferEventHandler.html
            --
            diff --git 
            a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.RingBufferEventHandler.html
             
            b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.RingBufferEventHandler.html
            index 9971079..03c8b000 100644
            --- 
            a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.RingBufferEventHandler.html
            +++ 
            b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.RingBufferEventHandler.html
            @@ -49,1067 +49,1082 @@
             041import org.apache.hadoop.fs.Path;
             042import 
            org.apache.hadoop.hbase.HConstants;
             043import 
            org.apache.hadoop.hbase.client.RegionInfo;
            -044import 
            org.apache.hadoop.hbase.trace.TraceUtil;
            -045import 
            org.apache.hadoop.hbase.util.Bytes;
            -046import 
            org.apache.hadoop.hbase.util.ClassSize;
            -047import 
            org.apache.hadoop.hbase.util.FSUtils;
            -048import 
            org.apache.hadoop.hbase.util.HasThread;
            -049import 
            org.apache.hadoop.hbase.util.Threads;
            -050import 
            org.apache.hadoop.hbase.wal.FSHLogProvider;
            -051import 
            org.apache.hadoop.hbase.wal.WALEdit;
            -052import 
            org.apache.hadoop.hbase.wal.WALKeyImpl;
            -053import 
            org.apache.hadoop.hbase.wal.WALProvider.Writer;
            -054import 
            org.apache.hadoop.hdfs.DFSOutputStream;
            -055import 
            org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
            -056import 
            org.apache.hadoop.hdfs.protocol.DatanodeInfo;
            -057import 
            org.apache.htrace.core.TraceScope;
            -058import 
            org.apache.yetus.audience.InterfaceAudience;
            -059import org.slf4j.Logger;
            -060import org.slf4j.LoggerFactory;
            -061import 
            org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
            -062
            -063/**
            -064 * The default implementation of FSWAL.
            -065 */
            -066@InterfaceAudience.Private
             -067public class FSHLog extends 
             AbstractFSWAL<Writer> {
            -068  // IMPLEMENTATION NOTES:
            -069  //
            -070  // At the core is a ring buffer. Our 
            ring buffer is the LMAX Disruptor. It tries to
            -071  // minimize synchronizations and 
            volatile writes when multiple contending threads as is the case
            -072  // here appending and syncing on a 
            single WAL. The Disruptor is configured to handle multiple
            -073  // producers but it has one consumer 
            only (the producers in HBase are IPC Handlers calling append
            -074  // and then sync). The single 
            consumer/writer pulls the appends and syncs off the ring buffer.
            -075  // When a handler calls sync, it is 
            given back a future. The producer 'blocks' on the future so
            -076  // it does not return until the sync 
            completes. The future is passed over the ring buffer from
            -077  // the producer/handler to the consumer 
            thread where it does its best to batch up the producer
            -078  // syncs so one WAL sync actually spans 
            multiple producer sync invocations. How well the
            -079  // batching works depends on the write 
            rate; i.e. we tend to batch more in times of
            -080  // high writes/syncs.
            -081  //
            -082  // Calls to append now also wait until 
            the append has been done on the consumer side of the
            -083  // disruptor. We used to not wait but 
            it makes the implementation easier to grok if we have
            -084  // the region edit/sequence id after 
            the append returns.
            -085  //
            -086  // TODO: Handlers need to coordinate 
            appending AND syncing. Can we have the threads contend
            -087  // once only? Probably hard given syncs 
            take way longer than an append.
            -088  //
            -089  // The consumer threads pass the syncs 
            off to multiple syncing threads in a round robin fashion
            -090  // to ensure we keep up back-to-back FS 
            sync calls (FS sync calls are the long poll writing the
            -091  // WAL). The consumer thread passes the 
            futures to the sync threads for it to complete
            -092  // the futures when done.
            -093  //
            -094  // The 'sequence' in the below is the 
            sequence of the append/sync on the ringbuffer. It
            -095  // acts as a sort-of transaction id. It 
            is always incrementing.
            -096  //
            -097  // The RingBufferEventHandler class 
            hosts the ring buffer consuming code. The threads that
            -098  // do the actual FS sync are 
            implementations of SyncRunner. SafePointZigZagLatch is a
            -099  // synchronization class used to halt 
            the consumer at a safe point -- just after all outstanding
            -100  // syncs and appends have completed -- 
            so the log roller can swap the WAL out under it.
            -101  //
            -102  // We use ring buffer sequence as txid 
            of FSWALEntry and SyncFuture.
            -103  private static final Logger LOG = 
            LoggerFactory.getLogger(FSHLog.class);
            -104
            -105  /**
            -106   * The nexus at which all incoming 
            handlers meet. Does appends and sync with an ordering. Appends
            -107   * and syncs are each put on the ring 
            which means handlers need to smash up against the ring twice
            -108   * (can we make it once only? ... maybe 
            not since time to append is so different from time to sync
            -109   * and sometimes we don't want to sync 
            or we want to async the sync). The ring is where we make
            -110   * 
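
             The implementation notes above boil down to one shape: handler threads
             publish appends and syncs onto a ring buffer and block on a future, a
             single consumer drains the buffer, and one filesystem sync completes a
             whole batch of futures, so batching improves as the write rate rises. A
             minimal sketch of that shape, with a BlockingQueue standing in for the
             LMAX Disruptor ring buffer and CompletableFuture for SyncFuture:

                 import java.util.ArrayList;
                 import java.util.List;
                 import java.util.concurrent.ArrayBlockingQueue;
                 import java.util.concurrent.BlockingQueue;
                 import java.util.concurrent.CompletableFuture;

                 public class MiniWal {
                   private final BlockingQueue<CompletableFuture<Void>> ring =
                       new ArrayBlockingQueue<>(1024);

                   /** Handler side: publish a sync request and block until it is durable. */
                   public void sync() throws Exception {
                     CompletableFuture<Void> future = new CompletableFuture<>();
                     ring.put(future);
                     future.get(); // 'blocks' on the future, as the notes above describe
                   }

                   /** Single consumer: drain what is queued so one real sync spans many requests. */
                   public void consumerLoop() throws InterruptedException {
                     List<CompletableFuture<Void>> batch = new ArrayList<>();
                     while (true) {
                       batch.add(ring.take());  // wait for at least one request
                       ring.drainTo(batch);     // batch up everything else that piled up
                       doFilesystemSync();      // one sync call covers the whole batch
                       batch.forEach(f -> f.complete(null));
                       batch.clear();
                     }
                   }

                   private void doFilesystemSync() { /* hflush/hsync would go here */ }
                 }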

            [12/51] [partial] hbase-site git commit: Published site at 00095a2ef9442e3fd86c04876c9d91f2f8b23ad8.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bd675fa3/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.html
            --
            diff --git 
            a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.html
             
            b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.html
            index d654af2..3cec2fd 100644
            --- 
            a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.html
            +++ 
            b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.html
            @@ -29,312 +29,289 @@
             021import java.io.IOException;
             022import java.util.ArrayList;
             023import java.util.Collection;
            -024import java.util.HashMap;
            -025import java.util.List;
            -026import java.util.concurrent.Callable;
            -027import 
            java.util.concurrent.ExecutionException;
            -028import java.util.concurrent.Executors;
            -029import java.util.concurrent.TimeUnit;
            -030import 
            org.apache.hadoop.conf.Configuration;
            -031import 
            org.apache.hadoop.hbase.ClusterMetrics;
            -032import 
            org.apache.hadoop.hbase.HDFSBlocksDistribution;
            -033import 
            org.apache.hadoop.hbase.ServerName;
            -034import 
            org.apache.hadoop.hbase.TableName;
            -035import 
            org.apache.hadoop.hbase.client.RegionInfo;
            -036import 
            org.apache.hadoop.hbase.client.TableDescriptor;
            -037import 
            org.apache.hadoop.hbase.master.MasterServices;
            -038import 
            org.apache.hadoop.hbase.master.assignment.AssignmentManager;
            -039import 
            org.apache.hadoop.hbase.regionserver.HRegion;
            -040import 
            org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
            -041import 
            org.apache.yetus.audience.InterfaceAudience;
            -042import org.slf4j.Logger;
            -043import org.slf4j.LoggerFactory;
            -044import 
            org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;
            -045import 
            org.apache.hbase.thirdparty.com.google.common.cache.CacheLoader;
            -046import 
            org.apache.hbase.thirdparty.com.google.common.cache.LoadingCache;
            -047import 
            org.apache.hbase.thirdparty.com.google.common.collect.Lists;
            -048import 
            org.apache.hbase.thirdparty.com.google.common.util.concurrent.Futures;
            -049import 
            org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListenableFuture;
            -050import 
            org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListeningExecutorService;
            -051import 
            org.apache.hbase.thirdparty.com.google.common.util.concurrent.MoreExecutors;
            -052import 
            org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
            -053
            -054/**
            -055 * This will find where data for a region 
            is located in HDFS. It ranks
            -056 * {@link ServerName}'s by the size of 
            the store files they are holding for a
            -057 * given region.
            -058 *
            -059 */
            -060@InterfaceAudience.Private
            -061class RegionLocationFinder {
            -062  private static final Logger LOG = 
            LoggerFactory.getLogger(RegionLocationFinder.class);
            -063  private static final long CACHE_TIME = 
            240 * 60 * 1000;
            -064  private static final 
            HDFSBlocksDistribution EMPTY_BLOCK_DISTRIBUTION = new 
            HDFSBlocksDistribution();
            -065  private Configuration conf;
            -066  private volatile ClusterMetrics 
            status;
            -067  private MasterServices services;
            -068  private final ListeningExecutorService 
            executor;
            -069  // Do not scheduleFullRefresh at master 
            startup
            -070  private long lastFullRefresh = 
            EnvironmentEdgeManager.currentTime();
            -071
             -072  private CacheLoader<RegionInfo, 
             HDFSBlocksDistribution> loader =
             -073  new CacheLoader<RegionInfo, 
             HDFSBlocksDistribution>() {
             -074
             -075@Override
             -076public 
             ListenableFuture<HDFSBlocksDistribution> reload(final RegionInfo hri,
             -077HDFSBlocksDistribution oldValue) 
             throws Exception {
             -078  return executor.submit(new 
             Callable<HDFSBlocksDistribution>() {
            -079@Override
            -080public HDFSBlocksDistribution 
            call() throws Exception {
            -081  return 
            internalGetTopBlockLocation(hri);
            -082}
            -083  });
            -084}
            -085
            -086@Override
            -087public HDFSBlocksDistribution 
            load(RegionInfo key) throws Exception {
            -088  return 
            internalGetTopBlockLocation(key);
            -089}
            -090  };
            +024import java.util.Collections;
            +025import java.util.HashMap;
            +026import java.util.List;
            +027import java.util.Map;
            +028import java.util.concurrent.Callable;
            +029import 
            java.util.concurrent.ExecutionException;
            +030import java.util.concurrent.Executors;
            +031import java.util.concurrent.TimeUnit;
            +032
            +033import 
            org.apache.commons.collections4.CollectionUtils;
            +034import 
            org.apache.commons.collections4.MultiValuedMap;
            +035import 
            org.apache.commons.collections4.multimap.ArrayListValuedHashMap;
            +036import 
            org.apache.hadoop.conf.Configuration;
            +037import 
            org.apache.hadoop.hbase.ClusterMetrics;
            +038import 
            org.apache.hadoop.hbase.HDFSBlocksDistribution;
            +039import 
            org.apache.hadoop.hbase.ServerName;
            +040import 
            org.apache.hadoop.hbase.TableName;
            +041import 
            org.apache.hadoop.hbase.client.RegionInfo;
            +042import 
            org.apache.hadoop.hbase.client.TableDescriptor;
            +043import 
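
             The loader above is the standard asynchronous-refresh idiom for a Guava
             LoadingCache: load computes inline on first access, while reload hands
             the recomputation to an executor so readers keep getting the old value
             during a refresh. A self-contained sketch using plain Guava rather than
             the shaded org.apache.hbase.thirdparty copy, with a String key standing
             in for RegionInfo:

                 import java.util.concurrent.Executors;
                 import java.util.concurrent.TimeUnit;
                 import com.google.common.cache.CacheBuilder;
                 import com.google.common.cache.CacheLoader;
                 import com.google.common.cache.LoadingCache;
                 import com.google.common.util.concurrent.ListenableFuture;
                 import com.google.common.util.concurrent.ListeningExecutorService;
                 import com.google.common.util.concurrent.MoreExecutors;

                 public class AsyncReloadCache {
                   private final ListeningExecutorService executor =
                       MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(4));

                   final LoadingCache<String, Long> cache = CacheBuilder.newBuilder()
                       .refreshAfterWrite(240, TimeUnit.MINUTES) // mirrors CACHE_TIME above
                       .build(new CacheLoader<String, Long>() {
                         @Override
                         public Long load(String key) {
                           return expensiveComputation(key); // first access computes inline
                         }

                         @Override
                         public ListenableFuture<Long> reload(String key, Long oldValue) {
                           // Refreshes run on the executor; readers see oldValue meanwhile.
                           return executor.submit(() -> expensiveComputation(key));
                         }
                       });

                   private long expensiveComputation(String key) {
                     return key.length(); // stand-in for internalGetTopBlockLocation()
                   }
                 }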
            

            [12/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
            --
            diff --git 
            a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
             
            b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
            index b99f924..2bb6cea 100644
            --- 
            a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
            +++ 
            b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
            @@ -37,1779 +37,1734 @@
 029import java.util.UUID;
 030import java.util.concurrent.ConcurrentHashMap;
 031import java.util.concurrent.ConcurrentMap;
-032import java.util.regex.Matcher;
-033
-034import org.apache.commons.collections4.map.AbstractReferenceMap;
-035import org.apache.commons.collections4.map.ReferenceMap;
-036import org.apache.hadoop.conf.Configuration;
-037import org.apache.hadoop.fs.FileSystem;
-038import org.apache.hadoop.fs.Path;
-039import org.apache.hadoop.hbase.Cell;
-040import org.apache.hadoop.hbase.CompareOperator;
-041import org.apache.hadoop.hbase.Coprocessor;
-042import org.apache.hadoop.hbase.HBaseConfiguration;
-043import org.apache.hadoop.hbase.HConstants;
-044import org.apache.hadoop.hbase.RawCellBuilder;
-045import org.apache.hadoop.hbase.RawCellBuilderFactory;
-046import org.apache.hadoop.hbase.ServerName;
-047import org.apache.hadoop.hbase.SharedConnection;
-048import org.apache.hadoop.hbase.client.Append;
-049import org.apache.hadoop.hbase.client.Connection;
-050import org.apache.hadoop.hbase.client.Delete;
-051import org.apache.hadoop.hbase.client.Durability;
-052import org.apache.hadoop.hbase.client.Get;
-053import org.apache.hadoop.hbase.client.Increment;
-054import org.apache.hadoop.hbase.client.Mutation;
-055import org.apache.hadoop.hbase.client.Put;
-056import org.apache.hadoop.hbase.client.RegionInfo;
-057import org.apache.hadoop.hbase.client.Result;
-058import org.apache.hadoop.hbase.client.Scan;
-059import org.apache.hadoop.hbase.client.TableDescriptor;
-060import org.apache.hadoop.hbase.coprocessor.BaseEnvironment;
-061import org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
-062import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
-063import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-064import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
-065import org.apache.hadoop.hbase.coprocessor.CoprocessorServiceBackwardCompatiblity;
-066import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
-067import org.apache.hadoop.hbase.coprocessor.EndpointObserver;
-068import org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
-069import org.apache.hadoop.hbase.coprocessor.MetricsCoprocessor;
-070import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-071import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
-072import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-073import org.apache.hadoop.hbase.coprocessor.RegionObserver;
-074import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-075import org.apache.hadoop.hbase.filter.ByteArrayComparable;
-076import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-077import org.apache.hadoop.hbase.io.Reference;
-078import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-079import org.apache.hadoop.hbase.metrics.MetricRegistry;
-080import org.apache.hadoop.hbase.regionserver.Region.Operation;
-081import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
-082import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
-083import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
-084import org.apache.hadoop.hbase.security.User;
-085import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-086import org.apache.hadoop.hbase.util.Bytes;
-087import org.apache.hadoop.hbase.util.CoprocessorClassLoader;
-088import org.apache.hadoop.hbase.util.Pair;
-089import org.apache.hadoop.hbase.wal.WALEdit;
-090import org.apache.hadoop.hbase.wal.WALKey;
-091import org.apache.yetus.audience.InterfaceAudience;
-092import org.slf4j.Logger;
-093import org.slf4j.LoggerFactory;
-094
-095/**
-096 * Implements the coprocessor environment and runtime support for coprocessors
-097 * loaded within a {@link Region}.
-098 */
-099@InterfaceAudience.Private
-100public class RegionCoprocessorHost
-101    extends CoprocessorHost<RegionCoprocessor, RegionCoprocessorEnvironment> {
-102
-103  private static final Logger LOG = LoggerFactory.getLogger(RegionCoprocessorHost.class);
-104  // The shared data map
-105  private static final ReferenceMap<String, ConcurrentMap<String, Object>> SHARED_DATA_MAP =
-106      new 
            

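The class comment above ("Implements the coprocessor environment and runtime support for coprocessors loaded within a Region") is easier to follow with a concrete observer in hand. A minimal sketch, assuming only the public RegionCoprocessor/RegionObserver interfaces already visible in the import list; the class name ExampleRegionObserver is hypothetical, not part of HBase:

    import java.io.IOException;
    import java.util.Optional;
    import org.apache.hadoop.hbase.client.Durability;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.coprocessor.ObserverContext;
    import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
    import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
    import org.apache.hadoop.hbase.coprocessor.RegionObserver;
    import org.apache.hadoop.hbase.wal.WALEdit;

    // Hypothetical observer: RegionCoprocessorHost discovers it through
    // getRegionObserver() and invokes its hooks around region operations.
    public class ExampleRegionObserver implements RegionCoprocessor, RegionObserver {

      @Override
      public Optional<RegionObserver> getRegionObserver() {
        return Optional.of(this);
      }

      @Override
      public void prePut(ObserverContext<RegionCoprocessorEnvironment> ctx, Put put,
          WALEdit edit, Durability durability) throws IOException {
        // Runs before every Put on the hosting region; throwing aborts the write.
      }
    }

Classes of this shape, loaded per-table or via the hbase.coprocessor.region.classes configuration, are what RegionCoprocessorHost wraps in the environments it manages.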
            [12/51] [partial] hbase-site git commit: Published site at 31da4d0bce69b3a47066a5df675756087ce4dc60.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a754d895/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
index 1546b5d..31c6fd0 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
            @@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
             
             
             
            -private static class HRegionServer.CompactionChecker
            +private static class HRegionServer.CompactionChecker
             extends ScheduledChore
             
             
            @@ -233,7 +233,7 @@ extends 
             
             instance
-private final HRegionServer instance
+private final HRegionServer instance
             
             
             
            @@ -242,7 +242,7 @@ extends 
             
             majorCompactPriority
-private final int majorCompactPriority
+private final int majorCompactPriority
             
             
             
            @@ -251,7 +251,7 @@ extends 
             
             DEFAULT_PRIORITY
-private static final int DEFAULT_PRIORITY
+private static final int DEFAULT_PRIORITY
             
             See Also:
             Constant
             Field Values
            @@ -264,7 +264,7 @@ extends 
             
             iteration
-private long iteration
+private long iteration
             
             
             
            @@ -281,7 +281,7 @@ extends 
             
             CompactionChecker
-CompactionChecker(HRegionServer h,
+CompactionChecker(HRegionServer h,
   int sleepTime,
   Stoppable stopper)
             
            @@ -300,7 +300,7 @@ extends 
             
             chore
-protected void chore()
+protected void chore()
 Description copied from class: ScheduledChore
 The task to execute on each scheduled execution of the Chore
             
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a754d895/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
index 38486a8..1239bd7 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
            @@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
             
             
             
            -private static class HRegionServer.MovedRegionInfo
            +private static class HRegionServer.MovedRegionInfo
 extends Object
             
             
@@ -218,7 +218,7 @@ extends Object
 
 
 serverName
-private final ServerName serverName
+private final ServerName serverName
             
             
             
@@ -227,7 +227,7 @@ extends Object
 
 
 seqNum
-private final long seqNum
+private final long seqNum
             
             
             
@@ -236,7 +236,7 @@ extends Object
 
 
 ts
-private final long ts
+private final long ts
             
             
             
@@ -253,7 +253,7 @@ extends Object
 
 
 MovedRegionInfo
-public MovedRegionInfo(ServerName serverName,
+public MovedRegionInfo(ServerName serverName,
    long closeSeqNum)
             
             
@@ -271,7 +271,7 @@ extends Object
 
 
 getServerName
-public ServerName getServerName()
+public ServerName getServerName()
             
             
             
@@ -280,7 +280,7 @@ extends Object
 
 
 getSeqNum
-public long getSeqNum()
+public long getSeqNum()
             
             
             
@@ -289,7 +289,7 @@ extends Object
 
 
 getMoveTime
-public long getMoveTime()
+public long getMoveTime()
             
             
             
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a754d895/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
index 95037fd..01fb6e7 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
            @@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
             
             
             
            -protected static final class HRegionServer.MovedRegionsCleaner
            +protected static final class HRegionServer.MovedRegionsCleaner
             extends ScheduledChore
             implements Stoppable
 Creates a Chore thread to clean the moved region cache.
            @@ -242,7 +242,7 @@ implements 
             
             regionServer
-private HRegionServer regionServer
+private HRegionServer regionServer
             
             
             
            @@ -251,7 +251,7 @@ implements 
             
             

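Both CompactionChecker and MovedRegionsCleaner in the hunks above are ScheduledChore subclasses, and chore() is the one method a subclass must supply ("The task to execute on each scheduled execution of the Chore"). A minimal sketch of that shape; the chore name and body are hypothetical:

    import org.apache.hadoop.hbase.ScheduledChore;
    import org.apache.hadoop.hbase.Stoppable;

    // Hypothetical chore: the ChoreService invokes chore() once per period
    // until the stopper reports it has been stopped.
    class HeartbeatLogChore extends ScheduledChore {
      HeartbeatLogChore(Stoppable stopper, int periodMillis) {
        super("HeartbeatLogChore", stopper, periodMillis);
      }

      @Override
      protected void chore() {
        // Keep this short and non-blocking; chores share a thread pool.
        System.out.println("periodic housekeeping tick");
      }
    }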
            [12/51] [partial] hbase-site git commit: Published site at 6b77786dfc46d25ac5bb5f1c8a4a9eb47b52a604.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/81cde4ce/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index ef30022..abeccf1 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -162,11 +162,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
             
             
             ImmutableBytesWritable
            -TableSnapshotInputFormat.TableSnapshotRecordReader.createKey()
            +TableRecordReader.createKey()
             
             
             ImmutableBytesWritable
            -TableRecordReader.createKey()
            +TableSnapshotInputFormat.TableSnapshotRecordReader.createKey()
             
             
             ImmutableBytesWritable
@@ -183,9 +183,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader<ImmutableBytesWritable,Result>
-TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
    org.apache.hadoop.mapred.JobConf job,
-   org.apache.hadoop.mapred.Reporter reporter)
+   org.apache.hadoop.mapred.Reporter reporter)
+Builds a TableRecordReader.
+
             
             
             org.apache.hadoop.mapred.RecordReaderImmutableBytesWritable,Result
@@ -195,11 +197,9 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader<ImmutableBytesWritable,Result>
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
    org.apache.hadoop.mapred.JobConf job,
-   org.apache.hadoop.mapred.Reporter reporter)
-Builds a TableRecordReader.
-
+   org.apache.hadoop.mapred.Reporter reporter)
             
             
             
@@ -218,10 +218,12 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-   Result values,
+IdentityTableMap.map(ImmutableBytesWritable key,
+   Result value,
    org.apache.hadoop.mapred.OutputCollector<ImmutableBytesWritable,Result> output,
-   org.apache.hadoop.mapred.Reporter reporter)
+   org.apache.hadoop.mapred.Reporter reporter)
+Pass the key, value to reduce
+
             
             
             void
@@ -234,21 +236,19 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-   Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+   Result values,
    org.apache.hadoop.mapred.OutputCollector<ImmutableBytesWritable,Result> output,
-   org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
+   org.apache.hadoop.mapred.Reporter reporter)
             
             
             boolean
-TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
+TableRecordReader.next(ImmutableBytesWritable key,
    Result value)
 
 
 boolean
-TableRecordReader.next(ImmutableBytesWritable key,
+TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
    Result value)
             
             
@@ -281,10 +281,12 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-   Result values,
+IdentityTableMap.map(ImmutableBytesWritable key,
+   Result value,
    org.apache.hadoop.mapred.OutputCollector<ImmutableBytesWritable,Result> output,
-   org.apache.hadoop.mapred.Reporter reporter)
+   org.apache.hadoop.mapred.Reporter reporter)
+Pass the key, value to reduce
+
             
             
             void
@@ -297,12 +299,10 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-   Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+   Result values,
    org.apache.hadoop.mapred.OutputCollector<ImmutableBytesWritable,Result> output,
-   org.apache.hadoop.mapred.Reporter reporter)
+   org.apache.hadoop.mapred.Reporter reporter)
             
             
             void
@@ -349,7 +349,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
             
             
             private ImmutableBytesWritable
            -TableRecordReaderImpl.key
            +MultithreadedTableMapper.SubMapRecordReader.key
             
             
             private ImmutableBytesWritable
            @@ -357,7 +357,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
            utility methods.
             
             
             private ImmutableBytesWritable
            -MultithreadedTableMapper.SubMapRecordReader.key
            +TableRecordReaderImpl.key
             
             
             (package private) ImmutableBytesWritable
@@ -427,33 +427,33 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
             
             
             ImmutableBytesWritable
            -TableSnapshotInputFormat.TableSnapshotRegionRecordReader.getCurrentKey()
            

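The swapped rows above all belong to the old org.apache.hadoop.mapred API, and the "Pass the key, value to reduce" description on IdentityTableMap.map summarizes its whole contract: each scanned row arrives as an (ImmutableBytesWritable, Result) pair and is forwarded to the collector. A minimal sketch with the same shape; the class name PassThroughTableMap is hypothetical:

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
    import org.apache.hadoop.hbase.mapred.TableMap;
    import org.apache.hadoop.mapred.MapReduceBase;
    import org.apache.hadoop.mapred.OutputCollector;
    import org.apache.hadoop.mapred.Reporter;

    public class PassThroughTableMap extends MapReduceBase
        implements TableMap<ImmutableBytesWritable, Result> {

      @Override
      public void map(ImmutableBytesWritable key, Result value,
          OutputCollector<ImmutableBytesWritable, Result> output, Reporter reporter)
          throws IOException {
        output.collect(key, value); // pass the key, value to reduce unchanged
      }
    }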
            [12/51] [partial] hbase-site git commit: Published site at 1384da71375427b522b09f06862bb5d629cef52f.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d347bde8/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
index 338b7a4..49b5557 100644
--- a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
+++ b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
            @@ -144,14 +144,16 @@
             
             
             
-static HTableDescriptor
-HTableDescriptor.parseFrom(byte[] bytes)
+static HColumnDescriptor
+HColumnDescriptor.parseFrom(byte[] bytes)
 Deprecated.
 
 
 
-static ClusterId
-ClusterId.parseFrom(byte[] bytes)
+static HTableDescriptor
+HTableDescriptor.parseFrom(byte[] bytes)
            +Deprecated.
            +
             
             
             static HRegionInfo
            @@ -163,10 +165,8 @@
             
             
             
-static HColumnDescriptor
-HColumnDescriptor.parseFrom(byte[] bytes)
-Deprecated.
-
+static ClusterId
+ClusterId.parseFrom(byte[] bytes)
             
             
             static SplitLogTask
            @@ -220,17 +220,17 @@
 TableDescriptorBuilder.ModifyableTableDescriptor.parseFrom(byte[] bytes)
             
             
-static RegionInfo
-RegionInfo.parseFrom(byte[] bytes)
-
-
 static ColumnFamilyDescriptor
 ColumnFamilyDescriptorBuilder.parseFrom(byte[] pbBytes)
 
-
+
 private static ColumnFamilyDescriptor
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.parseFrom(byte[] bytes)
 
+
+static RegionInfo
+RegionInfo.parseFrom(byte[] bytes)
+
             
             static RegionInfo
 RegionInfo.parseFrom(byte[] bytes,
            @@ -305,151 +305,151 @@
 ByteArrayComparable.parseFrom(byte[] pbBytes)
             
             
-static SingleColumnValueExcludeFilter
-SingleColumnValueExcludeFilter.parseFrom(byte[] pbBytes)
+static ColumnPrefixFilter
+ColumnPrefixFilter.parseFrom(byte[] pbBytes)
 
 
-static ValueFilter
-ValueFilter.parseFrom(byte[] pbBytes)
+static ColumnCountGetFilter
+ColumnCountGetFilter.parseFrom(byte[] pbBytes)
 
 
-static SkipFilter
-SkipFilter.parseFrom(byte[] pbBytes)
+static RowFilter
+RowFilter.parseFrom(byte[] pbBytes)
 
 
-static FamilyFilter
-FamilyFilter.parseFrom(byte[] pbBytes)
+static FuzzyRowFilter
+FuzzyRowFilter.parseFrom(byte[] pbBytes)
 
 
-static BinaryPrefixComparator
-BinaryPrefixComparator.parseFrom(byte[] pbBytes)
+static BinaryComparator
+BinaryComparator.parseFrom(byte[] pbBytes)
 
 
-static NullComparator
-NullComparator.parseFrom(byte[] pbBytes)
+static RegexStringComparator
+RegexStringComparator.parseFrom(byte[] pbBytes)
 
 
-static BigDecimalComparator
-BigDecimalComparator.parseFrom(byte[] pbBytes)
+static Filter
+Filter.parseFrom(byte[] pbBytes)
+Concrete implementers can signal a failure condition in their code by throwing an IOException.
+
 
 
-static ColumnPrefixFilter
-ColumnPrefixFilter.parseFrom(byte[] pbBytes)
+static RandomRowFilter
+RandomRowFilter.parseFrom(byte[] pbBytes)
 
 
-static PageFilter
-PageFilter.parseFrom(byte[] pbBytes)
+static FirstKeyOnlyFilter
+FirstKeyOnlyFilter.parseFrom(byte[] pbBytes)
 
 
-static BitComparator
-BitComparator.parseFrom(byte[] pbBytes)
+static SkipFilter
+SkipFilter.parseFrom(byte[] pbBytes)
 
 
-static RowFilter
-RowFilter.parseFrom(byte[] pbBytes)
+static BinaryPrefixComparator
+BinaryPrefixComparator.parseFrom(byte[] pbBytes)
 
 
-static ColumnRangeFilter
-ColumnRangeFilter.parseFrom(byte[] pbBytes)
+static TimestampsFilter
+TimestampsFilter.parseFrom(byte[] pbBytes)
 
 
-static ColumnCountGetFilter
-ColumnCountGetFilter.parseFrom(byte[] pbBytes)
+static ValueFilter
+ValueFilter.parseFrom(byte[] pbBytes)
 
 
-static SubstringComparator
-SubstringComparator.parseFrom(byte[] pbBytes)
+static KeyOnlyFilter
+KeyOnlyFilter.parseFrom(byte[] pbBytes)
 
 
-static MultipleColumnPrefixFilter
-MultipleColumnPrefixFilter.parseFrom(byte[] pbBytes)
+static FamilyFilter
+FamilyFilter.parseFrom(byte[] pbBytes)
 
 
-static ColumnPaginationFilter
-ColumnPaginationFilter.parseFrom(byte[] pbBytes)
+static QualifierFilter
+QualifierFilter.parseFrom(byte[] pbBytes)
 
 
-static DependentColumnFilter
-DependentColumnFilter.parseFrom(byte[] pbBytes)
+static FilterList
+FilterList.parseFrom(byte[] pbBytes)
 
 
-static BinaryComparator
-BinaryComparator.parseFrom(byte[] pbBytes)
+static BigDecimalComparator
+BigDecimalComparator.parseFrom(byte[] pbBytes)
 
 
-static InclusiveStopFilter
-InclusiveStopFilter.parseFrom(byte[] pbBytes)
+static ColumnRangeFilter
+ColumnRangeFilter.parseFrom(byte[] pbBytes)
 
 
-static KeyOnlyFilter
-KeyOnlyFilter.parseFrom(byte[] pbBytes)
+static ColumnPaginationFilter
+ColumnPaginationFilter.parseFrom(byte[] pbBytes)
 
 
-static MultiRowRangeFilter
-MultiRowRangeFilter.parseFrom(byte[] pbBytes)
+static SubstringComparator
+SubstringComparator.parseFrom(byte[] pbBytes)
 
 
-static Filter
-Filter.parseFrom(byte[] pbBytes)
-Concrete implementers can signal a 

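Every parseFrom in this use table has the same contract: deserialize a protobuf-encoded byte[] and surface corrupt input as DeserializationException. A minimal caller-side sketch using the public RegionInfo.parseFrom shown above:

    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.exceptions.DeserializationException;

    public class ParseRegionInfo {
      // parseFrom translates protobuf parse failures into DeserializationException.
      static RegionInfo parse(byte[] bytes) {
        try {
          return RegionInfo.parseFrom(bytes);
        } catch (DeserializationException e) {
          throw new IllegalStateException("corrupt RegionInfo bytes", e);
        }
      }
    }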
            [12/51] [partial] hbase-site git commit: Published site at b7b86839250bf9b295ebc1948826f43a88736d6c.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6b94a2f2/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.TableVisitorBase.html
            --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.TableVisitorBase.html b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.TableVisitorBase.html
index df5fa53..8fffb89 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.TableVisitorBase.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.TableVisitorBase.html
            @@ -42,1927 +42,2060 @@
 034import java.util.TreeMap;
 035import java.util.regex.Matcher;
 036import java.util.regex.Pattern;
-037import org.apache.hadoop.conf.Configuration;
-038import org.apache.hadoop.hbase.Cell.Type;
-039import org.apache.hadoop.hbase.client.Connection;
-040import org.apache.hadoop.hbase.client.ConnectionFactory;
-041import org.apache.hadoop.hbase.client.Consistency;
-042import org.apache.hadoop.hbase.client.Delete;
-043import org.apache.hadoop.hbase.client.Get;
-044import org.apache.hadoop.hbase.client.Mutation;
-045import org.apache.hadoop.hbase.client.Put;
-046import org.apache.hadoop.hbase.client.RegionInfo;
-047import org.apache.hadoop.hbase.client.RegionInfoBuilder;
-048import org.apache.hadoop.hbase.client.RegionLocator;
-049import org.apache.hadoop.hbase.client.RegionReplicaUtil;
-050import org.apache.hadoop.hbase.client.RegionServerCallable;
-051import org.apache.hadoop.hbase.client.Result;
-052import org.apache.hadoop.hbase.client.ResultScanner;
-053import org.apache.hadoop.hbase.client.Scan;
-054import org.apache.hadoop.hbase.client.Table;
-055import org.apache.hadoop.hbase.client.TableState;
-056import org.apache.hadoop.hbase.exceptions.DeserializationException;
-057import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-058import org.apache.hadoop.hbase.master.RegionState;
-059import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-060import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-061import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
-062import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-063import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
-064import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;
-065import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse;
-066import org.apache.hadoop.hbase.util.Bytes;
-067import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-068import org.apache.hadoop.hbase.util.ExceptionUtil;
-069import org.apache.hadoop.hbase.util.Pair;
-070import org.apache.hadoop.hbase.util.PairOfSameType;
-071import org.apache.yetus.audience.InterfaceAudience;
-072import org.slf4j.Logger;
-073import org.slf4j.LoggerFactory;
-074
-075import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-076
-077/**
-078 * <p>
-079 * Read/write operations on region and assignment information store in <code>hbase:meta</code>.
-080 * </p>
+037import java.util.stream.Collectors;
+038import java.util.stream.Stream;
+039import org.apache.hadoop.conf.Configuration;
+040import org.apache.hadoop.hbase.Cell.Type;
+041import org.apache.hadoop.hbase.client.Connection;
+042import org.apache.hadoop.hbase.client.ConnectionFactory;
+043import org.apache.hadoop.hbase.client.Consistency;
+044import org.apache.hadoop.hbase.client.Delete;
+045import org.apache.hadoop.hbase.client.Get;
+046import org.apache.hadoop.hbase.client.Mutation;
+047import org.apache.hadoop.hbase.client.Put;
+048import org.apache.hadoop.hbase.client.RegionInfo;
+049import org.apache.hadoop.hbase.client.RegionInfoBuilder;
+050import org.apache.hadoop.hbase.client.RegionLocator;
+051import org.apache.hadoop.hbase.client.RegionReplicaUtil;
+052import org.apache.hadoop.hbase.client.RegionServerCallable;
+053import org.apache.hadoop.hbase.client.Result;
+054import org.apache.hadoop.hbase.client.ResultScanner;
+055import org.apache.hadoop.hbase.client.Scan;
+056import org.apache.hadoop.hbase.client.Table;
+057import org.apache.hadoop.hbase.client.TableState;
+058import org.apache.hadoop.hbase.exceptions.DeserializationException;
+059import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
+060import org.apache.hadoop.hbase.master.RegionState;
+061import org.apache.hadoop.hbase.master.RegionState.State;
+062import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+063import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
+064import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
+065import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
+066import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
+067import 
            

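MetaTableAccessor itself is private API, but the hbase:meta table it covers ("Read/write operations on region and assignment information store in hbase:meta") is reachable with the ordinary client classes in the import list above. A minimal sketch that dumps the meta row keys; cluster settings are assumed to come from an hbase-site.xml on the classpath:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ScanMeta {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table meta = conn.getTable(TableName.META_TABLE_NAME);
             ResultScanner scanner = meta.getScanner(new Scan())) {
          for (Result r : scanner) {
            // One row per region: table,startKey,regionId.
            System.out.println(Bytes.toStringBinary(r.getRow()));
          }
        }
      }
    }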
            [12/51] [partial] hbase-site git commit: Published site at 1d25b60831b8cc8f7ad5fd366f1867de5c20d2f3.

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb05e3e3/apidocs/org/apache/hadoop/hbase/client/Query.html
            --
diff --git a/apidocs/org/apache/hadoop/hbase/client/Query.html b/apidocs/org/apache/hadoop/hbase/client/Query.html
            index 3a6a0da..beb9848 100644
            --- a/apidocs/org/apache/hadoop/hbase/client/Query.html
            +++ b/apidocs/org/apache/hadoop/hbase/client/Query.html
            @@ -97,7 +97,7 @@ var activeTableTab = "activeTableTab";
             
             
             
-java.lang.Object (http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html)
+java.lang.Object (https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html)
             
             
             org.apache.hadoop.hbase.client.Operation
            @@ -150,7 +150,7 @@ extends Field and Description
             
             
-protected Map<byte[],TimeRange> (Map: http://docs.oracle.com/javase/8/docs/api/java/util/Map.html)
+protected Map<byte[],TimeRange> (Map: https://docs.oracle.com/javase/8/docs/api/java/util/Map.html)
 colFamTimeRangeMap
             
             
            @@ -162,7 +162,7 @@ extends filter
             
             
-protected Boolean (http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html)
+protected Boolean (https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html)
 loadColumnFamiliesOnDemand
             
             
            @@ -223,7 +223,7 @@ extends getAuthorizations()
             
             
-Map<byte[],TimeRange> (Map: http://docs.oracle.com/javase/8/docs/api/java/util/Map.html)
+Map<byte[],TimeRange> (Map: https://docs.oracle.com/javase/8/docs/api/java/util/Map.html)
 getColumnFamilyTimeRange()
             
             
            @@ -241,7 +241,7 @@ extends getIsolationLevel()
             
             
-Boolean (http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html)
+Boolean (https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html)
 getLoadColumnFamiliesOnDemandValue()
 Get the raw loadColumnFamiliesOnDemand setting; if it's not set, can be null.
             
            @@ -254,11 +254,11 @@ extends 
             Query
-setACL(Map<String,org.apache.hadoop.hbase.security.access.Permission> perms) (Map/String link to http://docs.oracle.com/javase/8/docs/api/)
+setACL(Map<String,org.apache.hadoop.hbase.security.access.Permission> perms) (Map/String link to https://docs.oracle.com/javase/8/docs/api/)
 
 
 Query
-setACL(String user, org.apache.hadoop.hbase.security.access.Permission perms) (String link to http://docs.oracle.com/javase/8/docs/api/)
+setACL(String user, org.apache.hadoop.hbase.security.access.Permission perms) (String link to https://docs.oracle.com/javase/8/docs/api/)
             
             
            @@ -326,8 +326,8 @@ extends 
             
             
-Methods inherited from class java.lang.Object (http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html)
-clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait

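Two of the Query members touched above are easiest to see on Scan, the most common Query subclass: colFamTimeRangeMap is filled through setColumnFamilyTimeRange, and getLoadColumnFamiliesOnDemandValue() returns the raw tri-state Boolean (null until set). A minimal sketch; the family name "cf" is hypothetical:

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    public class QuerySettings {
      public static void main(String[] args) {
        byte[] cf = Bytes.toBytes("cf"); // hypothetical column family
        Scan scan = new Scan();
        // Per-family time range; lands in the colFamTimeRangeMap field above.
        scan.setColumnFamilyTimeRange(cf, 0L, System.currentTimeMillis());
        System.out.println(scan.getLoadColumnFamiliesOnDemandValue()); // null: not set
        scan.setLoadColumnFamiliesOnDemand(true);
        System.out.println(scan.getLoadColumnFamiliesOnDemandValue()); // true
      }
    }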
            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
            --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
index 3152619..27db368 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
            @@ -39,29 +39,29 @@
 031import java.util.concurrent.ExecutorService;
 032import java.util.concurrent.ThreadLocalRandom;
 033import java.util.concurrent.TimeUnit;
-034
-035import org.apache.hadoop.conf.Configuration;
-036import org.apache.hadoop.hbase.Cell;
-037import org.apache.hadoop.hbase.CellComparator;
-038import org.apache.hadoop.hbase.HConstants;
-039import org.apache.hadoop.hbase.MasterNotRunningException;
-040import org.apache.hadoop.hbase.PrivateCellUtil;
-041import org.apache.hadoop.hbase.ServerName;
-042import org.apache.hadoop.hbase.TableName;
-043import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
-044import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-045import org.apache.hadoop.hbase.security.User;
-046import org.apache.hadoop.hbase.security.UserProvider;
-047import org.apache.hadoop.hbase.util.Bytes;
-048import org.apache.hadoop.hbase.util.ReflectionUtils;
-049import org.apache.hadoop.ipc.RemoteException;
-050import org.apache.hadoop.net.DNS;
-051import org.apache.yetus.audience.InterfaceAudience;
-052import org.slf4j.Logger;
-053import org.slf4j.LoggerFactory;
-054import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-055import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-056import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
+034import org.apache.hadoop.conf.Configuration;
+035import org.apache.hadoop.hbase.Cell;
+036import org.apache.hadoop.hbase.CellComparator;
+037import org.apache.hadoop.hbase.HConstants;
+038import org.apache.hadoop.hbase.PrivateCellUtil;
+039import org.apache.hadoop.hbase.ServerName;
+040import org.apache.hadoop.hbase.TableName;
+041import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+042import org.apache.hadoop.hbase.ipc.HBaseRpcController;
+043import org.apache.hadoop.hbase.security.User;
+044import org.apache.hadoop.hbase.security.UserProvider;
+045import org.apache.hadoop.hbase.util.Bytes;
+046import org.apache.hadoop.hbase.util.ReflectionUtils;
+047import org.apache.hadoop.ipc.RemoteException;
+048import org.apache.hadoop.net.DNS;
+049import org.apache.yetus.audience.InterfaceAudience;
+050import org.slf4j.Logger;
+051import org.slf4j.LoggerFactory;
+052
+053import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+054import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+055import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
+056
 057import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
 058import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
 059import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
@@ -163,11 +163,11 @@
 155    }
 156
 157    @Override
-158    public MasterKeepAliveConnection getKeepAliveMasterService() throws MasterNotRunningException {
+158    public MasterKeepAliveConnection getMaster() throws IOException {
 159      if (this.localHostClient instanceof MasterService.BlockingInterface) {
 160        return new ShortCircuitMasterConnection((MasterService.BlockingInterface)this.localHostClient);
 161      }
-162      return super.getKeepAliveMasterService();
+162      return super.getMaster();
 163    }
 164  }
 165
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
            --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
index 3152619..27db368 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
            @@ -39,29 +39,29 @@
 031import java.util.concurrent.ExecutorService;
 032import java.util.concurrent.ThreadLocalRandom;
 033import java.util.concurrent.TimeUnit;
-034
-035import org.apache.hadoop.conf.Configuration;
-036import org.apache.hadoop.hbase.Cell;
-037import 
            

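The hunk above renames the internal short-circuit accessor from getKeepAliveMasterService() to getMaster(); application code never calls either directly. A minimal sketch of the supported route to the master, through the public Admin API (configuration assumed to come from hbase-site.xml):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class MasterStatus {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
          // Resolves the active master through the same connection machinery
          // that ShortCircuitingClusterConnection short-circuits internally.
          System.out.println(admin.getClusterStatus().getMaster());
        }
      }
    }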
            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/991224b9/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
            --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
index 802b925..a3e80ab 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
            @@ -73,229 +73,229 @@
 065import java.util.concurrent.TimeoutException;
 066import java.util.concurrent.atomic.AtomicBoolean;
 067import java.util.concurrent.atomic.AtomicInteger;
-068import java.util.concurrent.atomic.AtomicLong;
-069import java.util.concurrent.atomic.LongAdder;
-070import java.util.concurrent.locks.Lock;
-071import java.util.concurrent.locks.ReadWriteLock;
-072import java.util.concurrent.locks.ReentrantReadWriteLock;
-073import java.util.function.Function;
-074import org.apache.hadoop.conf.Configuration;
-075import org.apache.hadoop.fs.FileStatus;
-076import org.apache.hadoop.fs.FileSystem;
-077import org.apache.hadoop.fs.LocatedFileStatus;
-078import org.apache.hadoop.fs.Path;
-079import org.apache.hadoop.hbase.Cell;
-080import org.apache.hadoop.hbase.CellBuilderType;
-081import org.apache.hadoop.hbase.CellComparator;
-082import org.apache.hadoop.hbase.CellComparatorImpl;
-083import org.apache.hadoop.hbase.CellScanner;
-084import org.apache.hadoop.hbase.CellUtil;
-085import org.apache.hadoop.hbase.CompareOperator;
-086import org.apache.hadoop.hbase.CompoundConfiguration;
-087import org.apache.hadoop.hbase.DoNotRetryIOException;
-088import org.apache.hadoop.hbase.DroppedSnapshotException;
-089import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
-090import org.apache.hadoop.hbase.HConstants;
-091import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
-092import org.apache.hadoop.hbase.HDFSBlocksDistribution;
-093import org.apache.hadoop.hbase.HRegionInfo;
-094import org.apache.hadoop.hbase.KeyValue;
-095import org.apache.hadoop.hbase.KeyValueUtil;
-096import org.apache.hadoop.hbase.NamespaceDescriptor;
-097import org.apache.hadoop.hbase.NotServingRegionException;
-098import org.apache.hadoop.hbase.PrivateCellUtil;
-099import org.apache.hadoop.hbase.RegionTooBusyException;
-100import org.apache.hadoop.hbase.TableName;
-101import org.apache.hadoop.hbase.Tag;
-102import org.apache.hadoop.hbase.TagUtil;
-103import org.apache.hadoop.hbase.UnknownScannerException;
-104import org.apache.hadoop.hbase.client.Append;
-105import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-106import org.apache.hadoop.hbase.client.CompactionState;
-107import org.apache.hadoop.hbase.client.Delete;
-108import org.apache.hadoop.hbase.client.Durability;
-109import org.apache.hadoop.hbase.client.Get;
-110import org.apache.hadoop.hbase.client.Increment;
-111import org.apache.hadoop.hbase.client.IsolationLevel;
-112import org.apache.hadoop.hbase.client.Mutation;
-113import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;
-114import org.apache.hadoop.hbase.client.Put;
-115import org.apache.hadoop.hbase.client.RegionInfo;
-116import org.apache.hadoop.hbase.client.RegionReplicaUtil;
-117import org.apache.hadoop.hbase.client.Result;
-118import org.apache.hadoop.hbase.client.RowMutations;
-119import org.apache.hadoop.hbase.client.Scan;
-120import org.apache.hadoop.hbase.client.TableDescriptor;
-121import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-122import org.apache.hadoop.hbase.conf.ConfigurationManager;
-123import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
-124import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-125import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
-126import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-127import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-128import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-129import org.apache.hadoop.hbase.filter.ByteArrayComparable;
-130import org.apache.hadoop.hbase.filter.FilterWrapper;
-131import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
-132import org.apache.hadoop.hbase.io.HFileLink;
-133import org.apache.hadoop.hbase.io.HeapSize;
-134import org.apache.hadoop.hbase.io.TimeRange;
-135import org.apache.hadoop.hbase.io.hfile.HFile;
-136import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
-137import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-138import org.apache.hadoop.hbase.ipc.RpcCall;
-139import org.apache.hadoop.hbase.ipc.RpcServer;
-140import org.apache.hadoop.hbase.monitoring.MonitoredTask;
-141import org.apache.hadoop.hbase.monitoring.TaskMonitor;
-142import 
            

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/193b4259/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
            --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
index bd13b53..802b925 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
            @@ -900,7600 +900,7598 @@
             892if 
            (this.getRegionInfo().getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {
             893  status.setStatus("Writing region 
            info on filesystem");
             894  fs.checkRegionInfoOnFilesystem();
            -895} else {
            -896  if (LOG.isDebugEnabled()) {
            -897LOG.debug("Skipping creation of 
            .regioninfo file for " + this.getRegionInfo());
            -898  }
            -899}
            -900
            -901// Initialize all the HStores
            -902status.setStatus("Initializing all 
            the Stores");
            -903long maxSeqId = 
            initializeStores(reporter, status);
            -904this.mvcc.advanceTo(maxSeqId);
            -905if 
            (ServerRegionReplicaUtil.shouldReplayRecoveredEdits(this)) {
            -906  CollectionHStore stores = 
            this.stores.values();
            -907  try {
            -908// update the stores that we are 
            replaying
            -909
            stores.forEach(HStore::startReplayingFromWAL);
            -910// Recover any edits if 
            available.
            -911maxSeqId = Math.max(maxSeqId,
            -912  
            replayRecoveredEditsIfAny(this.fs.getRegionDir(), maxSeqIdInStores, reporter, 
            status));
-913// Make sure mvcc is up to max.
-914this.mvcc.advanceTo(maxSeqId);
-915  } finally {
-916// update the stores that we are done replaying
-917stores.forEach(HStore::stopReplayingFromWAL);
-918  }
-919}
-920this.lastReplayedOpenRegionSeqId = maxSeqId;
+895}
+896
+897// Initialize all the HStores
+898status.setStatus("Initializing all the Stores");
+899long maxSeqId = initializeStores(reporter, status);
+900this.mvcc.advanceTo(maxSeqId);
+901if (ServerRegionReplicaUtil.shouldReplayRecoveredEdits(this)) {
+902  Collection<HStore> stores = this.stores.values();
+903  try {
+904// update the stores that we are replaying
+905stores.forEach(HStore::startReplayingFromWAL);
+906// Recover any edits if available.
+907maxSeqId = Math.max(maxSeqId,
+908  replayRecoveredEditsIfAny(this.fs.getRegionDir(), maxSeqIdInStores, reporter, status));
+909// Make sure mvcc is up to max.
+910this.mvcc.advanceTo(maxSeqId);
+911  } finally {
+912// update the stores that we are done replaying
+913stores.forEach(HStore::stopReplayingFromWAL);
+914  }
+915}
+916this.lastReplayedOpenRegionSeqId = maxSeqId;
+917
+918this.writestate.setReadOnly(ServerRegionReplicaUtil.isReadOnly(this));
+919this.writestate.flushRequested = false;
+920this.writestate.compacting.set(0);
 921
-922this.writestate.setReadOnly(ServerRegionReplicaUtil.isReadOnly(this));
-923this.writestate.flushRequested = false;
-924this.writestate.compacting.set(0);
-925
-926if (this.writestate.writesEnabled) {
-927  // Remove temporary data left over from old regions
-928  status.setStatus("Cleaning up temporary data from old regions");
-929  fs.cleanupTempDir();
-930}
-931
-932if (this.writestate.writesEnabled) {
-933  status.setStatus("Cleaning up detritus from prior splits");
-934  // Get rid of any splits or merges that were lost in-progress.  Clean out
-935  // these directories here on open.  We may be opening a region that was
-936  // being split but we crashed in the middle of it all.
-937  fs.cleanupAnySplitDetritus();
-938  fs.cleanupMergesDir();
-939}
-940
-941// Initialize split policy
-942this.splitPolicy = RegionSplitPolicy.create(this, conf);
-943
-944// Initialize flush policy
-945this.flushPolicy = FlushPolicyFactory.create(this, conf);
-946
-947long lastFlushTime = EnvironmentEdgeManager.currentTime();
-948for (HStore store: stores.values()) {
-949  this.lastStoreFlushTimeMap.put(store, lastFlushTime);
-950}
-951
-952// Use maximum of log sequenceid or that which was found in stores
-953// (particularly if no recovered edits, seqid will be -1).
-954long nextSeqid = maxSeqId;
-955if (this.writestate.writesEnabled) {
-956  nextSeqid = WALSplitter.writeRegionSequenceIdFile(this.fs.getFileSystem(),
-957  this.fs.getRegionDir(), nextSeqid, 1);
-958} else {
-959  nextSeqid++;
-960}
-961
-962LOG.info("Onlined " + this.getRegionInfo().getShortNameToLog() +
-963  "; next sequenceid=" + nextSeqid);
+922if (this.writestate.writesEnabled) {
+923  
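The hunk above centres on the recovered-edits replay bracket in HRegion: every store is switched into replay mode, the edits are applied, and a finally block guarantees replay mode is switched off even when replay throws. The standalone Java sketch below isolates just that bracketing pattern; the Store interface and the fake "replayed seq id" step are stand-ins invented for illustration, not the real HBase types.

import java.util.Arrays;
import java.util.Collection;

// Minimal sketch of the start/stopReplayingFromWAL bracketing seen above.
// Store and the replay step are simplified stand-ins for illustration.
public class ReplayBracketSketch {
  interface Store {
    void startReplayingFromWAL();
    void stopReplayingFromWAL();
  }

  static long replayRecoveredEdits(Collection<Store> stores, long maxSeqId) {
    try {
      // Put every store into replay mode before touching recovered edits.
      stores.forEach(Store::startReplayingFromWAL);
      // Apply edits; the highest sequence id seen wins (stubbed here).
      return Math.max(maxSeqId, maxSeqId + 1);
    } finally {
      // Always leave replay mode, even if replay threw.
      stores.forEach(Store::stopReplayingFromWAL);
    }
  }

  public static void main(String[] args) {
    Store s = new Store() {
      public void startReplayingFromWAL() { System.out.println("start replay"); }
      public void stopReplayingFromWAL() { System.out.println("stop replay"); }
    };
    System.out.println("maxSeqId=" + replayRecoveredEdits(Arrays.asList(s), 41L));
  }
}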

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/94208cfe/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html b/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
index bc89c2d..aa1df59 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
@@ -576,7 +576,7 @@ extends
 
 EXPECTED_SPLIT_STATES
-private static RegionState.State[] EXPECTED_SPLIT_STATES
+private static RegionState.State[] EXPECTED_SPLIT_STATES
 
 
 
@@ -686,7 +686,7 @@ extends
 
 rollbackState
-protected void rollbackState(MasterProcedureEnv env,
+protected void rollbackState(MasterProcedureEnv env,
   org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionState state)
    throws IOException,
           InterruptedException
@@ -710,7 +710,7 @@ extends
 
 isRollbackSupported
-protected boolean isRollbackSupported(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionState state)
+protected boolean isRollbackSupported(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionState state)
 Description copied from class: StateMachineProcedure
 Used by the default implementation of abort() to know if the current state can be aborted
  and rollback can be triggered.
@@ -726,7 +726,7 @@ extends
 
 getState
-protected org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionState getState(int stateId)
+protected org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionState getState(int stateId)
 Description copied from class: StateMachineProcedure
 Convert an ordinal (or state id) to an Enum (or more descriptive) state object.
 
@@ -745,7 +745,7 @@ extends
 
 getStateId
-protected int getStateId(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionState state)
+protected int getStateId(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionState state)
 Description copied from class: StateMachineProcedure
 Convert the Enum (or more descriptive) state object to an ordinal (or state id).
 
@@ -764,7 +764,7 @@ extends
 
 getInitialState
-protected org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionState getInitialState()
+protected org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionState getInitialState()
 Description copied from class: StateMachineProcedure
 Return the initial state object that will be used for the first call to executeFromState().
 
@@ -781,7 +781,7 @@ extends
 
 serializeStateData
-protected void serializeStateData(ProcedureStateSerializer serializer)
+protected void serializeStateData(ProcedureStateSerializer serializer)
    throws IOException
 Description copied from class: Procedure
 The user-level code of the procedure may have some state to
@@ -803,7 +803,7 @@ extends
 
 deserializeStateData
-protected void deserializeStateData(ProcedureStateSerializer serializer)
+protected void deserializeStateData(ProcedureStateSerializer serializer)
    throws IOException
 Description copied from class: Procedure
 Called on store load to allow the user to decode the previously serialized
@@ -824,7 +824,7 @@ extends
 
 toStringClassDetails
-public void toStringClassDetails(StringBuilder sb)
+public void toStringClassDetails(StringBuilder sb)
 Description copied from class: Procedure
 Extend the toString() information with the procedure details
  e.g. className and parameters
@@ -842,7 +842,7 @@ extends
 
 getParentRegion
-private RegionInfo getParentRegion()
+private RegionInfo getParentRegion()
 
 
 
@@ -851,7 +851,7 @@ extends
 
 getTableOperationType
-public TableProcedureInterface.TableOperationType getTableOperationType()
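The signatures above are the standard StateMachineProcedure hooks: getInitialState seeds the execution loop, getState/getStateId translate between the protobuf state enum and its ordinal, and isRollbackSupported gates abort. A compact standalone sketch of that enum-to-ordinal contract follows; the two-value SplitState enum is made up for illustration, standing in for the real MasterProcedureProtos.SplitTableRegionState.

// Standalone sketch of the StateMachineProcedure state-mapping hooks.
// SplitState is hypothetical; the real procedure uses a protobuf enum.
public class StateMappingSketch {
  enum SplitState { SPLIT_TABLE_REGION_PREPARE, SPLIT_TABLE_REGION_PRE_OPERATION }

  static SplitState getState(int stateId) {
    // Convert an ordinal (state id) back to the enum, as getState(int) does.
    return SplitState.values()[stateId];
  }

  static int getStateId(SplitState state) {
    // Convert the enum to its ordinal, as getStateId(state) does.
    return state.ordinal();
  }

  static SplitState getInitialState() {
    // First state handed to executeFromState().
    return SplitState.SPLIT_TABLE_REGION_PREPARE;
  }

  public static void main(String[] args) {
    SplitState s = getInitialState();
    System.out.println(s + " <-> id " + getStateId(s) + " <-> " + getState(getStateId(s)));
  }
}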
            

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cd17dc5/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index 49f85aa..6e37f0b 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -162,11 +162,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 ImmutableBytesWritable
-TableRecordReader.createKey()
+TableSnapshotInputFormat.TableSnapshotRecordReader.createKey()
 
 
 ImmutableBytesWritable
-TableSnapshotInputFormat.TableSnapshotRecordReader.createKey()
+TableRecordReader.createKey()
 
 
 ImmutableBytesWritable
@@ -183,11 +183,9 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader<ImmutableBytesWritable,Result>
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
    org.apache.hadoop.mapred.JobConf job,
-   org.apache.hadoop.mapred.Reporter reporter)
-Builds a TableRecordReader.
-
+   org.apache.hadoop.mapred.Reporter reporter)
 
 
 org.apache.hadoop.mapred.RecordReader<ImmutableBytesWritable,Result>
@@ -197,9 +195,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader<ImmutableBytesWritable,Result>
-TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
    org.apache.hadoop.mapred.JobConf job,
-   org.apache.hadoop.mapred.Reporter reporter)
+   org.apache.hadoop.mapred.Reporter reporter)
+Builds a TableRecordReader.
+
 
 
 
@@ -218,12 +218,10 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-   Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+   Result values,
    org.apache.hadoop.mapred.OutputCollector<ImmutableBytesWritable,Result> output,
-   org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
+   org.apache.hadoop.mapred.Reporter reporter)
 
 
 void
@@ -236,19 +234,21 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-   Result values,
+IdentityTableMap.map(ImmutableBytesWritable key,
+   Result value,
    org.apache.hadoop.mapred.OutputCollector<ImmutableBytesWritable,Result> output,
-   org.apache.hadoop.mapred.Reporter reporter)
+   org.apache.hadoop.mapred.Reporter reporter)
+Pass the key, value to reduce
+
 
 
 boolean
-TableRecordReader.next(ImmutableBytesWritable key,
+TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
     Result value)
 
 
 boolean
-TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
+TableRecordReader.next(ImmutableBytesWritable key,
     Result value)
 
 
@@ -281,12 +281,10 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-   Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+   Result values,
    org.apache.hadoop.mapred.OutputCollector<ImmutableBytesWritable,Result> output,
-   org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
+   org.apache.hadoop.mapred.Reporter reporter)
 
 
 void
@@ -299,10 +297,12 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-   Result values,
+IdentityTableMap.map(ImmutableBytesWritable key,
+   Result value,
    org.apache.hadoop.mapred.OutputCollector<ImmutableBytesWritable,Result> output,
-   org.apache.hadoop.mapred.Reporter reporter)
+   org.apache.hadoop.mapred.Reporter reporter)
+Pass the key, value to reduce
+
 
 
 void
@@ -349,7 +349,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 private ImmutableBytesWritable
-MultithreadedTableMapper.SubMapRecordReader.key
+TableRecordReaderImpl.key
 
 
 private ImmutableBytesWritable
@@ -357,7 +357,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 private ImmutableBytesWritable
-TableRecordReaderImpl.key
+MultithreadedTableMapper.SubMapRecordReader.key
 
 
 (package private) ImmutableBytesWritable
@@ -427,33 +427,33 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 ImmutableBytesWritable
-MultithreadedTableMapper.SubMapRecordReader.getCurrentKey()
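The createKey()/next(key, value) pairs above are the old org.apache.hadoop.mapred record-reader contract: the caller allocates the key/value holders once and next() refills them until it returns false. The self-contained sketch below shows that drive loop with a toy reader standing in for TableRecordReader; none of the names are the real HBase classes.

import java.util.Iterator;
import java.util.List;

// Sketch of the old-style mapred RecordReader drive loop seen above.
// ToyReader is a stand-in; the real classes are TableRecordReader and friends.
public class RecordReaderLoopSketch {
  interface Reader<K, V> {
    K createKey();                 // allocate a reusable key holder
    V createValue();               // allocate a reusable value holder
    boolean next(K key, V value);  // refill holders; false at end of input
  }

  static Reader<StringBuilder, StringBuilder> toyReader(List<String> rows) {
    Iterator<String> it = rows.iterator();
    return new Reader<StringBuilder, StringBuilder>() {
      public StringBuilder createKey() { return new StringBuilder(); }
      public StringBuilder createValue() { return new StringBuilder(); }
      public boolean next(StringBuilder k, StringBuilder v) {
        if (!it.hasNext()) return false;
        String row = it.next();
        k.setLength(0); k.append(row);                     // reuse, don't reallocate
        v.setLength(0); v.append("value-of-").append(row);
        return true;
      }
    };
  }

  public static void main(String[] args) {
    Reader<StringBuilder, StringBuilder> r = toyReader(List.of("row1", "row2"));
    StringBuilder key = r.createKey(), value = r.createValue();
    while (r.next(key, value)) {
      System.out.println(key + " -> " + value);
    }
  }
}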
            

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
index c7d05d1..abcb738 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
@@ -143,18 +143,18 @@
 
 
 void
-HFileDataBlockEncoderImpl.saveMetadata(HFile.Writer writer)
-
-
-void
 NoOpDataBlockEncoder.saveMetadata(HFile.Writer writer)
 
-
+
 void
 HFileDataBlockEncoder.saveMetadata(HFile.Writer writer)
 Save metadata in HFile which will be written to disk
 
 
+
+void
+HFileDataBlockEncoderImpl.saveMetadata(HFile.Writer writer)
+
 
 
 
@@ -203,18 +203,18 @@
 
 
 
-void
-RowColBloomContext.addLastBloomKey(HFile.Writer writer)
+abstract void
+BloomContext.addLastBloomKey(HFile.Writer writer)
+Adds the last bloom key to the HFile Writer as part of StorefileWriter close.
+
 
 
 void
 RowBloomContext.addLastBloomKey(HFile.Writer writer)
 
 
-abstract void
-BloomContext.addLastBloomKey(HFile.Writer writer)
-Adds the last bloom key to the HFile Writer as part of StorefileWriter close.
-
+void
+RowColBloomContext.addLastBloomKey(HFile.Writer writer)
 
 
 static BloomFilterWriter

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
index 479b9d3..274bfad 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
@@ -106,15 +106,15 @@
 
 
 
+private HFileBlock.Writer
+HFileBlockIndex.BlockIndexWriter.blockWriter
+
+
 protected HFileBlock.Writer
 HFileWriterImpl.blockWriter
 block writer
 
 
-
-private HFileBlock.Writer
-HFileBlockIndex.BlockIndexWriter.blockWriter
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
index 0c892c8..b293c97 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
@@ -136,15 +136,15 @@
 
 
 HFileContext
-HFileBlockDecodingContext.getHFileContext()
+HFileBlockEncodingContext.getHFileContext()
 
 
 HFileContext
-HFileBlockDefaultDecodingContext.getHFileContext()
+HFileBlockDecodingContext.getHFileContext()
 
 
 HFileContext
-HFileBlockEncodingContext.getHFileContext()
+HFileBlockDefaultDecodingContext.getHFileContext()
 
 
 HFileContext
@@ -224,23 +224,23 @@
 
 
 private HFileContext
+HFile.WriterFactory.fileContext
+
+
+private HFileContext
 HFileBlock.fileContext
 Meta data that holds meta information on the hfileblock.
 
 
-
+
 private HFileContext
 HFileBlock.Writer.fileContext
 Meta data that holds information about the hfileblock
 
 
-
-private HFileContext
-HFileBlock.FSReaderImpl.fileContext
-
 
 private HFileContext
-HFile.WriterFactory.fileContext
+HFileBlock.FSReaderImpl.fileContext
 
 
 private HFileContext
@@ -277,20 +277,20 @@
 
 
 HFileContext
-HFileWriterImpl.getFileContext()
-
-
-HFileContext
 HFile.Writer.getFileContext()
 Return the file context for the HFile this writer belongs to
 
 
-
+
 HFileContext
 HFile.Reader.getFileContext()
 Return the file context of the HFile this reader belongs to
 
 
+
+HFileContext
+HFileWriterImpl.getFileContext()
+
 
 HFileContext
 HFileReaderImpl.getFileContext()
@@ -323,35 +323,35 @@
 
 
 HFileBlockDecodingContext
-HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContext fileContext)
-
-
-HFileBlockDecodingContext
 NoOpDataBlockEncoder.newDataBlockDecodingContext(HFileContext meta)
 
-
+
 HFileBlockDecodingContext
 HFileDataBlockEncoder.newDataBlockDecodingContext(HFileContext fileContext)
 create a encoder specific decoding context for reading.
 
 
-
-HFileBlockEncodingContext
-HFileDataBlockEncoderImpl.newDataBlockEncodingContext(byte[] dummyHeader,
-   HFileContext fileContext)
-
 
+HFileBlockDecodingContext
+HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContext fileContext)
+
+
 HFileBlockEncodingContext
 NoOpDataBlockEncoder.newDataBlockEncodingContext(byte[] dummyHeader,
    HFileContext meta)
 
-
+
 HFileBlockEncodingContext
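The first table above centres on BloomContext.addLastBloomKey(HFile.Writer), the abstract hook that lets row and row-col bloom contexts append their last bloom key as file metadata when the storefile writer closes. The toy illustration below mirrors that shape; the Writer class and the "LAST_BLOOM_KEY" name are hypothetical stand-ins, not the real HFile API.

import java.util.HashMap;
import java.util.Map;

// Toy illustration of the BloomContext.addLastBloomKey hook shape.
// Writer is a hypothetical metadata sink, not the real HFile.Writer.
public class BloomContextSketch {
  static class Writer {
    final Map<String, byte[]> fileInfo = new HashMap<>();
    void appendFileInfo(String key, byte[] value) { fileInfo.put(key, value); }
  }

  abstract static class BloomContext {
    byte[] lastBloomKey;
    void writeBloom(byte[] cellKey) { lastBloomKey = cellKey; }
    // Subclasses decide which metadata entry the last bloom key lands under.
    abstract void addLastBloomKey(Writer writer);
  }

  static class RowBloomContext extends BloomContext {
    void addLastBloomKey(Writer writer) {
      if (lastBloomKey != null) {
        writer.appendFileInfo("LAST_BLOOM_KEY", lastBloomKey);
      }
    }
  }

  public static void main(String[] args) {
    Writer w = new Writer();
    BloomContext ctx = new RowBloomContext();
    ctx.writeBloom("row-42".getBytes());
    ctx.addLastBloomKey(w);  // called as part of writer close
    System.out.println(w.fileInfo.keySet());
  }
}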
             

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f272b0e8/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 0c342b2..bb2794a 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -151,115 +151,115 @@
 
 
 Filter.ReturnCode
-FilterListWithAND.filterCell(Cell c)
+ColumnPrefixFilter.filterCell(Cell cell)
 
 
 Filter.ReturnCode
-ValueFilter.filterCell(Cell c)
+ColumnCountGetFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
-SkipFilter.filterCell(Cell c)
+RowFilter.filterCell(Cell v)
 
 
 Filter.ReturnCode
-FamilyFilter.filterCell(Cell c)
+FuzzyRowFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterCell(Cell cell)
+Filter.filterCell(Cell c)
+A way to filter based on the column family, column qualifier and/or the column value.
+
 
 
 Filter.ReturnCode
-PageFilter.filterCell(Cell ignored)
+RandomRowFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
-RowFilter.filterCell(Cell v)
+FirstKeyOnlyFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterCell(Cell c)
+SkipFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterCell(Cell c)
+TimestampsFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterCell(Cell c)
+ValueFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterCell(Cell c)
+KeyOnlyFilter.filterCell(Cell ignored)
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterCell(Cell c)
+FamilyFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
-FilterListWithOR.filterCell(Cell c)
+QualifierFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterCell(Cell c)
+FilterList.filterCell(Cell c)
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterCell(Cell ignored)
+ColumnRangeFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterCell(Cell ignored)
+ColumnPaginationFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
-Filter.filterCell(Cell c)
-A way to filter based on the column family, column qualifier and/or the column value.
-
+FilterListWithAND.filterCell(Cell c)
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterCell(Cell c)
+WhileMatchFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterCell(Cell c)
+MultiRowRangeFilter.filterCell(Cell ignored)
 
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterCell(Cell c)
-Deprecated.
-
+PrefixFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
-TimestampsFilter.filterCell(Cell c)
+DependentColumnFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
-FuzzyRowFilter.filterCell(Cell c)
+FirstKeyValueMatchingQualifiersFilter.filterCell(Cell c)
+Deprecated.
+
 
 
 Filter.ReturnCode
-FilterList.filterCell(Cell c)
+PageFilter.filterCell(Cell ignored)
 
 
 Filter.ReturnCode
-RandomRowFilter.filterCell(Cell c)
+FilterListWithOR.filterCell(Cell c)
 
 
 Filter.ReturnCode
-PrefixFilter.filterCell(Cell c)
+InclusiveStopFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterCell(Cell c)
+MultipleColumnPrefixFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
-QualifierFilter.filterCell(Cell c)
+SingleColumnValueFilter.filterCell(Cell c)
 
 
 Filter.ReturnCode
@@ -275,158 +275,158 @@
 
 
 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cell c)
+ColumnPrefixFilter.filterKeyValue(Cell c)
 Deprecated.
 
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cell c)
+ColumnCountGetFilter.filterKeyValue(Cell c)
 Deprecated.
 
 
 
 Filter.ReturnCode
-FilterListBase.filterKeyValue(Cell c)
+RowFilter.filterKeyValue(Cell c)
+Deprecated.
+
 
 
 Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cell c)
+FuzzyRowFilter.filterKeyValue(Cell c)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cell c)
-Deprecated.
+Filter.filterKeyValue(Cell c)
+Deprecated.
+As of release 2.0.0, this will be removed in HBase 3.0.0.
+ Instead use filterCell(Cell)
+
 
 
 
 Filter.ReturnCode
-PageFilter.filterKeyValue(Cell c)
+RandomRowFilter.filterKeyValue(Cell c)
 Deprecated.
 
 
 
 Filter.ReturnCode
-RowFilter.filterKeyValue(Cell c)
+FirstKeyOnlyFilter.filterKeyValue(Cell c)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cell c)
+SkipFilter.filterKeyValue(Cell c)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cell c)
+TimestampsFilter.filterKeyValue(Cell c)
 Deprecated.
 
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterKeyValue(Cell c)
+ValueFilter.filterKeyValue(Cell c)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterKeyValue(Cell c)
+KeyOnlyFilter.filterKeyValue(Cell ignored)
 Deprecated.
 
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterKeyValue(Cell c)
+FamilyFilter.filterKeyValue(Cell c)
 Deprecated.
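The two tables trace one rename: filterCell is described as "a way to filter based on the column family, column qualifier and/or the column value", and filterKeyValue is its deprecated pre-2.0 name, removed in HBase 3.0.0. The sketch below shows that per-cell decision in a self-contained form; Cell, ReturnCode and the qualifier-prefix rule are simplified stand-ins for the real org.apache.hadoop.hbase.filter API, loosely in the spirit of ColumnPrefixFilter.

// Sketch of the per-cell filter decision described above. Cell and
// ReturnCode are simplified stand-ins for the HBase types.
public class FilterCellSketch {
  enum ReturnCode { INCLUDE, SKIP, NEXT_ROW }

  record Cell(String row, String qualifier, String value) {}

  interface Filter {
    ReturnCode filterCell(Cell c);
  }

  // Keep only cells whose qualifier starts with the given prefix.
  static Filter qualifierPrefix(String prefix) {
    return c -> c.qualifier().startsWith(prefix) ? ReturnCode.INCLUDE : ReturnCode.SKIP;
  }

  public static void main(String[] args) {
    Filter f = qualifierPrefix("d:");
    System.out.println(f.filterCell(new Cell("r1", "d:name", "x")));  // INCLUDE
    System.out.println(f.filterCell(new Cell("r1", "m:ts", "y")));    // SKIP
  }
}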
             
             
             
             

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
index 7244ce2..5f7ce59 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
@@ -114,15 +114,15 @@
 
 
 private PriorityFunction
-SimpleRpcScheduler.priority
+RpcExecutor.priority
 
 
 private PriorityFunction
-RpcExecutor.priority
+RpcExecutor.CallPriorityComparator.priority
 
 
 private PriorityFunction
-RpcExecutor.CallPriorityComparator.priority
+SimpleRpcScheduler.priority
 
 
 
@@ -319,7 +319,7 @@
 
 
 RpcScheduler
-RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
    PriorityFunction priority)
 Deprecated.
 
@@ -333,18 +333,16 @@
 
 
 RpcScheduler
-FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
    PriorityFunction priority)
 Deprecated.
 
 
 
 RpcScheduler
-RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
    PriorityFunction priority,
-  Abortable server)
-Constructs a RpcScheduler.
-
+  Abortable server)
 
 
 RpcScheduler
@@ -354,9 +352,11 @@
 
 
 RpcScheduler
-FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
    PriorityFunction priority,
-  Abortable server)
+  Abortable server)
+Constructs a RpcScheduler.
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html
index 4a25f5c..6d59fb7 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html
@@ -123,14 +123,14 @@
 
 
 void
-ServerCall.setCallBack(RpcCallback callback)
-
-
-void
 RpcCallContext.setCallBack(RpcCallback callback)
 Sets a callback which has to be executed at the end of this RPC call.
 
 
+
+void
+ServerCall.setCallBack(RpcCallback callback)
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html
index fab4d7a..baa4e5e 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html
@@ -131,32 +131,24 @@
 
 
 
-private RpcControllerFactory
-ConnectionImplementation.rpcControllerFactory
-
-
-protected RpcControllerFactory
-ClientScanner.rpcControllerFactory
-
-
 protected RpcControllerFactory
 RegionAdminServiceCallable.rpcControllerFactory
 
 
-(package private) RpcControllerFactory
-AsyncConnectionImpl.rpcControllerFactory
+private RpcControllerFactory
+ConnectionImplementation.rpcControllerFactory
 
 
-private RpcControllerFactory
-HTable.rpcControllerFactory
+(package private) RpcControllerFactory
+AsyncConnectionImpl.rpcControllerFactory
 
 
 private RpcControllerFactory
-HBaseAdmin.rpcControllerFactory
+HTable.rpcControllerFactory
 
 
 private RpcControllerFactory
-SecureBulkLoadClient.rpcControllerFactory
+RpcRetryingCallerWithReadReplicas.rpcControllerFactory
 
 
 protected RpcControllerFactory
@@ -164,7 +156,15 @@
 
 
 private RpcControllerFactory
-RpcRetryingCallerWithReadReplicas.rpcControllerFactory
+HBaseAdmin.rpcControllerFactory
+
+
+private RpcControllerFactory
+SecureBulkLoadClient.rpcControllerFactory
+
+
+protected RpcControllerFactory
+ClientScanner.rpcControllerFactory
 
 
 (package private) RpcControllerFactory
@@ -181,11 +181,11 @@
 
 
 RpcControllerFactory
-ConnectionImplementation.getRpcControllerFactory()
+ClusterConnection.getRpcControllerFactory()
 
 
 RpcControllerFactory
-ClusterConnection.getRpcControllerFactory()
+ConnectionImplementation.getRpcControllerFactory()
 
 
 private RpcControllerFactory
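The create(...) overloads above document a factory migration: the two-argument RpcSchedulerFactory.create(Configuration, PriorityFunction) is deprecated in favour of the three-argument form that also receives an Abortable server ("Constructs a RpcScheduler."). The sketch below mirrors that two-vs-three-argument shape; all four types are stubs invented for illustration, not the real ipc classes.

// Minimal sketch of the RpcSchedulerFactory shape discussed above.
// Every type here is a stub; only the overload migration is the point.
public class SchedulerFactorySketch {
  interface PriorityFunction {}
  interface Abortable {}
  static class RpcScheduler {}
  static class Configuration {}

  interface RpcSchedulerFactory {
    /** Constructs an RpcScheduler; the server handle allows aborting on fatal errors. */
    RpcScheduler create(Configuration conf, PriorityFunction priority, Abortable server);

    /** Deprecated two-argument form kept for compatibility. */
    @Deprecated
    default RpcScheduler create(Configuration conf, PriorityFunction priority) {
      return create(conf, priority, null);
    }
  }

  public static void main(String[] args) {
    RpcSchedulerFactory fifo = (conf, priority, server) -> new RpcScheduler();
    System.out.println(fifo.create(new Configuration(), new PriorityFunction() {}) != null);
  }
}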
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcExecutor.Handler.html
            --
            diff --git 
            

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6674e3ab/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html
            --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html
index 77fb9b5..c4e8c8b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html
@@ -165,520 +165,519 @@
 157E env = checkAndLoadInstance(implClass, priority, conf);
 158if (env != null) {
 159  this.coprocEnvironments.add(env);
-160  LOG.info(
-161  "System coprocessor " + className + " was loaded " + "successfully with priority (" + priority + ").");
-162  ++priority;
-163}
-164  } catch (Throwable t) {
-165// We always abort if system coprocessors cannot be loaded
-166abortServer(className, t);
-167  }
-168}
-169  }
-170
-171  /**
-172   * Load a coprocessor implementation into the host
-173   * @param path path to implementation jar
-174   * @param className the main class name
-175   * @param priority chaining priority
-176   * @param conf configuration for coprocessor
-177   * @throws java.io.IOException Exception
-178   */
-179  public E load(Path path, String className, int priority,
-180  Configuration conf) throws IOException {
-181String[] includedClassPrefixes = null;
-182if (conf.get(HConstants.CP_HTD_ATTR_INCLUSION_KEY) != null){
-183  String prefixes = conf.get(HConstants.CP_HTD_ATTR_INCLUSION_KEY);
-184  includedClassPrefixes = prefixes.split(";");
-185}
-186return load(path, className, priority, conf, includedClassPrefixes);
-187  }
-188
-189  /**
-190   * Load a coprocessor implementation into the host
-191   * @param path path to implementation jar
-192   * @param className the main class name
-193   * @param priority chaining priority
-194   * @param conf configuration for coprocessor
-195   * @param includedClassPrefixes class name prefixes to include
-196   * @throws java.io.IOException Exception
-197   */
-198  public E load(Path path, String className, int priority,
-199  Configuration conf, String[] includedClassPrefixes) throws IOException {
-200Class<?> implClass;
-201LOG.debug("Loading coprocessor class " + className + " with path " +
-202path + " and priority " + priority);
-203
-204ClassLoader cl = null;
-205if (path == null) {
-206  try {
-207implClass = getClass().getClassLoader().loadClass(className);
-208  } catch (ClassNotFoundException e) {
-209throw new IOException("No jar path specified for " + className);
-210  }
-211} else {
-212  cl = CoprocessorClassLoader.getClassLoader(
-213path, getClass().getClassLoader(), pathPrefix, conf);
-214  try {
-215implClass = ((CoprocessorClassLoader)cl).loadClass(className, includedClassPrefixes);
-216  } catch (ClassNotFoundException e) {
-217throw new IOException("Cannot load external coprocessor class " + className, e);
-218  }
-219}
-220
-221//load custom code for coprocessor
-222Thread currentThread = Thread.currentThread();
-223ClassLoader hostClassLoader = currentThread.getContextClassLoader();
-224try{
-225  // switch temporarily to the thread classloader for custom CP
-226  currentThread.setContextClassLoader(cl);
-227  E cpInstance = checkAndLoadInstance(implClass, priority, conf);
-228  return cpInstance;
-229} finally {
-230  // restore the fresh (host) classloader
-231  currentThread.setContextClassLoader(hostClassLoader);
-232}
-233  }
-234
-235  @VisibleForTesting
-236  public void load(Class<? extends C> implClass, int priority, Configuration conf)
-237  throws IOException {
-238E env = checkAndLoadInstance(implClass, priority, conf);
-239coprocEnvironments.add(env);
-240  }
-241
-242  /**
-243   * @param implClass Implementation class
-244   * @param priority priority
-245   * @param conf configuration
-246   * @throws java.io.IOException Exception
-247   */
-248  public E checkAndLoadInstance(Class<?> implClass, int priority, Configuration conf)
-249  throws IOException {
-250// create the instance
-251C impl;
-252try {
-253  impl = checkAndGetInstance(implClass);
-254  if (impl == null) {
-255LOG.error("Cannot load coprocessor " + implClass.getSimpleName());
-256return null;
-257  }
-258} catch (InstantiationException|IllegalAccessException e) {
-259  throw new IOException(e);
-260}
-261// create the environment
-262E env = createEnvironment(impl, priority, loadSequence.incrementAndGet(), conf);
-263assert env instanceof 
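The load(Path, String, ...) body above carries the key trick: coprocessor code from an external jar gets its own classloader, the thread's context classloader is swapped to it only for the duration of instantiation, and a finally block restores the host classloader. The standalone sketch below isolates that swap; a plain URLClassLoader stands in for CoprocessorClassLoader, and withContextClassLoader is a helper invented for illustration.

import java.net.URL;
import java.net.URLClassLoader;
import java.util.concurrent.Callable;

// Sketch of the temporary context-classloader swap used when instantiating
// coprocessor code from an external jar.
public class ClassLoaderSwapSketch {
  static <T> T withContextClassLoader(ClassLoader cl, Callable<T> body) throws Exception {
    Thread current = Thread.currentThread();
    ClassLoader host = current.getContextClassLoader();
    try {
      current.setContextClassLoader(cl);  // custom code may resolve via the context loader
      return body.call();
    } finally {
      current.setContextClassLoader(host);  // always restore the host loader
    }
  }

  public static void main(String[] args) throws Exception {
    ClassLoader jarLoader =
        new URLClassLoader(new URL[0], ClassLoaderSwapSketch.class.getClassLoader());
    String loaded = withContextClassLoader(jarLoader,
        () -> "instantiated under " + Thread.currentThread().getContextClassLoader());
    System.out.println(loaded);
  }
}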
            

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1f2eeb22/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALKeyValueMapper.html
            --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALKeyValueMapper.html b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALKeyValueMapper.html
index 88c511a..ccdd6a2 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALKeyValueMapper.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALKeyValueMapper.html
@@ -113,7 +113,7 @@
 105throws IOException {
 106  try {
 107// skip all other tables
-108if (Bytes.equals(table, key.getTablename().getName())) {
+108if (Bytes.equals(table, key.getTableName().getName())) {
 109  for (Cell cell : value.getCells()) {
 110if (WALEdit.isMetaEditFamily(cell)) {
 111  continue;
@@ -153,10 +153,10 @@
 145public void map(WALKey key, WALEdit value, Context context)
 146throws IOException {
 147  try {
-148if (tables.isEmpty() || tables.containsKey(key.getTablename())) {
+148if (tables.isEmpty() || tables.containsKey(key.getTableName())) {
 149  TableName targetTable = tables.isEmpty() ?
-150key.getTablename() :
-151tables.get(key.getTablename());
+150key.getTableName() :
+151tables.get(key.getTableName());
 152  ImmutableBytesWritable tableOut = new ImmutableBytesWritable(targetTable.getName());
 153  Put put = null;
 154  Delete del = null;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1f2eeb22/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALMapper.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALMapper.html b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALMapper.html
index 88c511a..ccdd6a2 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALMapper.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALMapper.html
@@ -113,7 +113,7 @@
 105throws IOException {
 106  try {
 107// skip all other tables
-108if (Bytes.equals(table, key.getTablename().getName())) {
+108if (Bytes.equals(table, key.getTableName().getName())) {
 109  for (Cell cell : value.getCells()) {
 110if (WALEdit.isMetaEditFamily(cell)) {
 111  continue;
@@ -153,10 +153,10 @@
 145public void map(WALKey key, WALEdit value, Context context)
 146throws IOException {
 147  try {
-148if (tables.isEmpty() || tables.containsKey(key.getTablename())) {
+148if (tables.isEmpty() || tables.containsKey(key.getTableName())) {
 149  TableName targetTable = tables.isEmpty() ?
-150key.getTablename() :
-151tables.get(key.getTablename());
+150key.getTableName() :
+151tables.get(key.getTableName());
 152  ImmutableBytesWritable tableOut = new ImmutableBytesWritable(targetTable.getName());
 153  Put put = null;
 154  Delete del = null;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1f2eeb22/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.html b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.html
index 88c511a..ccdd6a2 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.html
@@ -113,7 +113,7 @@
 105throws IOException {
 106  try {
 107// skip all other tables
-108if (Bytes.equals(table, key.getTablename().getName())) {
+108if (Bytes.equals(table, key.getTableName().getName())) {
 109  for (Cell cell : value.getCells()) {
 110if (WALEdit.isMetaEditFamily(cell)) {
 111  continue;
@@ -153,10 +153,10 @@
 145public void map(WALKey key, WALEdit value, Context context)
 146throws IOException {
 147  try {
-148if (tables.isEmpty() || tables.containsKey(key.getTablename())) {
+148if (tables.isEmpty() || tables.containsKey(key.getTableName())) {
 149  TableName targetTable = tables.isEmpty() ?
-150key.getTablename() :
-151tables.get(key.getTablename());
+150key.getTableName() :
+151tables.get(key.getTableName());
 152  ImmutableBytesWritable tableOut = new ImmutableBytesWritable(targetTable.getName());
 153  Put put = null;
 154  Delete del = 
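All three hunks are the same rename (getTablename to getTableName) inside WALPlayer's table-routing logic: an empty table map means every WAL entry is replayed under its original table, otherwise an entry is replayed only if its table is mapped, possibly to a different target table. The small sketch below captures that routing decision with plain strings standing in for TableName and WALKey.

import java.util.Map;
import java.util.Optional;

// Sketch of WALPlayer's table routing: an empty map replays all tables
// as-is, otherwise only mapped tables, possibly renamed. Strings stand
// in for TableName and WALKey here.
public class WalTableRoutingSketch {
  static Optional<String> targetTable(Map<String, String> tables, String sourceTable) {
    if (tables.isEmpty()) {
      return Optional.of(sourceTable);                    // no filter: keep original name
    }
    return Optional.ofNullable(tables.get(sourceTable));  // mapped target, or skip entry
  }

  public static void main(String[] args) {
    Map<String, String> remap = Map.of("usertable", "usertable_copy");
    System.out.println(targetTable(remap, "usertable"));      // Optional[usertable_copy]
    System.out.println(targetTable(remap, "othertable"));     // Optional.empty -> skipped
    System.out.println(targetTable(Map.of(), "othertable"));  // replay everything
  }
}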

            [12/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/cc6597ec/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableScanMetrics.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableScanMetrics.html b/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableScanMetrics.html
index 2f1d88a..2604e6c 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableScanMetrics.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableScanMetrics.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";

-public class TestAsyncTableScanMetrics
+public class TestAsyncTableScanMetrics
 extends Object

@@ -153,34 +153,38 @@ extends Object
 CF

+static HBaseClassTestRule
+CLASS_RULE
+
+
 private static org.apache.hadoop.hbase.client.AsyncConnection
 CONN

-
+
 private static byte[]
 CQ

-
+
 TestAsyncTableScanMetrics.ScanWithMetrics
 method

-
+
 String
 methodName

-
+
 private static int
 NUM_REGIONS

-
+
 private static org.apache.hadoop.hbase.TableName
 TABLE_NAME

-
+
 private static HBaseTestingUtility
 UTIL

-
+
 private static byte[]
 VALUE

@@ -270,13 +274,22 @@ extends Object

 Field Detail
+
+CLASS_RULE
+public static final HBaseClassTestRule CLASS_RULE
+

 UTIL
-private static final HBaseTestingUtility UTIL
+private static final HBaseTestingUtility UTIL

@@ -285,7 +298,7 @@ extends Object

 TABLE_NAME
-private static final org.apache.hadoop.hbase.TableName TABLE_NAME
+private static final org.apache.hadoop.hbase.TableName TABLE_NAME

@@ -294,7 +307,7 @@ extends Object

 CF
-private static final byte[] CF
+private static final byte[] CF

@@ -303,7 +316,7 @@ extends Object

 CQ
-private static final byte[] CQ
+private static final byte[] CQ

@@ -312,7 +325,7 @@ extends Object

 VALUE
-private static final byte[] VALUE
+private static final byte[] VALUE

@@ -321,7 +334,7 @@ extends Object

 CONN
-private static org.apache.hadoop.hbase.client.AsyncConnection CONN
+private static org.apache.hadoop.hbase.client.AsyncConnection CONN

@@ -330,7 +343,7 @@ extends Object

 NUM_REGIONS
-private static int NUM_REGIONS
+private static int NUM_REGIONS

@@ -339,7 +352,7 @@ extends Object

 methodName
-public String methodName
+public String methodName

@@ -348,7 +361,7 @@ extends Object

 method
-public TestAsyncTableScanMetrics.ScanWithMetrics method
+public TestAsyncTableScanMetrics.ScanWithMetrics method

@@ -365,7 +378,7 @@ extends Object

 TestAsyncTableScanMetrics
-public TestAsyncTableScanMetrics()
+public TestAsyncTableScanMetrics()

@@ -382,7 +395,7 @@ extends Object

 params
-public static List<Object[]> params()
+public static List<Object[]> params()

@@ -391,7 +404,7 @@ extends Object

 setUp
-public static void setUp()
+public static void setUp()
     throws Exception

 Throws:
@@ -405,7 +418,7 @@ extends Object

 tearDown
-public static void tearDown()
+public static void tearDown()
              
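
The one substantive addition in this test-page diff is the CLASS_RULE field. For readers unfamiliar with the pattern, here is a minimal sketch of the conventional JUnit @ClassRule wiring in HBase tests; the forClass(...) factory is the usual HBaseClassTestRule entry point, but the surrounding body is reconstructed for illustration rather than copied from this exact file.

import org.junit.ClassRule;

public class TestAsyncTableScanMetrics {
  // Enforces class-level test policies (e.g. category-based timeouts) for the
  // whole class before any test method runs.
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestAsyncTableScanMetrics.class);

  // ... UTIL, TABLE_NAME, CF, CQ, VALUE, CONN, NUM_REGIONS as listed in the page above ...
}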

            [12/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
index 4febd01..ef680de 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
@@ -71,203 +71,203 @@
 063   * @return The new HashMap of RS log time stamps after the log roll for this incremental backup.
 064   * @throws IOException exception
 065   */
-066  public HashMap<String, Long> getIncrBackupLogFileMap()
-067      throws IOException {
-068    List<String> logList;
-069    HashMap<String, Long> newTimestamps;
-070    HashMap<String, Long> previousTimestampMins;
-071
-072    String savedStartCode = readBackupStartCode();
-073
-074    // key: tableName
-075    // value: <RegionServer,PreviousTimeStamp>
-076    HashMap<TableName, HashMap<String, Long>> previousTimestampMap = readLogTimestampMap();
-077
-078    previousTimestampMins = BackupUtils.getRSLogTimestampMins(previousTimestampMap);
-079
-080    if (LOG.isDebugEnabled()) {
-081      LOG.debug("StartCode " + savedStartCode + "for backupID " + backupInfo.getBackupId());
-082    }
-083    // get all new log files from .logs and .oldlogs after last TS and before new timestamp
-084    if (savedStartCode == null || previousTimestampMins == null
-085        || previousTimestampMins.isEmpty()) {
-086      throw new IOException(
-087          "Cannot read any previous back up timestamps from backup system table. "
-088          + "In order to create an incremental backup, at least one full backup is needed.");
-089    }
-090
-091    LOG.info("Execute roll log procedure for incremental backup ...");
-092    HashMap<String, String> props = new HashMap<String, String>();
-093    props.put("backupRoot", backupInfo.getBackupRootDir());
-094
-095    try (Admin admin = conn.getAdmin()) {
-096      admin.execProcedure(LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE,
-097        LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_NAME, props);
-098    }
-099    newTimestamps = readRegionServerLastLogRollResult();
-100
-101    logList = getLogFilesForNewBackup(previousTimestampMins, newTimestamps, conf, savedStartCode);
-102    List<WALItem> logFromSystemTable =
-103        getLogFilesFromBackupSystem(previousTimestampMins, newTimestamps, getBackupInfo()
-104            .getBackupRootDir());
-105    logList = excludeAlreadyBackedUpWALs(logList, logFromSystemTable);
-106    backupInfo.setIncrBackupFileList(logList);
-107
-108    return newTimestamps;
-109  }
-110
-111  /**
-112   * Get list of WAL files eligible for incremental backup
+066  public HashMap<String, Long> getIncrBackupLogFileMap() throws IOException {
+067    List<String> logList;
+068    HashMap<String, Long> newTimestamps;
+069    HashMap<String, Long> previousTimestampMins;
+070
+071    String savedStartCode = readBackupStartCode();
+072
+073    // key: tableName
+074    // value: <RegionServer,PreviousTimeStamp>
+075    HashMap<TableName, HashMap<String, Long>> previousTimestampMap = readLogTimestampMap();
+076
+077    previousTimestampMins = BackupUtils.getRSLogTimestampMins(previousTimestampMap);
+078
+079    if (LOG.isDebugEnabled()) {
+080      LOG.debug("StartCode " + savedStartCode + "for backupID " + backupInfo.getBackupId());
+081    }
+082    // get all new log files from .logs and .oldlogs after last TS and before new timestamp
+083    if (savedStartCode == null || previousTimestampMins == null
+084        || previousTimestampMins.isEmpty()) {
+085      throw new IOException(
+086          "Cannot read any previous back up timestamps from backup system table. "
+087          + "In order to create an incremental backup, at least one full backup is needed.");
+088    }
+089
+090    LOG.info("Execute roll log procedure for incremental backup ...");
+091    HashMap<String, String> props = new HashMap<>();
+092    props.put("backupRoot", backupInfo.getBackupRootDir());
+093
+094    try (Admin admin = conn.getAdmin()) {
+095      admin.execProcedure(LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE,
+096        LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_NAME, props);
+097    }
+098    newTimestamps = readRegionServerLastLogRollResult();
+099
+100    logList = getLogFilesForNewBackup(previousTimestampMins, newTimestamps, conf, savedStartCode);
+101    List<WALItem> logFromSystemTable =
+102        getLogFilesFromBackupSystem(previousTimestampMins, newTimestamps, getBackupInfo()
+103            .getBackupRootDir());
+104    logList = excludeAlreadyBackedUpWALs(logList, logFromSystemTable);
+105
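
The method above drives one incremental-backup cycle: read the previous per-region-server roll timestamps, force a WAL roll on every region server via a master procedure, then collect the WAL files that fall between the old and new roll points. The procedure-invocation step is worth isolating. A hedged sketch follows, assuming an open Connection and a known backup root directory; the wrapper class and the import path for LogRollMasterProcedureManager are assumptions, not the exact manager code.

import java.io.IOException;
import java.util.HashMap;
import org.apache.hadoop.hbase.backup.master.LogRollMasterProcedureManager;  // assumed path
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;

class WalRoller {  // invented wrapper for the illustration
  // Ask the master to run the distributed log-roll procedure so every region
  // server starts a fresh WAL; files older than the roll are then stable and
  // safe to copy into the incremental backup.
  static void rollAllWals(Connection conn, String backupRootDir) throws IOException {
    HashMap<String, String> props = new HashMap<>();
    props.put("backupRoot", backupRootDir);
    try (Admin admin = conn.getAdmin()) {
      admin.execProcedure(LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE,
          LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_NAME, props);
    }
  }
}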
            

            [12/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/96e5e102/testdevapidocs/org/apache/hadoop/hbase/rest/TestScannersWithLabels.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/rest/TestScannersWithLabels.html b/testdevapidocs/org/apache/hadoop/hbase/rest/TestScannersWithLabels.html
index 26337ab..47c5c80 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/rest/TestScannersWithLabels.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/rest/TestScannersWithLabels.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";

-public class TestScannersWithLabels
+public class TestScannersWithLabels
 extends Object

@@ -295,7 +295,7 @@ extends Object

 TABLE
-private static final org.apache.hadoop.hbase.TableName TABLE
+private static final org.apache.hadoop.hbase.TableName TABLE

@@ -304,7 +304,7 @@ extends Object

 CFA
-private static final String CFA
+private static final String CFA

 See Also:
 Constant Field Values
@@ -317,7 +317,7 @@ extends Object

 CFB
-private static final String CFB
+private static final String CFB

 See Also:
 Constant Field Values
@@ -330,7 +330,7 @@ extends Object

 COLUMN_1
-private static final String COLUMN_1
+private static final String COLUMN_1

 See Also:
 Constant Field Values
@@ -343,7 +343,7 @@ extends Object

 COLUMN_2
-private static final String COLUMN_2
+private static final String COLUMN_2

 See Also:
 Constant Field Values
@@ -356,7 +356,7 @@ extends Object

 TOPSECRET
-private static final String TOPSECRET
+private static final String TOPSECRET

 See Also:
 Constant Field Values
@@ -369,7 +369,7 @@ extends Object

 PUBLIC
-private static final String PUBLIC
+private static final String PUBLIC

 See Also:
 Constant Field Values
@@ -382,7 +382,7 @@ extends Object

 PRIVATE
-private static final String PRIVATE
+private static final String PRIVATE

 See Also:
 Constant Field Values
@@ -395,7 +395,7 @@ extends Object

 CONFIDENTIAL
-private static final String CONFIDENTIAL
+private static final String CONFIDENTIAL

 See Also:
 Constant Field Values
@@ -408,7 +408,7 @@ extends Object

 SECRET
-private static final String SECRET
            

            [12/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8118541f/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html
index a36c52a..196d515 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";

 @InterfaceAudience.Private
-public class HMobStore
+public class HMobStore
 extends HStore
 The store implementation to save MOBs (medium objects); it extends HStore.
 When a descriptor of a column family has the value "IS_MOB", it means this column family
@@ -154,19 +154,19 @@ extends Field and Description

-private long
+private AtomicLong
 cellsCountCompactedFromMob

-private long
+private AtomicLong
 cellsCountCompactedToMob

-private long
+private AtomicLong
 cellsSizeCompactedFromMob

-private long
+private AtomicLong
 cellsSizeCompactedToMob

@@ -198,23 +198,23 @@ extends mobFamilyPath

-private long
+private AtomicLong
 mobFlushCount

-private long
+private AtomicLong
 mobFlushedCellsCount

-private long
+private AtomicLong
 mobFlushedCellsSize

-private long
+private AtomicLong
 mobScanCellsCount

-private long
+private AtomicLong
 mobScanCellsSize

@@ -501,7 +501,7 @@ extends

 LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG

@@ -510,7 +510,7 @@ extends

 mobCacheConfig
-private MobCacheConfig mobCacheConfig
+private MobCacheConfig mobCacheConfig

@@ -519,7 +519,7 @@ extends

 homePath
-private org.apache.hadoop.fs.Path homePath
+private org.apache.hadoop.fs.Path homePath

@@ -528,7 +528,7 @@ extends

 mobFamilyPath
-private org.apache.hadoop.fs.Path mobFamilyPath
+private org.apache.hadoop.fs.Path mobFamilyPath

@@ -537,7 +537,7 @@ extends

 cellsCountCompactedToMob
-private volatile long cellsCountCompactedToMob
+private AtomicLong cellsCountCompactedToMob

@@ -546,7 +546,7 @@ extends

 cellsCountCompactedFromMob
-private volatile long cellsCountCompactedFromMob
+private AtomicLong cellsCountCompactedFromMob

@@ -555,7 +555,7 @@ extends

 cellsSizeCompactedToMob
-private volatile long cellsSizeCompactedToMob
+private AtomicLong cellsSizeCompactedToMob

@@ -564,7 +564,7 @@ extends

 cellsSizeCompactedFromMob
-private volatile long cellsSizeCompactedFromMob
+private AtomicLong cellsSizeCompactedFromMob

@@ -573,7 +573,7 @@ extends

 mobFlushCount
-private volatile long mobFlushCount
+private AtomicLong mobFlushCount

            [12/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/14db89d7/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.FaultyRsExecutor.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.FaultyRsExecutor.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.FaultyRsExecutor.html
index f1db5ca..d8515d7 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.FaultyRsExecutor.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.FaultyRsExecutor.html
@@ -32,813 +32,820 @@
 024import static org.junit.Assert.fail;
 025
 026import java.io.IOException;
-027import java.net.SocketTimeoutException;
-028import java.util.NavigableMap;
-029import java.util.Random;
-030import java.util.Set;
-031import java.util.SortedSet;
-032import java.util.concurrent.ConcurrentSkipListMap;
-033import java.util.concurrent.ConcurrentSkipListSet;
-034import java.util.concurrent.ExecutionException;
-035import java.util.concurrent.Executors;
-036import java.util.concurrent.Future;
-037import java.util.concurrent.ScheduledExecutorService;
-038import java.util.concurrent.TimeUnit;
-039
-040import org.apache.hadoop.conf.Configuration;
-041import org.apache.hadoop.hbase.CategoryBasedTimeout;
-042import org.apache.hadoop.hbase.DoNotRetryIOException;
-043import org.apache.hadoop.hbase.HBaseTestingUtility;
-044import org.apache.hadoop.hbase.NotServingRegionException;
-045import org.apache.hadoop.hbase.ServerName;
-046import org.apache.hadoop.hbase.TableName;
-047import org.apache.hadoop.hbase.client.RegionInfo;
-048import org.apache.hadoop.hbase.client.RegionInfoBuilder;
-049import org.apache.hadoop.hbase.client.RetriesExhaustedException;
-050import org.apache.hadoop.hbase.exceptions.UnexpectedStateException;
-051import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-052import org.apache.hadoop.hbase.master.MasterServices;
-053import org.apache.hadoop.hbase.master.RegionState.State;
-054import org.apache.hadoop.hbase.master.procedure.MasterProcedureConstants;
-055import org.apache.hadoop.hbase.master.procedure.ProcedureSyncWait;
-056import org.apache.hadoop.hbase.master.procedure.RSProcedureDispatcher;
-057import org.apache.hadoop.hbase.procedure2.Procedure;
-058import org.apache.hadoop.hbase.procedure2.ProcedureMetrics;
-059import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
-060import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
-061import org.apache.hadoop.hbase.procedure2.util.StringUtils;
-062import org.apache.hadoop.hbase.regionserver.RegionServerAbortedException;
-063import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
-064import org.apache.hadoop.hbase.testclassification.MasterTests;
-065import org.apache.hadoop.hbase.testclassification.MediumTests;
-066import org.apache.hadoop.hbase.util.Bytes;
-067import org.apache.hadoop.hbase.util.FSUtils;
-068import org.apache.hadoop.ipc.RemoteException;
-069import org.junit.After;
-070import org.junit.Before;
-071import org.junit.Ignore;
-072import org.junit.Rule;
-073import org.junit.Test;
-074import org.junit.experimental.categories.Category;
-075import org.junit.rules.ExpectedException;
-076import org.junit.rules.TestName;
-077import org.junit.rules.TestRule;
-078import org.slf4j.Logger;
-079import org.slf4j.LoggerFactory;
-080import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-081import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-082import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-083import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresRequest;
-084import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresResponse;
-085import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest;
-086import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo;
-087import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse;
-088import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState;
-089import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition;
-090import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
-091import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest;
-092
-093@Category({MasterTests.class, MediumTests.class})
-094public class TestAssignmentManager {
-095  private static final Logger LOG =
            

            [12/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0b638133/testdevapidocs/org/apache/hadoop/hbase/client/package-use.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/client/package-use.html b/testdevapidocs/org/apache/hadoop/hbase/client/package-use.html
index e2ff53a..e89ffa5 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/client/package-use.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/client/package-use.html
@@ -110,104 +110,119 @@
 AbstractTestAsyncTableScan

-AbstractTestResultScannerCursor
+AbstractTestCIOperationTimeout
+Base class for testing operation timeout logic for ConnectionImplementation.
+

-AbstractTestScanCursor
+AbstractTestCIRpcTimeout
+Base class for testing rpc timeout logic for ConnectionImplementation.
+

-AbstractTestShell
+AbstractTestCITimeout
+Base class for testing timeout logic for ConnectionImplementation.
+

-ColumnCountOnRowFilter
+AbstractTestResultScannerCursor

+AbstractTestScanCursor
+
+
+AbstractTestShell
+
+
+ColumnCountOnRowFilter
+
+
 DoNothingAsyncRegistry
 Registry that does nothing.

-
+
 TestAsyncAdminBase
 Class to test AsyncAdmin.

-
+
 TestAsyncProcess.MyAsyncProcess

-
+
 TestAsyncProcess.MyAsyncProcessWithReplicas

-
+
 TestAsyncProcess.MyConnectionImpl
 Returns our async process.

-
+
 TestAsyncProcess.ResponseGenerator

-
+
 TestAsyncProcess.RR
 After reading TheDailyWtf, I always wanted to create a MyBoolean enum like this!

-
+
 TestAsyncTableGetMultiThreaded
 Will split the table, and move region randomly when testing.

-
+
 TestAsyncTableScanMetrics.ScanWithMetrics

-
+
 TestBlockEvictionFromClient.CustomInnerRegionObserver

-
+
 TestBlockEvictionFromClient.GetThread

-
+
 TestBlockEvictionFromClient.MultiGetThread

-
+
 TestBlockEvictionFromClient.ScanThread

-
+
 TestCloneSnapshotFromClient
 Test clone snapshots from the client

-
+
 TestFromClientSide
 Run tests that use the HBase clients; Table.

-
+
 TestFromClientSideScanExcpetion

-
+
 TestHBaseAdminNoCluster.MethodCaller

-
+
 TestIncrementsFromClientSide
 Run Increment tests that use the HBase clients; HTable.

-
+
 TestMetaCache.ExceptionInjector

-
+
 TestMetaCache.FakeRSRpcServices

-
+
 TestRestoreSnapshotFromClient
 Test restore snapshots from the client

-
+
 TestSnapshotCloneIndependence
 Test to verify that the cloned table is independent of the table from which it was cloned

-
+
 TestSnapshotFromClient
 Test create/using/deleting snapshots from the client
            
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0b638133/testdevapidocs/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.html b/testdevapidocs/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.html
index 23b0ee4..5022712 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":9,"i1":10,"i2":9,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10};
+var methods = {"i0":9,"i1":10,"i2":9,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";

-public class TestReplicationAdmin
+public class TestReplicationAdmin
 extends Object
 Unit testing of ReplicationAdmin

@@ -253,32 +253,40 @@ extends Object

 void
+testPeerClusterKey()
+
+
+void
 testPeerConfig()
 Tests that the peer configuration used by ReplicationAdmin contains all
 the peer's properties.

-
+
 void
 testPeerConfigConflict()

-
+
 void
 testPeerExcludeNamespaces()

-
+
 void
 testPeerExcludeTableCFs()

-
+
+void
+testPeerReplicationEndpointImpl()
+
+
 void
 testRemovePeerTableCFs()

-
+
 void
 testSetPeerNamespaces()

-
+
 void
 testSetReplicateAllUserTables()

@@ -310,7 +318,7 @@ extends Object

 LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG

@@ -319,7 +327,7 @@ extends Object

 TEST_UTIL
-private static final HBaseTestingUtility
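
The two test methods this hunk adds, testPeerClusterKey and testPeerReplicationEndpointImpl, both exercise ReplicationPeerConfig validation. As a rough illustration of the setup such tests perform, here is a hedged sketch against the pre-Admin-API ReplicationAdmin client this page documents; the peer id, cluster key, and property values are invented for the example.

import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

class PeerConfigExample {  // invented wrapper; values below are illustrative
  static void addAndVerifyPeer(ReplicationAdmin admin) throws Exception {
    ReplicationPeerConfig rpc = new ReplicationPeerConfig();
    rpc.setClusterKey("zk1.example.com:2181:/hbase");  // peer cluster's ZK quorum key
    rpc.getConfiguration().put("key1", "value1");
    admin.addPeer("1", rpc, null);
    // Read the config back and check the properties survived the round trip.
    ReplicationPeerConfig stored = admin.getPeerConfig("1");
    assert "value1".equals(stored.getConfiguration().get("key1"));
  }
}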

            [12/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f183e80f/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHMobStore.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHMobStore.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHMobStore.html
index a4307f9..9a73216 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHMobStore.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHMobStore.html
@@ -748,7 +748,7 @@ extends Object

 init
-private void init(org.apache.hadoop.conf.Configuration conf,
+private void init(org.apache.hadoop.conf.Configuration conf,
                   org.apache.hadoop.hbase.HColumnDescriptor hcd)
            throws IOException

@@ -763,7 +763,7 @@ extends Object

 testGetFromMemStore
-public void testGetFromMemStore()
+public void testGetFromMemStore()
                          throws IOException
 Getting data from memstore

@@ -778,7 +778,7 @@ extends Object

 testGetFromFiles
-public void testGetFromFiles()
+public void testGetFromFiles()
                       throws IOException
 Getting MOB data from files

@@ -793,7 +793,7 @@ extends Object

 testGetReferencesFromFiles
-public void testGetReferencesFromFiles()
+public void testGetReferencesFromFiles()
                                 throws IOException
 Getting the reference data from files

@@ -808,7 +808,7 @@ extends Object

 testGetFromMemStoreAndFiles
-public void testGetFromMemStoreAndFiles()
+public void testGetFromMemStoreAndFiles()
                                  throws IOException
 Getting data from memstore and files

@@ -823,7 +823,7 @@ extends Object

 testMobCellSizeThreshold
-public void testMobCellSizeThreshold()
+public void testMobCellSizeThreshold()
                               throws IOException
 Getting data from memstore and files

@@ -838,7 +838,7 @@ extends Object

 testCommitFile
-public void testCommitFile()
+public void testCommitFile()
                     throws Exception

 Throws:
@@ -852,7 +852,7 @@ extends Object

 testResolve
-public void testResolve()
+public void testResolve()
                  throws Exception

 Throws:
@@ -866,7 +866,7 @@ extends Object

 flush
-private void flush(int storeFilesSize)
+private void flush(int storeFilesSize)
             throws IOException
 Flush the memstore

@@ -883,7 +883,7 @@ extends Object

 flushStore
-private static void flushStore(org.apache.hadoop.hbase.regionserver.HMobStore store,
+private static void flushStore(org.apache.hadoop.hbase.regionserver.HMobStore store,
                                long id)
                         throws IOException
 Flush the memstore
@@ -902,7 +902,7 @@ extends Object

 testMOBStoreEncryption
-public void testMOBStoreEncryption()
+public void testMOBStoreEncryption()
                             throws Exception

 Throws:
@@ -916,7 +916,7 @@ extends Object
             
             

            [12/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/49431b18/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
index ce948d2..66944b6 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";

 @InterfaceAudience.Private
-public class MasterRpcServices
+public class MasterRpcServices
 extends RSRpcServices
 implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface,
 org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService.BlockingInterface,
 org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService.BlockingInterface
 Implements the master RPC services.
@@ -635,111 +635,116 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
    org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest request)

+org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportProcedureDoneResponse
+reportProcedureDone(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
+    org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportProcedureDoneRequest request)
+
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportResponse
 reportRegionSpaceUse(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
     org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportRequest request)

-
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse
 reportRegionStateTransition(org.apache.hbase.thirdparty.com.google.protobuf.RpcController c,
     org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest req)

-
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse
 reportRSFatalError(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
     org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest request)

-
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse
 requestLock(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
     org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest request)

-
+
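
The only method added in this hunk set is reportProcedureDone, the master-side endpoint a region server calls when a master-dispatched procedure finishes remotely. A hedged sketch of the implementation shape follows: the signature matches the diff, but the body is an illustrative assumption, not the real HBase source.

// Each BlockingInterface endpoint takes an RpcController plus a request proto
// and throws ServiceException on failure, as is standard for protobuf blocking stubs.
@Override
public ReportProcedureDoneResponse reportProcedureDone(RpcController controller,
    ReportProcedureDoneRequest request) throws ServiceException {
  // Illustrative body: hand each reported result back to the master's
  // procedure machinery (the exact bookkeeping call is an assumption), e.g.
  // master.remoteProcedureCompleted(...) or master.remoteProcedureFailed(...),
  // then acknowledge with the (empty) response proto.
  return ReportProcedureDoneResponse.getDefaultInstance();
}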
             

            [12/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c7c40c62/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.PrimaryRegionCountSkewCostFunction.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.PrimaryRegionCountSkewCostFunction.html b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.PrimaryRegionCountSkewCostFunction.html
index 763eec0..c4f0db1 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.PrimaryRegionCountSkewCostFunction.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.PrimaryRegionCountSkewCostFunction.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";

-static class StochasticLoadBalancer.PrimaryRegionCountSkewCostFunction
+static class StochasticLoadBalancer.PrimaryRegionCountSkewCostFunction
 extends StochasticLoadBalancer.CostFunction
 Compute the cost of a potential cluster state from skew in number of
 primary regions on a cluster.
@@ -230,7 +230,7 @@ extends StochasticLoadBalancer.CostFunction

 PRIMARY_REGION_COUNT_SKEW_COST_KEY
-private static final String PRIMARY_REGION_COUNT_SKEW_COST_KEY
+private static final String PRIMARY_REGION_COUNT_SKEW_COST_KEY

 See Also:
 Constant Field Values
@@ -243,7 +243,7 @@ extends StochasticLoadBalancer.CostFunction

 DEFAULT_PRIMARY_REGION_COUNT_SKEW_COST
-private static final float DEFAULT_PRIMARY_REGION_COUNT_SKEW_COST
+private static final float DEFAULT_PRIMARY_REGION_COUNT_SKEW_COST

 See Also:
 Constant Field Values
@@ -256,7 +256,7 @@ extends StochasticLoadBalancer.CostFunction

 stats
-private double[] stats
+private double[] stats

@@ -273,7 +273,7 @@ extends StochasticLoadBalancer.CostFunction

 PrimaryRegionCountSkewCostFunction
-PrimaryRegionCountSkewCostFunction(org.apache.hadoop.conf.Configuration conf)
+PrimaryRegionCountSkewCostFunction(org.apache.hadoop.conf.Configuration conf)

@@ -290,7 +290,7 @@ extends StochasticLoadBalancer.CostFunction

 cost
-double cost()
+double cost()

 Specified by:
 cost in class StochasticLoadBalancer.CostFunction
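
The class description above is the whole idea of the stochastic balancer's cost functions: score a candidate cluster state, where a lower score means a better-balanced cluster. A self-contained, hedged sketch of a skew-style cost in that spirit follows; the normalization scheme is an illustrative assumption, not the exact HBase implementation.

// Score how unevenly primary regions are spread across servers:
// 0 means perfectly even, 1 means everything piled on a single server.
static double primarySkewCost(int[] primariesPerServer) {
  int n = primariesPerServer.length;
  double total = 0;
  for (int c : primariesPerServer) total += c;
  if (n == 0 || total == 0) return 0;
  double mean = total / n;
  double deviation = 0;
  for (int c : primariesPerServer) deviation += Math.abs(c - mean);
  // Worst case: one server holds every primary, giving deviation 2*total*(1 - 1/n).
  double worst = 2 * total * (1 - 1.0 / n);
  return deviation / worst;
}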
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c7c40c62/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RackLocalityCostFunction.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RackLocalityCostFunction.html b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RackLocalityCostFunction.html
index 268e2f3..5550840 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RackLocalityCostFunction.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RackLocalityCostFunction.html
@@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";

-static class StochasticLoadBalancer.RackLocalityCostFunction
+static class StochasticLoadBalancer.RackLocalityCostFunction
 extends StochasticLoadBalancer.LocalityBasedCostFunction

@@ -239,7 +239,7 @@ extends

 RACK_LOCALITY_COST_KEY
-private static final String RACK_LOCALITY_COST_KEY
+private static final String RACK_LOCALITY_COST_KEY

 See Also:
 Constant Field Values
@@ -252,7 +252,7 @@ extends

 DEFAULT_RACK_LOCALITY_COST
-private static final float DEFAULT_RACK_LOCALITY_COST
+private static final float DEFAULT_RACK_LOCALITY_COST

 See Also:
 Constant Field Values
@@ -273,7 +273,7 @@ extends

 RackLocalityCostFunction
-public RackLocalityCostFunction(org.apache.hadoop.conf.Configuration conf,
+public RackLocalityCostFunction(org.apache.hadoop.conf.Configuration conf,
     MasterServices services)

@@ -291,7 +291,7 @@ extends

 regionIndexToEntityIndex
-int regionIndexToEntityIndex(int region)
+int regionIndexToEntityIndex(int region)
 Description copied from class: StochasticLoadBalancer.LocalityBasedCostFunction
 Maps region to the current entity (server or rack) on which it is stored
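
As the copied description says, the rack variant resolves a region to the entity it currently lives on; that is just a composition of two lookup tables, region to hosting server, then server to rack. A hedged sketch, with the array names assumed for illustration rather than taken from the diff:

    // Sketch: locality cost functions map a region index to the entity
    // (server or rack) hosting it via two index arrays.
    // regionIndexToServerIndex and serverIndexToRackIndex are assumed
    // names, not necessarily the balancer's exact fields.
    static int regionIndexToRackIndex(int region, int[] regionIndexToServerIndex,
        int[] serverIndexToRackIndex) {
      int server = regionIndexToServerIndex[region];
      return serverIndexToRackIndex[server];
    }
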
             
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c7c40c62/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RandomCandidateGenerator.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RandomCandidateGenerator.html b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RandomCandidateGenerator.html
index 233412a..992fe88 100644
--- 
            

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bb398572/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverGetter.html
            --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverGetter.html b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverGetter.html
index b8e321a..439a50d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverGetter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverGetter.html
@@ -468,274 +468,216 @@
 460  }
 461
 462  /**
-463   * Used to gracefully handle fallback to deprecated methods when we
-464   * evolve coprocessor APIs.
-465   *
-466   * When a particular Coprocessor API is updated to change methods, hosts can support fallback
-467   * to the deprecated API by using this method to determine if an instance implements the new API.
-468   * In the event that said support is partial, then in the face of a runtime issue that prevents
-469   * proper operation {@link #legacyWarning(Class, String)} should be used to let operators know.
-470   *
-471   * For examples of this in action, see the implementation of
-472   * <ul>
-473   *   <li>{@link org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost}
-474   *   <li>{@link org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost}
-475   * </ul>
-476   *
-477   * @param clazz Coprocessor you wish to evaluate
-478   * @param methodName the name of the non-deprecated method version
-479   * @param parameterTypes the Class of the non-deprecated method's arguments in the order they are
-480   * declared.
-481   */
-482  @InterfaceAudience.Private
-483  protected static boolean useLegacyMethod(final Class<? extends Coprocessor> clazz,
-484      final String methodName, final Class<?>... parameterTypes) {
-485    boolean useLegacy;
-486    // Use reflection to see if they implement the non-deprecated version
-487    try {
-488      clazz.getDeclaredMethod(methodName, parameterTypes);
-489      LOG.debug("Found an implementation of '" + methodName + "' that uses updated method " +
-490          "signature. Skipping legacy support for invocations in '" + clazz + "'.");
-491      useLegacy = false;
-492    } catch (NoSuchMethodException exception) {
-493      useLegacy = true;
-494    } catch (SecurityException exception) {
-495      LOG.warn("The Security Manager denied our attempt to detect if the coprocessor '" + clazz +
-496          "' requires legacy support; assuming it does. If you get later errors about legacy " +
-497          "coprocessor use, consider updating your security policy to allow access to the package" +
-498          " and declared members of your implementation.");
-499      LOG.debug("Details of Security Manager rejection.", exception);
-500      useLegacy = true;
+463   * Used to limit legacy handling to once per Coprocessor class per classloader.
+464   */
+465  private static final Set<Class<? extends Coprocessor>> legacyWarning =
+466      new ConcurrentSkipListSet<>(
+467          new Comparator<Class<? extends Coprocessor>>() {
+468            @Override
+469            public int compare(Class<? extends Coprocessor> c1, Class<? extends Coprocessor> c2) {
+470              if (c1.equals(c2)) {
+471                return 0;
+472              }
+473              return c1.getName().compareTo(c2.getName());
+474            }
+475          });
+476
+477  /**
+478   * Implementations defined function to get an observer of type {@code O} from a coprocessor of
+479   * type {@code C}. Concrete implementations of CoprocessorHost define one getter for each
+480   * observer they can handle. For e.g. RegionCoprocessorHost will use 3 getters, one for
+481   * each of RegionObserver, EndpointObserver and BulkLoadObserver.
+482   * These getters are used by {@code ObserverOperation} to get appropriate observer from the
+483   * coprocessor.
+484   */
+485  @FunctionalInterface
+486  public interface ObserverGetter<C, O> extends Function<C, Optional<O>> {}
+487
+488  private abstract class ObserverOperation<O> extends ObserverContextImpl<E> {
+489    ObserverGetter<C, O> observerGetter;
+490
+491    ObserverOperation(ObserverGetter<C, O> observerGetter) {
+492      this(observerGetter, null);
+493    }
+494
+495    ObserverOperation(ObserverGetter<C, O> observerGetter, User user) {
+496      this(observerGetter, user, false);
+497    }
+498
+499    ObserverOperation(ObserverGetter<C, O> observerGetter, boolean bypassable) {
+500      this(observerGetter, null, bypassable);
 501    }
-502    return useLegacy;
-503  }
-504
-505  /**
-506   * Used to limit legacy handling to once per Coprocessor class per classloader.
-507   */
-508  private static final Set<Class<? extends Coprocessor>> legacyWarning =
-509      new ConcurrentSkipListSet<>(
-510      new 
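
The new ObserverGetter interface in the + lines is just a java.util.function.Function that returns an Optional, so a host can supply one getter per observer type as a lambda or method reference. A small self-contained illustration (the coprocessor and observer types here are stand-ins, not HBase's):

    import java.util.Optional;
    import java.util.function.Function;

    public class ObserverGetterDemo {
      // Same shape as the interface added in the diff.
      @FunctionalInterface
      interface ObserverGetter<C, O> extends Function<C, Optional<O>> {}

      // Stand-in coprocessor/observer types for the demo.
      interface RegionObserver { void preFlush(); }
      static class MyCoprocessor implements RegionObserver {
        @Override public void preFlush() { System.out.println("preFlush"); }
      }

      public static void main(String[] args) {
        // Yields an observer only when the coprocessor implements it.
        ObserverGetter<Object, RegionObserver> getter = cp ->
            cp instanceof RegionObserver
                ? Optional.of((RegionObserver) cp)
                : Optional.empty();

        getter.apply(new MyCoprocessor()).ifPresent(RegionObserver::preFlush);
        getter.apply(new Object()).ifPresent(RegionObserver::preFlush); // no-op
      }
    }
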

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
index 724353c..c5a4a7f 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
@@ -274,6 +274,6 @@ extends IllegalStateException

-Copyright © 2007–2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
index 75e7b05..06b0d14 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
@@ -1896,6 +1896,6 @@

-Copyright © 2007–2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileScanner.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileScanner.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileScanner.html
index 12eac35..e740733 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileScanner.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileScanner.html
@@ -567,6 +567,6 @@ extends

-Copyright © 2007–2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileUtil.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileUtil.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileUtil.html
index 71186cf..26c8b69 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileUtil.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileUtil.html
@@ -285,6 +285,6 @@ extends Object

-Copyright © 2007–2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html
index 330e08a..bbef49a 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html
@@ -1488,6 +1488,6 @@ implements

-Copyright © 2007–2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/InclusiveCombinedBlockCache.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/InclusiveCombinedBlockCache.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/InclusiveCombinedBlockCache.html
index 644d487..5bd8d1c 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/InclusiveCombinedBlockCache.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/InclusiveCombinedBlockCache.html
@@ -406,6 +406,6 @@ extends

-Copyright © 2007–2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.html
            --
            diff --git 

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/83bf6175/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.AsyncProcessWithFailure.html
            --
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.AsyncProcessWithFailure.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.AsyncProcessWithFailure.html
index bbd91b8..4f76302 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.AsyncProcessWithFailure.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.AsyncProcessWithFailure.html
@@ -56,1641 +56,1753 @@
 048import java.util.concurrent.atomic.AtomicBoolean;
 049import java.util.concurrent.atomic.AtomicInteger;
 050import java.util.concurrent.atomic.AtomicLong;
-051
-052import org.apache.hadoop.conf.Configuration;
-053import org.apache.hadoop.hbase.CallQueueTooBigException;
-054import org.apache.hadoop.hbase.CategoryBasedTimeout;
-055import org.apache.hadoop.hbase.Cell;
-056import org.apache.hadoop.hbase.HConstants;
-057import org.apache.hadoop.hbase.HRegionInfo;
-058import org.apache.hadoop.hbase.HRegionLocation;
-059import org.apache.hadoop.hbase.RegionLocations;
-060import org.apache.hadoop.hbase.ServerName;
-061import org.apache.hadoop.hbase.TableName;
-062import org.apache.hadoop.hbase.client.AsyncProcessTask.ListRowAccess;
-063import org.apache.hadoop.hbase.client.AsyncProcessTask.SubmittedRows;
-064import org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicy;
-065import org.apache.hadoop.hbase.client.backoff.ServerStatistics;
-066import org.apache.hadoop.hbase.client.coprocessor.Batch;
-067import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-068import org.apache.hadoop.hbase.testclassification.ClientTests;
-069import org.apache.hadoop.hbase.testclassification.MediumTests;
-070import org.apache.hadoop.hbase.util.Bytes;
-071import org.apache.hadoop.hbase.util.Threads;
-072import org.junit.Assert;
-073import org.junit.BeforeClass;
-074import org.junit.Ignore;
-075import org.junit.Rule;
-076import org.junit.Test;
-077import org.junit.experimental.categories.Category;
-078import org.junit.rules.TestRule;
-079import org.mockito.Mockito;
-080import org.slf4j.Logger;
-081import org.slf4j.LoggerFactory;
-082
-083@Category({ClientTests.class, MediumTests.class})
-084public class TestAsyncProcess {
-085  @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-086      withLookingForStuckThread(true).build();
-087  private static final Logger LOG = LoggerFactory.getLogger(TestAsyncProcess.class);
-088  private static final TableName DUMMY_TABLE =
-089      TableName.valueOf("DUMMY_TABLE");
-090  private static final byte[] DUMMY_BYTES_1 = Bytes.toBytes("DUMMY_BYTES_1");
-091  private static final byte[] DUMMY_BYTES_2 = Bytes.toBytes("DUMMY_BYTES_2");
-092  private static final byte[] DUMMY_BYTES_3 = Bytes.toBytes("DUMMY_BYTES_3");
-093  private static final byte[] FAILS = Bytes.toBytes("FAILS");
-094  private static final Configuration CONF = new Configuration();
-095  private static final ConnectionConfiguration CONNECTION_CONFIG =
-096      new ConnectionConfiguration(CONF);
-097  private static final ServerName sn = ServerName.valueOf("s1,1,1");
-098  private static final ServerName sn2 = ServerName.valueOf("s2,2,2");
-099  private static final ServerName sn3 = ServerName.valueOf("s3,3,3");
-100  private static final HRegionInfo hri1 =
-101      new HRegionInfo(DUMMY_TABLE, DUMMY_BYTES_1, DUMMY_BYTES_2, false, 1);
-102  private static final HRegionInfo hri2 =
-103      new HRegionInfo(DUMMY_TABLE, DUMMY_BYTES_2, HConstants.EMPTY_END_ROW, false, 2);
-104  private static final HRegionInfo hri3 =
-105      new HRegionInfo(DUMMY_TABLE, DUMMY_BYTES_3, HConstants.EMPTY_END_ROW, false, 3);
-106  private static final HRegionLocation loc1 = new HRegionLocation(hri1, sn);
-107  private static final HRegionLocation loc2 = new HRegionLocation(hri2, sn);
-108  private static final HRegionLocation loc3 = new HRegionLocation(hri3, sn2);
-109
-110  // Replica stuff
-111  private static final RegionInfo hri1r1 = RegionReplicaUtil.getRegionInfoForReplica(hri1, 1);
-112  private static final RegionInfo hri1r2 = RegionReplicaUtil.getRegionInfoForReplica(hri1, 2);
-113  private static final RegionInfo hri2r1 = RegionReplicaUtil.getRegionInfoForReplica(hri2, 1);
-114  private static final RegionLocations hrls1 = new RegionLocations(new HRegionLocation(hri1, sn),
-115      new HRegionLocation(hri1r1, sn2), new HRegionLocation(hri1r2, sn3));
-116  private static final RegionLocations hrls2 = new RegionLocations(new HRegionLocation(hri2, sn2),
-117      new HRegionLocation(hri2r1, sn3));
-118  private static final RegionLocations hrls3 =
-119      new RegionLocations(new HRegionLocation(hri3, sn3), null);
-120
            

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/63d6f712/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
index 010a1d0..e90dedc 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
@@ -254,7 +254,7 @@ extends

 Methods inherited from class org.apache.hadoop.hbase.regionserver.HRegionServer
-abort, addRegion, addToMovedRegions, checkFileSystem, cleanMovedRegions, clearRegionBlockCache, closeAllRegions, closeAndOfflineRegionForSplitOrMerge, closeRegion, constructRegionServer, convertThrowableToIOE, createClusterConnection, createConnection, createRegionLoad, createRegionServerStatusStub, createRegionServerStatusStub, execRegionServerService, getCacheConfig, getChoreService, getClusterConnection, getClusterId, getCompactionPressure, getCompactionRequestor, getCompactSplitThread, getConfiguration, getConfigurationManager, getConnection, getCoordinatedStateManager, getEventLoopGroupConfig, getExecutorService, getFavoredNodesForRegion, getFileSystem, getFlushPressure, getFlushRequester, getFlushThroughputController, getFsTableDescriptors, getHeapMemoryManager, getInfoServer, getLastSequenceId, getLeases, getMasterAddressTracker, getMetaTableLocator, getMetrics, getMostLoadedRegions, getNonceManager, getNumberOfOnlineRegions, getOnlineRegion, getOnlineRegionsLocalContext, getOnlineTables, getRegion, getRegion, getRegionBlockLocations, getRegionByEncodedName, getRegionByEncodedName, getRegions, getRegions, getRegionServerAccounting, getRegionServerCoprocessorHost, getRegionServerCoprocessors, getRegionServerMetrics, getRegionServerRpcQuotaManager, getRegionServerSpaceQuotaManager, getRegionsInTransitionInRS, getReplicationSourceService, getRootDir, getRpcServer, getRSRpcServices, getSecureBulkLoadManager, getStartcode, getThreadWakeFrequency, getWAL, getWALFileSystem, getWalRoller, getWALRootDir, getWALs, handleReportForDutyResponse, initializeMemStoreChunkCreator, isAborted, isOnline, isStopped, isStopping, kill, movedRegionCleanerPeriod, onConfigurationChange, postOpenDeployTasks, regionLock, removeRegion, reportRegionSizesForQuotas, reportRegionStateTransition, sendShutdownInterrupt, setInitLatch, setupClusterConnection, shouldUseThisHostnameInstead, stop, stop, toString, tryRegionServerReport, unassign, updateConfiguration, updateRegionFavoredNodesMapping, waitForServerOnline, walRollRequestFinished
+abort, addRegion, addToMovedRegions, checkFileSystem, cleanMovedRegions, clearRegionBlockCache, closeAllRegions, closeAndOfflineRegionForSplitOrMerge, closeRegion, constructRegionServer, convertThrowableToIOE, createClusterConnection, createConnection, 

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d449e87f/devapidocs/org/apache/hadoop/hbase/http/HttpServer.html
            --
diff --git a/devapidocs/org/apache/hadoop/hbase/http/HttpServer.html b/devapidocs/org/apache/hadoop/hbase/http/HttpServer.html
index 7d4d03e..47b04e4 100644
--- a/devapidocs/org/apache/hadoop/hbase/http/HttpServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/http/HttpServer.html
@@ -115,7 +115,7 @@ var activeTableTab = "activeTableTab";

 @InterfaceAudience.Private
 @InterfaceStability.Evolving
-public class HttpServer
+public class HttpServer
 extends Object
 implements FilterContainer
 Create a Jetty embedded server to answer http requests. The primary goal
@@ -684,7 +684,7 @@ implements

 LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG

@@ -693,7 +693,7 @@ implements

 EMPTY_STRING
-private static final String EMPTY_STRING
+private static final String EMPTY_STRING

 See Also:
 Constant Field Values
@@ -706,7 +706,7 @@ implements

 DEFAULT_MAX_HEADER_SIZE
-private static final int DEFAULT_MAX_HEADER_SIZE
+private static final int DEFAULT_MAX_HEADER_SIZE

 See Also:
 Constant Field Values
@@ -719,7 +719,7 @@ implements

 FILTER_INITIALIZERS_PROPERTY
-static final String FILTER_INITIALIZERS_PROPERTY
+static final String FILTER_INITIALIZERS_PROPERTY

 See Also:
 Constant Field Values
@@ -732,7 +732,7 @@ implements

 HTTP_MAX_THREADS
-static final String HTTP_MAX_THREADS
+static final String HTTP_MAX_THREADS

 See Also:
 Constant Field Values
@@ -745,7 +745,7 @@ implements

 HTTP_UI_AUTHENTICATION
-public static final String HTTP_UI_AUTHENTICATION
+public static final String HTTP_UI_AUTHENTICATION

 See Also:
 Constant Field Values
@@ -758,7 +758,7 @@ implements

 HTTP_AUTHENTICATION_PREFIX
-static final String HTTP_AUTHENTICATION_PREFIX
+static final String HTTP_AUTHENTICATION_PREFIX

 See Also:
 Constant Field Values
@@ -771,7 +771,7 @@ implements

 HTTP_SPNEGO_AUTHENTICATION_PREFIX
-static final String HTTP_SPNEGO_AUTHENTICATION_PREFIX
+static final String HTTP_SPNEGO_AUTHENTICATION_PREFIX

 See Also:
 Constant Field Values
@@ -784,7 +784,7 @@ implements

 HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX
-static final String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX
+static final String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX

 See Also:
 Constant Field Values
@@ -797,7 +797,7 @@ implements

 HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY
-public static final String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY
+public static final String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY

 See Also:
 Constant Field Values
@@ -810,7 +810,7 @@ implements

 HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX
-static final String HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyByteBufferCell.html
            --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyByteBufferCell.html b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyByteBufferCell.html
index 3400507..2baa140 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyByteBufferCell.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyByteBufferCell.html
@@ -28,3034 +28,2926 @@
 020import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
 021import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
 022
-023import com.google.common.annotations.VisibleForTesting;
-024
-025import java.io.DataOutput;
-026import java.io.DataOutputStream;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.math.BigDecimal;
-030import java.nio.ByteBuffer;
-031import java.util.ArrayList;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Optional;
-035
-036import org.apache.hadoop.hbase.KeyValue.Type;
-037import org.apache.hadoop.hbase.filter.ByteArrayComparable;
-038import org.apache.hadoop.hbase.io.HeapSize;
-039import org.apache.hadoop.hbase.io.TagCompressionContext;
-040import org.apache.hadoop.hbase.io.util.Dictionary;
-041import org.apache.hadoop.hbase.io.util.StreamUtils;
-042import org.apache.hadoop.hbase.util.ByteBufferUtils;
-043import org.apache.hadoop.hbase.util.ByteRange;
-044import org.apache.hadoop.hbase.util.Bytes;
-045import org.apache.hadoop.hbase.util.ClassSize;
-046import org.apache.yetus.audience.InterfaceAudience;
-047
-048
-049/**
-050 * Utility methods helpful slinging {@link Cell} instances. It has more powerful and
-051 * rich set of APIs than those in {@link CellUtil} for internal usage.
-052 */
-053@InterfaceAudience.Private
-054public final class PrivateCellUtil {
-055
-056  /**
-057   * Private constructor to keep this class from being instantiated.
-058   */
-059  private PrivateCellUtil() {
-060  }
+023import java.io.DataOutput;
+024import java.io.DataOutputStream;
+025import java.io.IOException;
+026import java.io.OutputStream;
+027import java.math.BigDecimal;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Iterator;
+031import java.util.List;
+032import java.util.Optional;
+033import org.apache.hadoop.hbase.KeyValue.Type;
+034import org.apache.hadoop.hbase.filter.ByteArrayComparable;
+035import org.apache.hadoop.hbase.io.HeapSize;
+036import org.apache.hadoop.hbase.io.TagCompressionContext;
+037import org.apache.hadoop.hbase.io.util.Dictionary;
+038import org.apache.hadoop.hbase.io.util.StreamUtils;
+039import org.apache.hadoop.hbase.util.ByteBufferUtils;
+040import org.apache.hadoop.hbase.util.ByteRange;
+041import org.apache.hadoop.hbase.util.Bytes;
+042import org.apache.hadoop.hbase.util.ClassSize;
+043import org.apache.yetus.audience.InterfaceAudience;
+044
+045import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+046
+047/**
+048 * Utility methods helpful slinging {@link Cell} instances. It has more powerful and
+049 * rich set of APIs than those in {@link CellUtil} for internal usage.
+050 */
+051@InterfaceAudience.Private
+052public final class PrivateCellUtil {
+053
+054  /**
+055   * Private constructor to keep this class from being instantiated.
+056   */
+057  private PrivateCellUtil() {
+058  }
+059
+060  /*** ByteRange ***/
 061
-062  /*** ByteRange ***/
-063
-064  public static ByteRange fillRowRange(Cell cell, ByteRange range) {
-065    return range.set(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
-066  }
-067
-068  public static ByteRange fillFamilyRange(Cell cell, ByteRange range) {
-069    return range.set(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
-070  }
-071
-072  public static ByteRange fillQualifierRange(Cell cell, ByteRange range) {
-073    return range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-074      cell.getQualifierLength());
-075  }
-076
-077  public static ByteRange fillValueRange(Cell cell, ByteRange range) {
-078    return range.set(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
-079  }
-080
-081  public static ByteRange fillTagRange(Cell cell, ByteRange range) {
-082    return range.set(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
-083  }
+062  public static ByteRange fillRowRange(Cell cell, ByteRange range) {
+063    return range.set(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
+064  }
+065
+066  public static ByteRange fillFamilyRange(Cell cell, ByteRange range) {
+067    return 

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b618ac40/devapidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
            --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
index 38865a3..8b6f080 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
@@ -26,746 +26,954 @@
 018
 019package org.apache.hadoop.hbase.client;
 020
-021import java.io.IOException;
-022import java.nio.ByteBuffer;
-023import java.util.ArrayList;
-024import java.util.Arrays;
-025import java.util.HashMap;
-026import java.util.List;
-027import java.util.Map;
-028import java.util.NavigableMap;
-029import java.util.TreeMap;
-030import java.util.UUID;
-031import java.util.stream.Collectors;
-032import org.apache.hadoop.hbase.Cell;
-033import org.apache.hadoop.hbase.CellScannable;
-034import org.apache.hadoop.hbase.CellScanner;
-035import org.apache.hadoop.hbase.CellUtil;
-036import org.apache.hadoop.hbase.HConstants;
-037import org.apache.hadoop.hbase.KeyValue;
-038import org.apache.hadoop.hbase.PrivateCellUtil;
-039import org.apache.hadoop.hbase.Tag;
-040import org.apache.hadoop.hbase.exceptions.DeserializationException;
-041import org.apache.hadoop.hbase.io.HeapSize;
-042import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-043import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-044import org.apache.hadoop.hbase.security.access.AccessControlConstants;
-045import org.apache.hadoop.hbase.security.access.AccessControlUtil;
-046import org.apache.hadoop.hbase.security.access.Permission;
-047import org.apache.hadoop.hbase.security.visibility.CellVisibility;
-048import org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
-049import org.apache.hadoop.hbase.util.Bytes;
-050import org.apache.hadoop.hbase.util.ClassSize;
-051import org.apache.yetus.audience.InterfaceAudience;
-052
-053import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-054import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-055import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
-056import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataInput;
-057import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataOutput;
-058import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteStreams;
+021import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
+022
+023import java.io.IOException;
+024import java.nio.ByteBuffer;
+025import java.util.ArrayList;
+026import java.util.Arrays;
+027import java.util.HashMap;
+028import java.util.Iterator;
+029import java.util.List;
+030import java.util.Map;
+031import java.util.NavigableMap;
+032import java.util.Optional;
+033import java.util.TreeMap;
+034import java.util.UUID;
+035import java.util.stream.Collectors;
+036import org.apache.hadoop.hbase.ArrayBackedTag;
+037import org.apache.hadoop.hbase.Cell;
+038import org.apache.hadoop.hbase.CellScannable;
+039import org.apache.hadoop.hbase.CellScanner;
+040import org.apache.hadoop.hbase.CellUtil;
+041import org.apache.hadoop.hbase.ExtendedCell;
+042import org.apache.hadoop.hbase.HConstants;
+043import org.apache.hadoop.hbase.KeyValue;
+044import org.apache.hadoop.hbase.PrivateCellUtil;
+045import org.apache.hadoop.hbase.RawCell;
+046import org.apache.hadoop.hbase.Tag;
+047import org.apache.hadoop.hbase.exceptions.DeserializationException;
+048import org.apache.hadoop.hbase.io.HeapSize;
+049import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+050import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
+051import org.apache.hadoop.hbase.security.access.AccessControlConstants;
+052import org.apache.hadoop.hbase.security.access.AccessControlUtil;
+053import org.apache.hadoop.hbase.security.access.Permission;
+054import org.apache.hadoop.hbase.security.visibility.CellVisibility;
+055import org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
+056import org.apache.hadoop.hbase.util.Bytes;
+057import org.apache.hadoop.hbase.util.ClassSize;
+058import org.apache.yetus.audience.InterfaceAudience;
 059
-060@InterfaceAudience.Public
-061public abstract class Mutation extends OperationWithAttributes implements Row, CellScannable,
-062    HeapSize {
-063  public static final long MUTATION_OVERHEAD = ClassSize.align(
-064      // This
-065      ClassSize.OBJECT +
-066      // row + OperationWithAttributes.attributes
-067      2 * ClassSize.REFERENCE +
-068      // Timestamp
-069      1 * Bytes.SIZEOF_LONG +
-070      // durability
-071      ClassSize.REFERENCE +
-072      // familyMap
-073      ClassSize.REFERENCE +
-074      // familyMap
-075      ClassSize.TREEMAP +
-076      // priority
-077      ClassSize.INTEGER
-078  );
-079
-080  /**
-081   
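
The MUTATION_OVERHEAD constant in the lines above is plain heap bookkeeping: one object header, references for row, attributes, durability and familyMap, one long timestamp, the TreeMap shell that backs familyMap, and an int, all rounded up by ClassSize.align. As back-of-envelope arithmetic, with sizes assumed for a 64-bit JVM with compressed oops rather than taken from ClassSize:

    // Hedged arithmetic only; real values come from ClassSize at runtime.
    long mutationOverhead =
        16        // ClassSize.OBJECT: assumed object header
        + 2 * 8   // row + OperationWithAttributes.attributes references
        + 8       // timestamp (Bytes.SIZEOF_LONG)
        + 8       // durability reference
        + 8       // familyMap reference
        + 80      // ClassSize.TREEMAP: assumed TreeMap shell for familyMap
        + 4;      // priority int
    // Under these assumed sizes the sum is 140; ClassSize.align rounds it
    // up to the next multiple of 8, giving 144.
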

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7c0589c0/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.RestoreSnapshotFuture.html
            --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.RestoreSnapshotFuture.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.RestoreSnapshotFuture.html
index 6fecbc9..2accda0 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.RestoreSnapshotFuture.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.RestoreSnapshotFuture.html
@@ -34,4140 +34,4141 @@
 026import java.nio.charset.StandardCharsets;
 027import java.util.ArrayList;
 028import java.util.Arrays;
-029import java.util.Collection;
-030import java.util.EnumSet;
-031import java.util.HashMap;
-032import java.util.Iterator;
-033import java.util.LinkedList;
-034import java.util.List;
-035import java.util.Map;
-036import java.util.Set;
-037import java.util.concurrent.Callable;
-038import java.util.concurrent.ExecutionException;
-039import java.util.concurrent.Future;
-040import java.util.concurrent.TimeUnit;
-041import java.util.concurrent.TimeoutException;
-042import java.util.concurrent.atomic.AtomicInteger;
-043import java.util.concurrent.atomic.AtomicReference;
-044import java.util.regex.Pattern;
-045import java.util.stream.Collectors;
-046import java.util.stream.Stream;
-047import org.apache.hadoop.conf.Configuration;
-048import org.apache.hadoop.hbase.Abortable;
-049import org.apache.hadoop.hbase.CacheEvictionStats;
-050import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
-051import org.apache.hadoop.hbase.ClusterMetrics.Option;
-052import org.apache.hadoop.hbase.ClusterStatus;
-053import org.apache.hadoop.hbase.DoNotRetryIOException;
-054import org.apache.hadoop.hbase.HBaseConfiguration;
-055import org.apache.hadoop.hbase.HConstants;
-056import org.apache.hadoop.hbase.HRegionInfo;
-057import org.apache.hadoop.hbase.HRegionLocation;
-058import org.apache.hadoop.hbase.HTableDescriptor;
-059import org.apache.hadoop.hbase.MasterNotRunningException;
-060import org.apache.hadoop.hbase.MetaTableAccessor;
-061import org.apache.hadoop.hbase.NamespaceDescriptor;
-062import org.apache.hadoop.hbase.NamespaceNotFoundException;
-063import org.apache.hadoop.hbase.NotServingRegionException;
-064import org.apache.hadoop.hbase.RegionLoad;
-065import org.apache.hadoop.hbase.RegionLocations;
-066import org.apache.hadoop.hbase.ServerName;
-067import org.apache.hadoop.hbase.TableExistsException;
-068import org.apache.hadoop.hbase.TableName;
-069import org.apache.hadoop.hbase.TableNotDisabledException;
-070import org.apache.hadoop.hbase.TableNotFoundException;
-071import org.apache.hadoop.hbase.UnknownRegionException;
-072import org.apache.hadoop.hbase.ZooKeeperConnectionException;
-073import org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-074import org.apache.hadoop.hbase.client.replication.TableCFs;
-075import org.apache.hadoop.hbase.client.security.SecurityCapability;
-076import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-077import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-078import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-079import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-080import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-081import org.apache.hadoop.hbase.quotas.QuotaFilter;
-082import org.apache.hadoop.hbase.quotas.QuotaRetriever;
-083import org.apache.hadoop.hbase.quotas.QuotaSettings;
-084import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
-085import org.apache.hadoop.hbase.replication.ReplicationException;
-086import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-087import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-088import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-089import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
-090import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-091import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-092import org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
-093import org.apache.hadoop.hbase.util.Addressing;
-094import org.apache.hadoop.hbase.util.Bytes;
-095import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-096import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-097import org.apache.hadoop.hbase.util.Pair;
-098import org.apache.hadoop.ipc.RemoteException;
-099import org.apache.hadoop.util.StringUtils;
-100import org.apache.yetus.audience.InterfaceAudience;
-101import org.apache.yetus.audience.InterfaceStability;
-102import org.slf4j.Logger;
-103import org.slf4j.LoggerFactory;
-104
-105import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-106import 

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4b2cc17/devapidocs/index-all.html
            --
            diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
            index ce3d805..d6491d4 100644
            --- a/devapidocs/index-all.html
            +++ b/devapidocs/index-all.html
            @@ -1526,10 +1526,6 @@
             
             addCurrentScanners(List?
             extends KeyValueScanner) - Method in class 
            org.apache.hadoop.hbase.regionserver.StoreScanner
             
            -addDaughter(Connection,
             RegionInfo, ServerName, long) - Static method in class 
            org.apache.hadoop.hbase.MetaTableAccessor
            -
            -Adds a daughter region entry to meta.
            -
             addDaughtersToPut(Put,
             RegionInfo, RegionInfo) - Static method in class 
            org.apache.hadoop.hbase.MetaTableAccessor
             
             Adds split daughters to the Put
            @@ -2144,18 +2140,6 @@
             
             Adds a hbase:meta row for the specified new region.
             
            -addRegionToMeta(Table,
             RegionInfo) - Static method in class org.apache.hadoop.hbase.MetaTableAccessor
            -
            -Adds a hbase:meta row for the specified new region to the 
            given catalog table.
            -
            -addRegionToMeta(Table,
             RegionInfo, RegionInfo, RegionInfo) - Static method in class 
            org.apache.hadoop.hbase.MetaTableAccessor
            -
            -Adds a (single) hbase:meta row for the specified new region 
            and its daughters.
            -
            -addRegionToMeta(Connection,
             RegionInfo, RegionInfo, RegionInfo) - Static method in class 
            org.apache.hadoop.hbase.MetaTableAccessor
            -
            -Adds a (single) hbase:meta row for the specified new region 
            and its daughters.
            -
             addRegionToRemove(RegionInfo)
             - Method in class org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper.RestoreMetaChanges
             
             addRegionToRestore(RegionInfo)
             - Method in class org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper.RestoreMetaChanges
            @@ -2370,6 +2354,10 @@
             
             Add sources for the given peer cluster on this region 
            server.
             
            +addSpiltsToParent(Connection,
             RegionInfo, RegionInfo, RegionInfo) - Static method in class 
            org.apache.hadoop.hbase.MetaTableAccessor
            +
            +Adds daughter region infos to hbase:meta row for the 
            specified region.
            +
             addSplit(long,
             long) - Method in class org.apache.hadoop.hbase.master.MetricsMasterFileSystem
             
             Record a single instance of a split
            @@ -2975,6 +2963,10 @@
             
             Create a Append operation for the specified row.
             
+Append(byte[],
 long, NavigableMap<byte[], List<Cell>>) - Constructor 
for class org.apache.hadoop.hbase.client.Append
+
+Construct the Append with user defined data.  (A usage sketch follows 
this index fragment.)
+
             append(Append)
             - Method in interface org.apache.hadoop.hbase.client.AsyncTable
             
             Appends values to one or more columns within a single 
            row.
            @@ -4899,9 +4891,9 @@
             
             backupMasterAddressesZNode
             - Variable in class org.apache.hadoop.hbase.zookeeper.ZNodePaths
             
            -backupMasters
             - Variable in class org.apache.hadoop.hbase.ClusterStatus
            +backupMasterNames
             - Variable in class org.apache.hadoop.hbase.ClusterMetricsBuilder
             
            -backupMasters
             - Variable in class org.apache.hadoop.hbase.ClusterStatus.Builder
            +backupMasterNames
             - Variable in class org.apache.hadoop.hbase.ClusterMetricsBuilder.ClusterMetricsImpl
             
             BackupMasterStatusTmpl - Class in org.apache.hadoop.hbase.tmpl.master
             
            @@ -5197,9 +5189,9 @@
             
             balancerChore
             - Variable in class org.apache.hadoop.hbase.master.HMaster
             
            -balancerOn
             - Variable in class org.apache.hadoop.hbase.ClusterStatus
            +balancerOn
             - Variable in class org.apache.hadoop.hbase.ClusterMetricsBuilder
             
            -balancerOn
             - Variable in class org.apache.hadoop.hbase.ClusterStatus.Builder
            +balancerOn
             - Variable in class org.apache.hadoop.hbase.ClusterMetricsBuilder.ClusterMetricsImpl
             
             BalancerRegionLoad - Class in org.apache.hadoop.hbase.master.balancer
             
            @@ -6488,6 +6480,10 @@
             
             This class should not be instantiated.
             
            +bloomFilterSize
             - Variable in class org.apache.hadoop.hbase.RegionMetricsBuilder
            +
            +bloomFilterSize
             - Variable in class org.apache.hadoop.hbase.RegionMetricsBuilder.RegionMetricsImpl
            +
             bloomFilterType
             - Variable in class org.apache.hadoop.hbase.regionserver.StoreFileReader
             
             BloomFilterUtil - Class in org.apache.hadoop.hbase.util
            @@ -6929,7 +6925,7 @@
             
             build()
             - Method in class org.apache.hadoop.hbase.client.TableDescriptorBuilder
             
            -build()
             - Method in class org.apache.hadoop.hbase.ClusterStatus.Builder
            +build()
             - Method in class org.apache.hadoop.hbase.ClusterMetricsBuilder
             
             build()
             - Method in interface org.apache.hadoop.hbase.ExtendedCellBuilder
             
            @@ -6951,6 +6947,8 @@
             
             build() - 
            Method in interface org.apache.hadoop.hbase.RawCellBuilder
             
            +build()
             - Method in class org.apache.hadoop.hbase.RegionMetricsBuilder
            +
             build()
             - Method in class org.apache.hadoop.hbase.regionserver.CustomizedScanInfoBuilder
             
             build()
             - Method in class org.apache.hadoop.hbase.regionserver.ScannerContext.Builder
            @@ -6963,6 +6961,8 @@
             
             build()
             - Method in class org.apache.hadoop.hbase.rest.model.ScannerModel.FilterModel.ByteArrayComparableModel
             
            +build()
             - Method in class org.apache.hadoop.hbase.ServerMetricsBuilder
            +
             build()
             - Method in class 
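
A minimal usage sketch of the Append client API whose new constructor is 
indexed above. This is an illustration, not the project's own example code: 
the table name "t", row key, and column coordinates are hypothetical, and it 
assumes the 2.x client calls (ConnectionFactory, Table.append, and 
Append.addColumn, which supersedes the older add()).

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class AppendSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("t"))) {
      // Appends the given bytes to whatever the cell currently holds.
      Append append = new Append(Bytes.toBytes("row-1"));
      append.addColumn(Bytes.toBytes("d"), Bytes.toBytes("q"), Bytes.toBytes("-suffix"));
      Result result = table.append(append); // returns the post-append cell values
      System.out.println(result);
    }
  }
}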

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
            --
            diff --git 
            a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html 
            b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
            index f8df828..568381d 100644
            --- a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
            +++ b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
            @@ -103,7 +103,7 @@
             
             
             
            -public static interface MetaTableAccessor.CloseableVisitor
            +public static interface MetaTableAccessor.CloseableVisitor
             extends MetaTableAccessor.Visitor, http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true;
             title="class or interface in java.io">Closeable
             Implementations 'visit' a catalog table row but with 
            close() at the end.
             
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
            --
            diff --git 
            a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html 
            b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
            index 1bad2b1..7cf80e2 100644
            --- 
            a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
            +++ 
            b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
            @@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
             
             
             
            -static class MetaTableAccessor.CollectAllVisitor
            +static class MetaTableAccessor.CollectAllVisitor
 extends MetaTableAccessor.CollectingVisitor<Result>
             Collects all returned.
             
            @@ -214,7 +214,7 @@ extends 
             
             CollectAllVisitor
            -CollectAllVisitor()
            +CollectAllVisitor()
             
             
             
            @@ -231,7 +231,7 @@ extends 
             
             add
-void add(Result r)
+void add(Result r)
             
             Specified by:
 add in
 class MetaTableAccessor.CollectingVisitor<Result>
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
            --
            diff --git 
            a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html 
            b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
            index 0bd778b..c9a475e 100644
            --- 
            a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
            +++ 
            b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
            @@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab";
             
             
             
-abstract static class MetaTableAccessor.CollectingVisitor<T>
+abstract static class MetaTableAccessor.CollectingVisitor<T>
             extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
             title="class or interface in java.lang">Object
             implements MetaTableAccessor.Visitor
             A MetaTableAccessor.Visitor that 
            collects content out of passed Result.
            @@ -221,7 +221,7 @@ implements 
             
             results
-final http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List<T> results
+final http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List<T> results
             
             
             
            @@ -238,7 +238,7 @@ implements 
             
             CollectingVisitor
            -CollectingVisitor()
            +CollectingVisitor()
             
             
             
            @@ -255,7 +255,7 @@ implements 
             
             visit
-public boolean visit(Result r)
+public boolean visit(Result r)
               throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
             title="class or interface in java.io">IOException
             Description copied from 
            interface:MetaTableAccessor.Visitor
             Visit the catalog table row.
            @@ -278,7 +278,7 @@ implements 
             
             add
-abstract void add(Result r)
+abstract void add(Result r)
             
             
             
            @@ -287,7 +287,7 @@ implements 
             
             getResults
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List<T> getResults()
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List<T> getResults()
             
             Returns:
             Collected results; wait till visits complete to collect all
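
The CollectingVisitor<T> and CollectAllVisitor pages above describe a small 
template-method pattern: the abstract base accumulates into a List<T> while 
subclasses decide what to keep from each visited row. A self-contained sketch 
of that pattern in plain Java; the String stand-in for HBase's Result type 
and all class names here are hypothetical, since the real classes are 
private to MetaTableAccessor.

import java.util.ArrayList;
import java.util.List;

public class VisitorSketch {
  // Mirrors MetaTableAccessor.CollectingVisitor<T>.
  abstract static class CollectingVisitor<T> {
    final List<T> results = new ArrayList<>();

    // Mirrors Visitor.visit(Result): inspect one row, stash what we want.
    public boolean visit(String row) {
      add(row);
      return true; // returning true keeps the scan going
    }

    abstract void add(String row);

    List<T> getResults() {
      return results; // complete only after all visits have run
    }
  }

  // Mirrors CollectAllVisitor: keep every row seen.
  static class CollectAll extends CollectingVisitor<String> {
    @Override
    void add(String row) {
      results.add(row);
    }
  }

  public static void main(String[] args) {
    CollectAll v = new CollectAll();
    v.visit("row-a");
    v.visit("row-b");
    System.out.println(v.getResults()); // [row-a, row-b]
  }
}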
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
            --
            diff --git 
            a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html 
            b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
            index 8d07ed9..aa0de43 100644
            --- 
            a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
            +++ 
            b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
            @@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab";
             
             
             
            -public abstract static class 

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4c0cfa5/apidocs/src-html/org/apache/hadoop/hbase/types/DataType.html
            --
            diff --git a/apidocs/src-html/org/apache/hadoop/hbase/types/DataType.html 
            b/apidocs/src-html/org/apache/hadoop/hbase/types/DataType.html
            index 02a06fc..2908977 100644
            --- a/apidocs/src-html/org/apache/hadoop/hbase/types/DataType.html
            +++ b/apidocs/src-html/org/apache/hadoop/hbase/types/DataType.html
            @@ -25,9 +25,9 @@
             017 */
             018package org.apache.hadoop.hbase.types;
             019
            -020import 
            org.apache.yetus.audience.InterfaceAudience;
            -021import 
            org.apache.hadoop.hbase.util.Order;
            -022import 
            org.apache.hadoop.hbase.util.PositionedByteRange;
            +020import 
            org.apache.hadoop.hbase.util.Order;
            +021import 
            org.apache.hadoop.hbase.util.PositionedByteRange;
            +022import 
            org.apache.yetus.audience.InterfaceAudience;
             023
             024/**
             025 * p
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4c0cfa5/apidocs/src-html/org/apache/hadoop/hbase/types/FixedLengthWrapper.html
            --
            diff --git 
            a/apidocs/src-html/org/apache/hadoop/hbase/types/FixedLengthWrapper.html 
            b/apidocs/src-html/org/apache/hadoop/hbase/types/FixedLengthWrapper.html
            index c4faacb..c955c7c 100644
            --- a/apidocs/src-html/org/apache/hadoop/hbase/types/FixedLengthWrapper.html
            +++ b/apidocs/src-html/org/apache/hadoop/hbase/types/FixedLengthWrapper.html
            @@ -25,10 +25,10 @@
             017 */
             018package org.apache.hadoop.hbase.types;
             019
            -020import 
            org.apache.yetus.audience.InterfaceAudience;
            -021import 
            org.apache.hadoop.hbase.util.Order;
            -022import 
            org.apache.hadoop.hbase.util.PositionedByteRange;
            -023import 
            org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;
            +020import 
            org.apache.hadoop.hbase.util.Order;
            +021import 
            org.apache.hadoop.hbase.util.PositionedByteRange;
            +022import 
            org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;
            +023import 
            org.apache.yetus.audience.InterfaceAudience;
             024
             025/**
             026 * Wraps an existing {@link DataType} 
            implementation as a fixed-length
            @@ -55,62 +55,78 @@
             047  /**
             048   * Retrieve the maximum length (in 
            bytes) of encoded values.
             049   */
            -050  public int getLength() { return length; 
            }
            -051
            -052  @Override
            -053  public boolean isOrderPreserving() { 
            return base.isOrderPreserving(); }
            -054
            -055  @Override
            -056  public Order getOrder() { return 
            base.getOrder(); }
            -057
            -058  @Override
            -059  public boolean isNullable() { return 
            base.isNullable(); }
            -060
            -061  @Override
            -062  public boolean isSkippable() { return 
            true; }
            +050  public int getLength() {
            +051return length;
            +052  }
            +053
            +054  @Override
            +055  public boolean isOrderPreserving() {
            +056return base.isOrderPreserving();
            +057  }
            +058
            +059  @Override
            +060  public Order getOrder() {
            +061return base.getOrder();
            +062  }
             063
             064  @Override
            -065  public int encodedLength(T val) { 
            return length; }
            -066
            -067  @Override
-068  public Class<T> encodedClass() { 
return base.encodedClass(); }
            -069
            -070  @Override
            -071  public int skip(PositionedByteRange 
            src) {
            -072src.setPosition(src.getPosition() + 
            this.length);
            -073return this.length;
            -074  }
            -075
            -076  @Override
            -077  public T decode(PositionedByteRange 
            src) {
-078if (src.getRemaining() < length) 
            {
            -079  throw new 
            IllegalArgumentException("Not enough buffer remaining. src.offset: "
            -080  + src.getOffset() + " 
            src.length: " + src.getLength() + " src.position: "
            -081  + src.getPosition() + " max 
            length: " + length);
            -082}
            -083// create a copy range limited to 
            length bytes. boo.
            -084PositionedByteRange b = new 
            SimplePositionedMutableByteRange(length);
            -085src.get(b.getBytes());
            -086return base.decode(b);
            -087  }
            -088
            -089  @Override
            -090  public int encode(PositionedByteRange 
            dst, T val) {
-091if (dst.getRemaining() < length) 
            {
            -092  throw new 
            IllegalArgumentException("Not enough buffer remaining. dst.offset: "
            -093  + dst.getOffset() + " 
            dst.length: " + dst.getLength() + " dst.position: "
            -094  + dst.getPosition() + " max 
            length: " + length);
            -095}
            -096int written = base.encode(dst, 
            val);
-097if (written > length) {
            -098  throw new 
            IllegalArgumentException("Length of encoded value (" + written
            -099  + ") exceeds max length (" + 
            length + ").");
            -100}
            -101// TODO: is the zero-padding 
            appropriate?
-102for (; written < length; 
written++) { dst.put((byte) 0x00); }
            -103return written;
            -104  }
            -105}
            +065  public boolean isNullable() {
            +066return base.isNullable();
            +067  }
            +068
            +069  @Override
            +070  public boolean isSkippable() {
            +071return true;
            +072  }
            +073
            +074  @Override
            +075  public int encodedLength(T val) {
            +076return length;
            +077  }
            +078
            +079  @Override
+080  public Class<T> encodedClass() 
            {
            +081return base.encodedClass();
            +082  }
            +083
            +084  @Override
            +085  public int skip(PositionedByteRange 
            src) 
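
A round-trip sketch of the FixedLengthWrapper behavior shown in the hunk 
above: values shorter than the fixed length are zero-padded on encode, and 
decode always consumes the full fixed-size slot. It assumes RawString as the 
wrapped type; the class name is illustrative.

import org.apache.hadoop.hbase.types.DataType;
import org.apache.hadoop.hbase.types.FixedLengthWrapper;
import org.apache.hadoop.hbase.types.RawString;
import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;

public class FixedLengthSketch {
  public static void main(String[] args) {
    // Wrap a variable-length type into an 8-byte slot.
    DataType<String> type = new FixedLengthWrapper<>(new RawString(), 8);
    PositionedByteRange buf = new SimplePositionedMutableByteRange(8);
    type.encode(buf, "abc");       // writes 3 bytes, then pads 5 zero bytes
    buf.setPosition(0);
    String decoded = type.decode(buf);     // reads back the full 8-byte slot
    System.out.println(decoded.length());  // 8: the padding survives the round trip
  }
}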

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/071f974b/devapidocs/src-html/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.html
            --
            diff --git 
            a/devapidocs/src-html/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.html
             
            b/devapidocs/src-html/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.html
            index 0b8baa8..c77170b 100644
            --- 
            a/devapidocs/src-html/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.html
            +++ 
            b/devapidocs/src-html/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.html
            @@ -30,308 +30,325 @@
             022import org.apache.commons.logging.Log;
             023import 
            org.apache.commons.logging.LogFactory;
             024import 
            org.apache.hadoop.conf.Configured;
            -025import 
            org.apache.hadoop.hbase.TableName;
            -026import 
            org.apache.hadoop.hbase.client.Connection;
            -027import 
            org.apache.hadoop.hbase.client.ConnectionFactory;
            -028import 
            org.apache.hadoop.hbase.client.Put;
            -029import 
            org.apache.hadoop.hbase.client.RegionLocator;
            -030import 
            org.apache.hadoop.hbase.client.Result;
            -031import 
            org.apache.hadoop.hbase.client.ResultScanner;
            -032import 
            org.apache.hadoop.hbase.client.Scan;
            -033import 
            org.apache.hadoop.hbase.client.Table;
            -034import 
            org.apache.hadoop.hbase.filter.KeyOnlyFilter;
            -035import 
            org.apache.hadoop.hbase.util.Bytes;
            -036import org.apache.hadoop.util.Tool;
            -037import 
            org.apache.hadoop.util.ToolRunner;
            -038
            -039import java.io.IOException;
            -040import java.util.ArrayList;
            -041import java.util.List;
            -042import java.util.concurrent.Callable;
            -043import 
            java.util.concurrent.ExecutorService;
            -044import java.util.concurrent.Executors;
            -045import 
            java.util.concurrent.ForkJoinPool;
            -046import java.util.concurrent.Future;
            -047import 
            java.util.concurrent.ThreadFactory;
            -048import 
            java.util.concurrent.ThreadLocalRandom;
            -049import java.util.concurrent.TimeUnit;
            -050
            -051
            -052/**
            -053 * Example on how to use HBase's {@link 
            Connection} and {@link Table} in a
            -054 * multi-threaded environment. Each table 
            is a light weight object
            -055 * that is created and thrown away. 
            Connections are heavy weight objects
            -056 * that hold on to zookeeper connections, 
            async processes, and other state.
            -057 *
            -058 * pre
            -059 * Usage:
            -060 * bin/hbase 
            org.apache.hadoop.hbase.client.example.MultiThreadedClientExample testTableName 
            50
            -061 * /pre
            -062 *
            -063 * p
            -064 * The table should already be created 
            before running the command.
            -065 * This example expects one column family 
            named d.
            -066 * /p
            -067 * p
            -068 * This is meant to show different 
            operations that are likely to be
            -069 * done in a real world application. 
            These operations are:
            -070 * /p
            -071 *
            -072 * ul
            -073 *   li
            -074 * 30% of all operations performed 
            are batch writes.
            -075 * 30 puts are created and sent out 
            at a time.
            -076 * The response for all puts is 
            waited on.
            -077 *   /li
            -078 *   li
            -079 * 20% of all operations are single 
            writes.
            -080 * A single put is sent out and the 
            response is waited for.
            -081 *   /li
            -082 *   li
            -083 * 50% of all operations are scans.
            -084 * These scans start at a random 
            place and scan up to 100 rows.
            -085 *   /li
            -086 * /ul
            -087 *
            -088 */
            -089public class MultiThreadedClientExample 
            extends Configured implements Tool {
            -090  private static final Log LOG = 
            LogFactory.getLog(MultiThreadedClientExample.class);
            -091  private static final int 
            DEFAULT_NUM_OPERATIONS = 50;
            -092
            -093  /**
            -094   * The name of the column family.
            -095   *
            -096   * d for default.
            -097   */
            -098  private static final byte[] FAMILY = 
            Bytes.toBytes("d");
            -099
            -100  /**
            -101   * For the example we're just using one 
            qualifier.
            -102   */
            -103  private static final byte[] QUAL = 
            Bytes.toBytes("test");
            -104
            -105  private final ExecutorService 
            internalPool;
            -106
            -107  private final int threads;
            -108
            -109  public MultiThreadedClientExample() 
            throws IOException {
            -110// Base number of threads.
            -111// This represents the number of 
            threads you application has
            -112// that can be interacting with an 
            hbase client.
            -113this.threads = 
            Runtime.getRuntime().availableProcessors() * 4;
            -114
            -115// Daemon threads are great for 
            things that get shut down.
            -116ThreadFactory threadFactory = new 
            ThreadFactoryBuilder()
            -117
            .setDaemon(true).setNameFormat("internal-pol-%d").build();
            -118
            -119
            -120this.internalPool = 
            Executors.newFixedThreadPool(threads, threadFactory);
            -121  }
            +025import 
            org.apache.hadoop.hbase.CellBuilder;
            +026import 
            org.apache.hadoop.hbase.CellBuilderFactory;
            +027import 
            org.apache.hadoop.hbase.CellBuilderType;
            +028import 
            org.apache.hadoop.hbase.TableName;
            +029import 
            org.apache.hadoop.hbase.client.Connection;
            +030import 
            org.apache.hadoop.hbase.client.ConnectionFactory;
            +031import 
            org.apache.hadoop.hbase.client.Put;
            +032import 
            org.apache.hadoop.hbase.client.RegionLocator;
            +033import 
            org.apache.hadoop.hbase.client.Result;
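
The class javadoc above stresses the key usage rule the example demonstrates: 
Connections are heavyweight and should be shared, while Tables are lightweight 
and created per operation. A minimal sketch of that pattern, reusing the 
example's table name and "d"/"test" column coordinates; the wrapper class 
itself is hypothetical.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class ConnectionReuseSketch {
  // One heavyweight Connection for the whole application.
  private final Connection connection;

  ConnectionReuseSketch(Configuration conf) throws IOException {
    this.connection = ConnectionFactory.createConnection(conf);
  }

  // A throwaway, lightweight Table per operation, closed immediately after use.
  void writeOne(byte[] row) throws IOException {
    try (Table table = connection.getTable(TableName.valueOf("testTableName"))) {
      Put put = new Put(row);
      put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("test"), Bytes.toBytes(1L));
      table.put(put);
    }
  }
}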
            

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dc4e5c85/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
            --
            diff --git 
            a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
             
            b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
            index 7c59e27..c904c56 100644
            --- 
            a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
            +++ 
            b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
            @@ -119,4048 +119,4054 @@
             111import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
             112import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest;
             113import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheRequest;
            -114import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
            -115import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
            -116import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
            -117import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
            -118import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest;
            -119import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse;
            -120import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest;
            -121import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest;
            -122import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
            -123import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
            -124import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
            -125import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
            -126import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
            -127import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
            -128import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
            -129import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
            -130import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
            -131import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
            -132import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
            -133import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
            -134import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
            -135import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersRequest;
            -136import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
            -137import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
            -138import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;
            -139import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;
            -140import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
            -141import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
            -142import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
            -143import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
            -144import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
            -145import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;
            -146import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse;
            -147import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
            -148import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
            -149import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
            -150import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
            -151import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
            -152import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
            -153import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
            -154import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
            -155import 
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksRequest;
            -156import 
            

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4abd958d/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
            --
            diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-summary.html 
            b/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
            index 4b51605..4195221 100644
            --- a/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
            +++ b/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
            @@ -267,62 +267,55 @@
             
             
             
            -ArrayUtils
            -
            -A set of array utility functions that return reasonable 
            values in cases where an array is
            - allocated or if it is null
            -
            -
            -
             AtomicUtils
             
             Utilities related to atomic operations.
             
             
            -
            +
             AvlUtil
             
             Helper class that allows to create and manipulate an 
            AvlTree.
             
             
            -
            +
             AvlUtil.AvlIterableList
             
             Helper class that allows to create and manipulate a linked 
            list of AvlLinkedNodes
             
             
            -
            +
 AvlUtil.AvlLinkedNode<TNode extends AvlUtil.AvlLinkedNode>
             
             This class extends the AvlNode and adds two links that will 
            be used in conjunction
              with the AvlIterableList class.
             
             
            -
            +
 AvlUtil.AvlNode<TNode 
extends AvlUtil.AvlNode>
             
             This class represent a node that will be used in an 
            AvlTree.
             
             
            -
            +
             AvlUtil.AvlTree
             
             Helper class that allows to create and manipulate an AVL 
            Tree
             
             
            -
            +
 AvlUtil.AvlTreeIterator<TNode extends AvlUtil.AvlNode>
             
             Iterator for the AvlTree
             
             
            -
            +
             Base64
             
             Encodes and decodes to and from Base64 notation.
             
             
            -
            +
             Base64.Base64InputStream
             
             A Base64.Base64InputStream will 
            read data from another
            @@ -330,7 +323,7 @@
              encode/decode to/from Base64 notation on the fly.
             
             
            -
            +
             Base64.Base64OutputStream
             
             A Base64.Base64OutputStream will 
            write data to another
            @@ -338,80 +331,80 @@
              encode/decode to/from Base64 notation on the fly.
             
             
            -
            +
             BloomContext
             
             The bloom context that is used by the StorefileWriter to 
            add the bloom details
              per cell
             
             
            -
            +
             BloomFilterChunk
             
             The basic building block for the CompoundBloomFilter
             
             
            -
            +
             BloomFilterFactory
             
             Handles Bloom filter initialization based on configuration 
            and serialized metadata in the reader
              and writer of HStoreFile.
             
             
            -
            +
             BloomFilterUtil
             
             Utility methods related to BloomFilters
             
             
            -
            +
 BoundedCompletionService<V>
             
 A completion service, close to the one available in JDK 
 1.7.
  However, this one keeps the list of futures, and allows cancelling them 
all.
             
             
            -
            +
 BoundedPriorityBlockingQueue<E>
             
             A generic bounded blocking Priority-Queue.
             
             
            -
            +
 BoundedPriorityBlockingQueue.PriorityQueue<E>
             
             
            -
            +
             ByteArrayHashKey
             
             
            -
            +
             ByteBufferArray
             
             This class manages an array of ByteBuffers with a default 
            size 4MB.
             
             
            -
            +
             ByteBufferArray.BufferCreatorCallable
             
             A callable that creates buffers of the specified length 
            either onheap/offheap using the
              ByteBufferAllocator
             
             
            -
            +
             ByteBufferUtils
             
             Utility functions for working with byte buffers, such as 
            reading/writing
              variable-length long numbers.
             
             
            -
            +
             ByteRangeUtils
             
             Utility methods for working with ByteRange.
             
             
            -
            +
             Bytes
             
             Utility class that handles byte arrays, conversions to/from 
            other types,
            @@ -419,50 +412,50 @@
              HashSets, and can be used as key in maps or trees.
             
             
            -
            +
             Bytes.ByteArrayComparator
             
             Byte array comparator class.
             
             
            -
            +
             Bytes.LexicographicalComparerHolder
             
             Provides a lexicographical comparer implementation; either 
            a Java
              implementation or a faster implementation based on Unsafe.
             
             
            -
            +
             Bytes.RowEndKeyComparator
             
             A Bytes.ByteArrayComparator that 
            treats the empty array as the largest value.
             
             
            -
            +
             ByteStringer
             
             Hack to workaround HBASE-10304 issue that keeps bubbling up 
            when a mapreduce context.
             
             
            -
            +
             CellHashKey
             
             Extracts the byte for the hash calculation from the given 
            cell
             
             
            -
            +
             Classes
             
             Utilities for class manipulation.
             
             
            -
            +
             ClassLoaderBase
             
 Base class loader that defines a couple of shared constants used 
by sub-classes.
             
             
            -
            +
             ClassSize
             
             Class for determining the "size" of a class, an attempt to 
            calculate the
            @@ -471,13 +464,13 @@
              The core of this class is taken from the Derby project
             
             
            -
            +
             ClassSize.MemoryLayout
             
             MemoryLayout abstracts details about the JVM object 
            layout.
             
             
            -
            +
             ClassSize.UnsafeLayout
             
             UnsafeLayout uses Unsafe to guesstimate the object-layout 
            related parameters like object header
            @@ -485,400 +478,400 @@
              See HBASE-15950.
             
             
            -
            +
             ClassSize.UnsafeLayout.HeaderSize
             
             
            -
            +
             CollectionBackedScanner
             
             Utility scanner that wraps a sortable collection and serves 
            as a KeyValueScanner.
             
             
            -
            +
             CollectionUtils
             
             Utility methods for dealing with Collections, including 
            treating null collections as empty.
             
             
            -
            +
             CommonFSUtils
             
             Utility methods for interacting with the underlying file 
            system.
             
             
            -
            +
             CommonFSUtils.StreamCapabilities
             
             
            -
            +
             CompressionTest
             
             Compression validation test.
             
             
            -
            +
 ConcatenatedLists<T>
             
             A collection class that contains multiple sub-lists, which 
            allows us to not copy lists.
             
             
            -
            +
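
A few of the utilities summarized above in action. These are standard 
org.apache.hadoop.hbase.util.Bytes calls; the conversions and the 
lexicographic compareTo shown here are the ordering HBase row keys rely on.

import org.apache.hadoop.hbase.util.Bytes;

public class BytesSketch {
  public static void main(String[] args) {
    // Conversions to/from other types, as the class summary says.
    byte[] key = Bytes.toBytes("row-42");
    byte[] counter = Bytes.toBytes(42L);
    System.out.println(Bytes.toString(key));   // "row-42"
    System.out.println(Bytes.toLong(counter)); // 42
    // Lexicographic byte-array comparison.
    System.out.println(Bytes.compareTo(Bytes.toBytes("a"), Bytes.toBytes("b")) < 0); // true
  }
}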
             

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e23b49ba/devapidocs/org/apache/hadoop/hbase/client/Connection.html
            --
            diff --git a/devapidocs/org/apache/hadoop/hbase/client/Connection.html 
            b/devapidocs/org/apache/hadoop/hbase/client/Connection.html
            index e4a7797..2885fe8 100644
            --- a/devapidocs/org/apache/hadoop/hbase/client/Connection.html
            +++ b/devapidocs/org/apache/hadoop/hbase/client/Connection.html
            @@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
             
             
             All Known Implementing Classes:
            -ConnectionImplementation, 
            ConnectionUtils.MasterlessConnection, ConnectionUtils.ShortCircuitingClusterConnection
            +ConnectionImplementation, 
            ConnectionUtils.MasterlessConnection, ConnectionUtils.ShortCircuitingClusterConnection,
             SharedConnection
             
             
             
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e23b49ba/devapidocs/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.html
            --
            diff --git 
            a/devapidocs/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.html 
            b/devapidocs/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.html
            index 175e2f3..129feec 100644
            --- a/devapidocs/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.html
            +++ b/devapidocs/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.html
            @@ -200,7 +200,7 @@ extends 
             
 Methods inherited from class org.apache.hadoop.hbase.HColumnDescriptor
            -compareTo,
             equals,
             getBlocksize,
             getBloomFilterType,
             getCompactionCompression,
             getCompactionCompressionType,
             getCompression,
             getCompressionType,
             getConfiguration,
             getConfigurationValue,
             getDataBlockEncoding,
             getDefaultValues,
             getDFSReplication,
             getEncryptionKey,
             getEncryptionType,
             getInMemoryCompaction,
             getKeepDeletedCells, getMaxVersions,
             getMinVersions,
             getMobCompactPartitionPolicy,
             getMobThreshold,
             getName,
             getNameAsString,
             getScope,
             getStoragePolicy,
             getTimeToLive, getUnit,
             getValue,
             getValue,
             getValue,
             getValues,
             hashCode,
             isBlockCacheEnabled,
             isCacheBloomsOnWrite,
             isCacheDataInL1, isCacheDataOnWrite,
             isCacheIndexesOnWrite,
             isCompressTags,
             isEvictBlocksOnClose,
             isInMemory,
             isLegalFamilyName,
             isMobEnabled,
             isNewVersionBehavior,
             isPrefetchBlocksOnOpen,
             parseFrom,
             remove,
             removeConfiguration,
             setBlockCacheEnabled,
             setBlocksize,
             setBloomFilterType,
             setCacheBloomsOnWrite,
             setCacheDataInL1,
             setCacheDataOnWrite,
             setCacheIndexesOnWrite,
             setCompactionCompressionType,
             setCompressionType,
             setCompressTags,
             setConfiguration,
             setDataBlockEncoding,
             setDFSReplication,
             setEncryptionKey,
             setEncryptionType,
             setEvictBlocksOnClose,
             setInMemory,
             setInMemoryCompaction,
             setKeepDeletedCells,
             setMaxVersions,
             setMinVersions,
             setMobCompactPartitionPolicy,
             setMobEnabled,
             setMobThreshold,
             setNewVersionBehavior,
             setPrefetchBlocksOnOpen, setScope,
             setStoragePolicy,
             setTimeToLive,
             setTimeToLive,
             setValue,
             setValue,
             setVersions,
             toByteArray,
             toString, toStringCustomizedValues
            +compareTo,
             equals,
             getBlocksize,
             getBloomFilterType,
             getCompactionCompression,
             getCompactionCompressionType,
             getCompression,
             getCompressionType,
             getConfiguration,
             getConfigurationValue,
             getDataBlockEncoding,
             getDefaultValues,
             getDFSReplication,
             getEncryptionKey,
             getEncryptionType,
             getInMemoryCompaction,
             getKeepDeletedCells, getMaxVersions,
             getMinVersions,
             getMobCompactPartitionPolicy,
             getMobThreshold,
             getName,
             getNameAsString,
             getScope,
             getStoragePolicy,
             getTimeToLive, getUnit,
             getValue,
             getValue,
             getValue,
             getValues,
             hashCode,
             isBlockCacheEnabled,
             isCacheBloomsOnWrite,
             isCacheDataOnWrite, isCacheIndexesOnWrite,
             isCompressTags,
             isEvictBlocksOnClose,
             isInMemory,
             isLegalFamilyName,
             isMobEnabled,
             isNewVersionBehavior,
             isPrefetchBlocksOnOpen,
             parseFrom,
             remove,
             removeConfiguration,
             setBlockCacheEnabled,
             setBlocksize,
             setBloomFilterType,
             setCacheBloomsOnWrite,
             setCacheDataInL1,
             setCacheDataOnWrite,
             setCacheIndexesOnWrite,
             setCompactionCompressionType,
             setCompressionType,
             setCompressTags,
             setConfiguration,
             setDataBlockEncoding, setDFSReplication,
             setEncryptionKey,
             setEncryptionType,
             setEvictBlocksOnClose,
             setInMemory,
             setInMemoryCompaction,
             setKeepDeletedCells,
             setMaxVersions,
             setMinVersions,
             setMobCompactPartitionPolicy,
             setMobEnabled,
             setMobThreshold,
             setNewVersionBehavior,
             setPrefetchBlocksOnOpen,
             setScope,
             setStoragePolicy,
             setTimeToLive,
             setTimeToLive,
             setValue,
             setValue,
             setVersions,
             toByteArray,
             toString,
             toStringCustomizedValues
             
             
             
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e23b49ba/devapidocs/org/apache/hadoop/hbase/client/MasterCallable.html
            --
            diff --git a/devapidocs/org/apache/hadoop/hbase/client/MasterCallable.html 
            b/devapidocs/org/apache/hadoop/hbase/client/MasterCallable.html
            index 641523e..47be427 100644
            --- 

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/dependency-info.html
            --
            diff --git a/hbase-build-configuration/hbase-archetypes/dependency-info.html 
            b/hbase-build-configuration/hbase-archetypes/dependency-info.html
            index 5d94285..94a5728 100644
            --- a/hbase-build-configuration/hbase-archetypes/dependency-info.html
            +++ b/hbase-build-configuration/hbase-archetypes/dependency-info.html
            @@ -7,7 +7,7 @@
               
             
             
            -
            +
             
             Apache HBase - Archetypes  Dependency Information
             
            @@ -148,7 +148,7 @@
             https://www.apache.org/;>The Apache Software 
            Foundation.
             All rights reserved.  
             
            -  Last Published: 
            2017-12-05
            +  Last Published: 
            2017-12-06
             
             
             
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/dependency-management.html
            --
            diff --git 
            a/hbase-build-configuration/hbase-archetypes/dependency-management.html 
            b/hbase-build-configuration/hbase-archetypes/dependency-management.html
            index 324589b..f7f3c3e 100644
            --- a/hbase-build-configuration/hbase-archetypes/dependency-management.html
            +++ b/hbase-build-configuration/hbase-archetypes/dependency-management.html
            @@ -7,7 +7,7 @@
               
             
             
            -
            +
             
             Apache HBase - Archetypes  Project Dependency 
            Management
             
            @@ -775,18 +775,24 @@
             test-jar
             https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
            2.0
             
            +org.apache.hbase
            +http://hbase.apache.org/hbase-build-configuration/hbase-zookeeper;>hbase-zookeeper
            +3.0.0-SNAPSHOT
            +test-jar
            +https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
            2.0
            +
             org.bouncycastle
             http://www.bouncycastle.org/java.html;>bcprov-jdk16
             1.46
             jar
             http://www.bouncycastle.org/licence.html;>Bouncy Castle 
            Licence
            -
            +
             org.hamcrest
             https://github.com/hamcrest/JavaHamcrest/hamcrest-core;>hamcrest-core
             1.3
             jar
             http://www.opensource.org/licenses/bsd-license.php;>New BSD 
            License
            -
            +
             org.mockito
             http://mockito.org;>mockito-core
             2.1.0
            @@ -804,7 +810,7 @@
             https://www.apache.org/;>The Apache Software 
            Foundation.
             All rights reserved.  
             
            -  Last Published: 
            2017-12-05
            +  Last Published: 
            2017-12-06
             
             
             
            
            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependencies.html
            --
            diff --git 
            a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependencies.html
             
            b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependencies.html
            index 3f41509..e3aa300 100644
            --- 
            a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependencies.html
            +++ 
            b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependencies.html
            @@ -7,7 +7,7 @@
               
             
             
            -
            +
             
             Apache HBase - Archetype builder  Project 
            Dependencies
             
            @@ -330,7 +330,7 @@
             https://www.apache.org/;>The Apache Software 
            Foundation.
             All rights reserved.  
             
            -  Last Published: 
            2017-12-05
            +  Last Published: 
            2017-12-06
             
             
             
            
            
            

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c54c242b/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.html
            --
            diff --git 
            a/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.html 
            b/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.html
            new file mode 100644
            index 000..6c7ef2a
            --- /dev/null
            +++ 
            b/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.html
            @@ -0,0 +1,419 @@
            +http://www.w3.org/TR/html4/loose.dtd;>
            +
            +
            +Source code
            +
            +
            +
            +
+001/**
+002 * Licensed to the Apache Software Foundation (ASF) under one
+003 * or more contributor license agreements.  See the NOTICE file
+004 * distributed with this work for additional information
+005 * regarding copyright ownership.  The ASF licenses this file
+006 * to you under the Apache License, Version 2.0 (the
+007 * "License"); you may not use this file except in compliance
+008 * with the License.  You may obtain a copy of the License at
+009 *
+010 *     http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or agreed to in writing, software
+013 * distributed under the License is distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+015 * See the License for the specific language governing permissions and
+016 * limitations under the License.
+017 */
+018package org.apache.hadoop.hbase.zookeeper;
+019
+020import static org.apache.hadoop.hbase.HConstants.DEFAULT_ZK_SESSION_TIMEOUT;
+021import static org.apache.hadoop.hbase.HConstants.ZK_SESSION_TIMEOUT;
+022
+023import java.io.Closeable;
+024import java.io.IOException;
+025import java.util.Arrays;
+026import java.util.EnumSet;
+027import java.util.concurrent.CompletableFuture;
+028import java.util.concurrent.DelayQueue;
+029import java.util.concurrent.Delayed;
+030import java.util.concurrent.TimeUnit;
+031import java.util.concurrent.atomic.AtomicBoolean;
+032
+033import org.apache.commons.logging.Log;
+034import org.apache.commons.logging.LogFactory;
+035import org.apache.hadoop.conf.Configuration;
+036import org.apache.yetus.audience.InterfaceAudience;
+037import org.apache.zookeeper.KeeperException;
+038import org.apache.zookeeper.KeeperException.Code;
+039import org.apache.zookeeper.ZooKeeper;
+040import org.apache.zookeeper.data.Stat;
+041
+042import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+043
+044/**
+045 * A very simple read only zookeeper implementation without watcher support.
+046 */
+047@InterfaceAudience.Private
+048public final class ReadOnlyZKClient implements Closeable {
+049
+050  private static final Log LOG = LogFactory.getLog(ReadOnlyZKClient.class);
+051
+052  public static final String RECOVERY_RETRY = "zookeeper.recovery.retry";
+053
+054  private static final int DEFAULT_RECOVERY_RETRY = 30;
+055
+056  public static final String RECOVERY_RETRY_INTERVAL_MILLIS =
+057      "zookeeper.recovery.retry.intervalmill";
+058
+059  private static final int DEFAULT_RECOVERY_RETRY_INTERVAL_MILLIS = 1000;
+060
+061  public static final String KEEPALIVE_MILLIS = "zookeeper.keep-alive.time";
+062
+063  private static final int DEFAULT_KEEPALIVE_MILLIS = 60000;
+064
+065  private static final EnumSet<Code> FAIL_FAST_CODES = EnumSet.of(Code.NOAUTH, Code.AUTHFAILED);
+066
+067  private final String connectString;
+068
+069  private final int sessionTimeoutMs;
+070
+071  private final int maxRetries;
+072
+073  private final int retryIntervalMs;
+074
+075  private final int keepAliveTimeMs;
+076
+077  private static abstract class Task implements Delayed {
+078
+079    protected long time = System.nanoTime();
+080
+081    public boolean needZk() {
+082      return false;
+083    }
+084
+085    public void exec(ZooKeeper zk) {
+086    }
+087
+088    public void connectFailed(IOException e) {
+089    }
+090
+091    public void closed(IOException e) {
+092    }
+093
+094    @Override
+095    public int compareTo(Delayed o) {
+096      Task that = (Task) o;
+097      int c = Long.compare(time, that.time);
+098      if (c != 0) {
+099        return c;
+100      }
+101      return Integer.compare(System.identityHashCode(this), System.identityHashCode(that));
+102    }
+103
+104    @Override
+105    public long getDelay(TimeUnit unit) {
+106      return unit.convert(time - System.nanoTime(), TimeUnit.NANOSECONDS);
+107    }
+108  }
+109
+110  private static final Task CLOSE = new Task() {
+111  };
+112
+113  private final DelayQueue<Task> tasks = new DelayQueue<>();
+114
+115  private final AtomicBoolean closed = new AtomicBoolean(false);
+116
+117  private ZooKeeper zookeeper;
+118
+119  private String getId() {
+120    return String.format("0x%08x", System.identityHashCode(this));
+121  }
+122
+123  public ReadOnlyZKClient(Configuration conf) {
+124    this.connectString =

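The ReadOnlyZKClient excerpt above funnels all work through a java.util.concurrent.DelayQueue: every pending operation is a Task whose getDelay() tells the queue when to hand it out, and whose compareTo() falls back to identity hash codes so two tasks scheduled for the same instant never compare as equal. A minimal, self-contained sketch of that delay-queue scheduling pattern follows; the RetryTask class and the demo in main are invented for illustration and are not the HBase implementation.

import java.util.concurrent.DelayQueue;
import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;

public class DelayQueueSketch {

  // Hypothetical task type mirroring the Delayed contract used above.
  static class RetryTask implements Delayed {
    final String name;
    final long readyAtNanos; // absolute nanoTime at which this task becomes runnable

    RetryTask(String name, long delayMs) {
      this.name = name;
      this.readyAtNanos = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(delayMs);
    }

    @Override
    public long getDelay(TimeUnit unit) {
      // Remaining delay; once it reaches <= 0 the queue releases the task.
      return unit.convert(readyAtNanos - System.nanoTime(), TimeUnit.NANOSECONDS);
    }

    @Override
    public int compareTo(Delayed o) {
      RetryTask that = (RetryTask) o;
      int c = Long.compare(readyAtNanos, that.readyAtNanos);
      // Tie-break on identity hash so distinct tasks never compare equal,
      // the same trick the excerpt uses.
      return c != 0 ? c
          : Integer.compare(System.identityHashCode(this), System.identityHashCode(that));
    }
  }

  public static void main(String[] args) throws InterruptedException {
    DelayQueue<RetryTask> tasks = new DelayQueue<>();
    tasks.put(new RetryTask("second", 200));
    tasks.put(new RetryTask("first", 100));
    // take() blocks until the head task's delay expires, so tasks come out
    // in scheduled order regardless of insertion order.
    System.out.println(tasks.take().name); // prints "first"
    System.out.println(tasks.take().name); // prints "second"
  }
}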
            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/713d773f/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
            --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
index 25e368d..d0f781f 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
            @@ -25,798 +25,798 @@
 017 */
 018package org.apache.hadoop.hbase.io.asyncfs;
 019
-020import static org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelOption.CONNECT_TIMEOUT_MILLIS;
-021import static org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleState.READER_IDLE;
-022import static org.apache.hadoop.fs.CreateFlag.CREATE;
-023import static org.apache.hadoop.fs.CreateFlag.OVERWRITE;
-024import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createEncryptor;
-025import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.trySaslNegotiate;
+020import static org.apache.hadoop.fs.CreateFlag.CREATE;
+021import static org.apache.hadoop.fs.CreateFlag.OVERWRITE;
+022import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createEncryptor;
+023import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.trySaslNegotiate;
+024import static org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelOption.CONNECT_TIMEOUT_MILLIS;
+025import static org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleState.READER_IDLE;
 026import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
 027import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME;
 028import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT;
 029import static org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage.PIPELINE_SETUP_CREATE;
 030
-031import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
-032import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
-033import com.google.protobuf.CodedOutputStream;
-034
-035import org.apache.hadoop.hbase.shaded.io.netty.bootstrap.Bootstrap;
-036import org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBuf;
-037import org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBufAllocator;
-038import org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBufOutputStream;
-039import org.apache.hadoop.hbase.shaded.io.netty.buffer.PooledByteBufAllocator;
-040import org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
-041import org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelFuture;
-042import org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelFutureListener;
-043import org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandler;
-044import org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContext;
-045import org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelInitializer;
-046import org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelPipeline;
-047import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop;
-048import org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHandler;
-049import org.apache.hadoop.hbase.shaded.io.netty.handler.codec.protobuf.ProtobufDecoder;
-050import org.apache.hadoop.hbase.shaded.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-051import org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleStateEvent;
-052import org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleStateHandler;
-053import org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Future;
-054import org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.FutureListener;
-055import org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Promise;
-056
-057import java.io.IOException;
-058import java.lang.reflect.InvocationTargetException;
-059import java.lang.reflect.Method;
-060import java.util.ArrayList;
-061import java.util.EnumSet;
-062import java.util.List;
-063import java.util.concurrent.TimeUnit;
-064
-065import org.apache.commons.logging.Log;
-066import org.apache.commons.logging.LogFactory;
-067import org.apache.hadoop.conf.Configuration;
-068import org.apache.hadoop.crypto.CryptoProtocolVersion;
-069import org.apache.hadoop.crypto.Encryptor;
-070import org.apache.hadoop.fs.CreateFlag;
-071import org.apache.hadoop.fs.FileSystem;
-072import org.apache.hadoop.fs.FileSystemLinkResolver;
-073import org.apache.hadoop.fs.Path;
-074import org.apache.hadoop.fs.UnresolvedLinkException;
-075import 
            

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/fd365a2b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html
            --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html
index d438f22..7c59e27 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html
            @@ -1290,8 +1290,8 @@
 1282      CompactType compactType) throws IOException {
 1283    switch (compactType) {
 1284      case MOB:
-1285        compact(this.connection.getAdminForMaster(), getMobRegionInfo(tableName), major,
-1286          columnFamily);
+1285        compact(this.connection.getAdminForMaster(), RegionInfo.createMobRegionInfo(tableName),
+1286          major, columnFamily);
 1287        break;
 1288      case NORMAL:
 1289        checkTableExists(tableName);
@@ -3248,7 +3248,7 @@
 3240      new Callable<AdminProtos.GetRegionInfoResponse.CompactionState>() {
 3241        @Override
 3242        public AdminProtos.GetRegionInfoResponse.CompactionState call() throws Exception {
-3243          RegionInfo info = getMobRegionInfo(tableName);
+3243          RegionInfo info = RegionInfo.createMobRegionInfo(tableName);
 3244          GetRegionInfoRequest request =
 3245            RequestConverter.buildGetRegionInfoRequest(info.getRegionName(), true);
 3246          GetRegionInfoResponse response = masterAdmin.getRegionInfo(rpcController, request);
@@ -3312,7 +3312,7 @@
 3304        }
 3305        break;
 3306      default:
-3307        throw new IllegalArgumentException("Unknowne compactType: " + compactType);
+3307        throw new IllegalArgumentException("Unknown compactType: " + compactType);
 3308    }
 3309    if (state != null) {
 3310      return ProtobufUtil.createCompactionState(state);
@@ -3847,325 +3847,320 @@
 3839    });
 3840  }
 3841
-3842  private RegionInfo getMobRegionInfo(TableName tableName) {
-3843    return RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes(".mob")).setRegionId(0)
-3844        .build();
-3845  }
-3846
-3847  private RpcControllerFactory getRpcControllerFactory() {
-3848    return this.rpcControllerFactory;
-3849  }
-3850
-3851  @Override
-3852  public void addReplicationPeer(String peerId, ReplicationPeerConfig peerConfig, boolean enabled)
-3853      throws IOException {
-3854    executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
-3855      @Override
-3856      protected Void rpcCall() throws Exception {
-3857        master.addReplicationPeer(getRpcController(),
-3858          RequestConverter.buildAddReplicationPeerRequest(peerId, peerConfig, enabled));
-3859        return null;
-3860      }
-3861    });
-3862  }
-3863
-3864  @Override
-3865  public void removeReplicationPeer(String peerId) throws IOException {
-3866    executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
-3867      @Override
-3868      protected Void rpcCall() throws Exception {
-3869        master.removeReplicationPeer(getRpcController(),
-3870          RequestConverter.buildRemoveReplicationPeerRequest(peerId));
-3871        return null;
-3872      }
-3873    });
-3874  }
-3875
-3876  @Override
-3877  public void enableReplicationPeer(final String peerId) throws IOException {
-3878    executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
-3879      @Override
-3880      protected Void rpcCall() throws Exception {
-3881        master.enableReplicationPeer(getRpcController(),
-3882          RequestConverter.buildEnableReplicationPeerRequest(peerId));
-3883        return null;
-3884      }
-3885    });
-3886  }
-3887
-3888  @Override
-3889  public void disableReplicationPeer(final String peerId) throws IOException {
-3890    executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
-3891      @Override
-3892      protected Void rpcCall() throws Exception {
-3893        master.disableReplicationPeer(getRpcController(),
-3894          RequestConverter.buildDisableReplicationPeerRequest(peerId));
-3895        return null;
-3896      }
-3897    });
-3898  }
-3899
-3900  @Override
-3901  public ReplicationPeerConfig getReplicationPeerConfig(final String peerId) throws IOException {
-3902    return executeCallable(new MasterCallable<ReplicationPeerConfig>(getConnection(),
-3903        getRpcControllerFactory()) {
-3904      @Override
-3905      protected ReplicationPeerConfig rpcCall() throws Exception {
-3906        GetReplicationPeerConfigResponse response = master.getReplicationPeerConfig(
-3907          getRpcController(),
            
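Every replication-peer method in the hunk above follows one template: the operation body is an anonymous MasterCallable<V> whose rpcCall() performs a single protobuf request/response, while executeCallable() supplies the connection, RPC-controller, and retry plumbing. A rough sketch of that callable-template pattern follows, with the plumbing reduced to a plain retry loop; executeWithRetries and its policy are invented for this sketch and stand in for, rather than reproduce, HBaseAdmin's executeCallable.

import java.io.IOException;
import java.util.concurrent.Callable;

public class CallableTemplateSketch {

  // Hypothetical stand-in for the retry/connection plumbing: run the callable,
  // retrying on failure up to maxAttempts times.
  static <V> V executeWithRetries(Callable<V> call, int maxAttempts) throws IOException {
    IOException last = null;
    for (int attempt = 1; attempt <= maxAttempts; attempt++) {
      try {
        return call.call();
      } catch (Exception e) {
        last = e instanceof IOException ? (IOException) e : new IOException(e);
        // A real client would sleep with backoff between attempts here.
      }
    }
    if (last == null) {
      throw new IOException("maxAttempts must be at least 1");
    }
    throw last;
  }

  public static void main(String[] args) throws IOException {
    // Each operation supplies only the RPC body, mirroring how
    // addReplicationPeer/removeReplicationPeer wrap a single master call.
    String result = executeWithRetries(() -> "peer added", 3);
    System.out.println(result);
  }
}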

            [12/51] [partial] hbase-site git commit: Published site at .

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b9722a17/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
            --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
index 29ea7b3..6ed75c9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
            @@ -1313,7093 +1313,7082 @@
             1305
             1306  @Override
             1307  public boolean isSplittable() {
-1308    boolean result = isAvailable() && !hasReferences();
-1309    LOG.info("ASKED IF SPLITTABLE " + result + " " + getRegionInfo().getShortNameToLog(),
-1310      new Throwable("LOGGING: REMOVE"));
-1311    // REMOVE BELOW
-1312    LOG.info("DEBUG LIST ALL FILES");
-1313    for (HStore store : this.stores.values()) {
-1314      LOG.info("store " + store.getColumnFamilyName());
-1315      for (HStoreFile sf : store.getStorefiles()) {
-1316        LOG.info(sf.toStringDetailed());
-1317      }
-1318    }
-1319    return result;
-1320  }
-1321
-1322  @Override
-1323  public boolean isMergeable() {
-1324    if (!isAvailable()) {
-1325      LOG.debug("Region " + this
-1326          + " is not mergeable because it is closing or closed");
-1327      return false;
-1328    }
-1329    if (hasReferences()) {
-1330      LOG.debug("Region " + this
-1331          + " is not mergeable because it has references");
-1332      return false;
-1333    }
-1334
-1335    return true;
+1308    return isAvailable() && !hasReferences();
+1309  }
+1310
+1311  @Override
+1312  public boolean isMergeable() {
+1313    if (!isAvailable()) {
+1314      LOG.debug("Region " + this
+1315          + " is not mergeable because it is closing or closed");
+1316      return false;
+1317    }
+1318    if (hasReferences()) {
+1319      LOG.debug("Region " + this
+1320          + " is not mergeable because it has references");
+1321      return false;
+1322    }
+1323
+1324    return true;
+1325  }
+1326
+1327  public boolean areWritesEnabled() {
+1328    synchronized(this.writestate) {
+1329      return this.writestate.writesEnabled;
+1330    }
+1331  }
+1332
+1333  @VisibleForTesting
+1334  public MultiVersionConcurrencyControl getMVCC() {
+1335    return mvcc;
 1336  }
 1337
-1338  public boolean areWritesEnabled() {
-1339    synchronized(this.writestate) {
-1340      return this.writestate.writesEnabled;
-1341    }
-1342  }
-1343
-1344  @VisibleForTesting
-1345  public MultiVersionConcurrencyControl getMVCC() {
-1346    return mvcc;
-1347  }
-1348
-1349  @Override
-1350  public long getMaxFlushedSeqId() {
-1351    return maxFlushedSeqId;
+1338  @Override
+1339  public long getMaxFlushedSeqId() {
+1340    return maxFlushedSeqId;
+1341  }
+1342
+1343  /**
+1344   * @return readpoint considering given IsolationLevel. Pass {@code null} for default
+1345   */
+1346  public long getReadPoint(IsolationLevel isolationLevel) {
+1347    if (isolationLevel != null && isolationLevel == IsolationLevel.READ_UNCOMMITTED) {
+1348      // This scan can read even uncommitted transactions
+1349      return Long.MAX_VALUE;
+1350    }
+1351    return mvcc.getReadPoint();
 1352  }
 1353
-1354  /**
-1355   * @return readpoint considering given IsolationLevel. Pass {@code null} for default
-1356   */
-1357  public long getReadPoint(IsolationLevel isolationLevel) {
-1358    if (isolationLevel != null && isolationLevel == IsolationLevel.READ_UNCOMMITTED) {
-1359      // This scan can read even uncommitted transactions
-1360      return Long.MAX_VALUE;
-1361    }
-1362    return mvcc.getReadPoint();
-1363  }
-1364
-1365  public boolean isLoadingCfsOnDemandDefault() {
-1366    return this.isLoadingCfsOnDemandDefault;
-1367  }
-1368
-1369  /**
-1370   * Close down this HRegion.  Flush the cache, shut down each HStore, don't
-1371   * service any more calls.
-1372   *
-1373   * <p>This method could take some time to execute, so don't call it from a
-1374   * time-sensitive thread.
-1375   *
-1376   * @return Vector of all the storage files that the HRegion's component
-1377   * HStores make use of.  It's a list of all StoreFile objects. Returns empty
-1378   * vector if already closed and null if judged that it should not close.
-1379   *
-1380   * @throws IOException e
-1381   * @throws DroppedSnapshotException Thrown when replay of wal is required
-1382   * because a Snapshot was not properly persisted. The region is put in closing mode, and the
-1383   * caller MUST abort after this.
-1384   */
-1385  public Map<byte[], List<HStoreFile>> close() throws IOException {
-1386    return close(false);
-1387  }
-1388
-1389  private final Object closeLock = new Object();
-1390
-1391  /** Conf key for the periodic flush interval */
-1392  public 
            [12/51] [partial] hbase-site git commit: Published site at .
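The getReadPoint method moved in the hunk above encodes the MVCC visibility rule: a READ_UNCOMMITTED scan gets Long.MAX_VALUE, so no sequence id is filtered and in-flight writes become visible, while any other isolation level reads up to the controller's current read point. A minimal sketch of just that decision follows; the IsolationLevel enum and mvccReadPoint stub below are stand-ins, not the HBase classes.

public class ReadPointSketch {

  // Stand-in enum; HBase defines its own IsolationLevel.
  enum IsolationLevel { READ_COMMITTED, READ_UNCOMMITTED }

  // Hypothetical stub: in HRegion this value comes from
  // MultiVersionConcurrencyControl#getReadPoint().
  static long mvccReadPoint() {
    return 42L;
  }

  static long getReadPoint(IsolationLevel isolationLevel) {
    if (isolationLevel == IsolationLevel.READ_UNCOMMITTED) {
      // No upper bound on visible sequence ids: the scan may read
      // even uncommitted transactions.
      return Long.MAX_VALUE;
    }
    // Default: only cells at or below the current MVCC read point are visible.
    return mvccReadPoint();
  }

  public static void main(String[] args) {
    System.out.println(getReadPoint(IsolationLevel.READ_UNCOMMITTED)); // 9223372036854775807
    System.out.println(getReadPoint(IsolationLevel.READ_COMMITTED));   // 42
  }
}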

            http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b1eb7453/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html
            --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html
            index d98042d..d549086 100644
            --- a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html
            +++ b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html
            @@ -42,2537 +42,2536 @@
 034
 035import org.apache.commons.logging.Log;
 036import org.apache.commons.logging.LogFactory;
-037import org.apache.yetus.audience.InterfaceAudience;
+037import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 038import org.apache.hadoop.hbase.util.ByteBufferUtils;
 039import org.apache.hadoop.hbase.util.Bytes;
 040import org.apache.hadoop.hbase.util.ClassSize;
 041import org.apache.hadoop.io.RawComparator;
-042
-043import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-044/**
-045 * An HBase Key/Value. This is the fundamental HBase Type.
-046 * <p>
-047 * HBase applications and users should use the Cell interface and avoid directly using KeyValue and
-048 * member functions not defined in Cell.
-049 * <p>
-050 * If being used client-side, the primary methods to access individual fields are
-051 * {@link #getRowArray()}, {@link #getFamilyArray()}, {@link #getQualifierArray()},
-052 * {@link #getTimestamp()}, and {@link #getValueArray()}. These methods allocate new byte arrays
-053 * and return copies. Avoid their use server-side.
-054 * <p>
-055 * Instances of this class are immutable. They do not implement Comparable but Comparators are
-056 * provided. Comparators change with context, whether user table or a catalog table comparison. Its
-057 * critical you use the appropriate comparator. There are Comparators for normal HFiles, Meta's
-058 * Hfiles, and bloom filter keys.
-059 * <p>
-060 * KeyValue wraps a byte array and takes offsets and lengths into passed array at where to start
-061 * interpreting the content as KeyValue. The KeyValue format inside a byte array is:
-062 * <code>&lt;keylength&gt; &lt;valuelength&gt; &lt;key&gt; &lt;value&gt;</code> Key is further
-063 * decomposed as: <code>&lt;rowlength&gt; &lt;row&gt; &lt;columnfamilylength&gt;
-064 * &lt;columnfamily&gt; &lt;columnqualifier&gt;
-065 * &lt;timestamp&gt; &lt;keytype&gt;</code> The <code>rowlength</code> maximum is
-066 * <code>Short.MAX_SIZE</code>, column family length maximum is <code>Byte.MAX_SIZE</code>, and
-067 * column qualifier + key length must be &lt; <code>Integer.MAX_SIZE</code>. The column does not
-068 * contain the family/qualifier delimiter, {@link #COLUMN_FAMILY_DELIMITER}<br>
-069 * KeyValue can optionally contain Tags. When it contains tags, it is added in the byte array after
-070 * the value part. The format for this part is: <code>&lt;tagslength&gt;&lt;tagsbytes&gt;</code>.
-071 * <code>tagslength</code> maximum is <code>Short.MAX_SIZE</code>. The <code>tagsbytes</code>
-072 * contain one or more tags where as each tag is of the form
-073 * <code>&lt;taglength&gt;&lt;tagtype&gt;&lt;tagbytes&gt;</code>. <code>tagtype</code> is one byte
-074 * and <code>taglength</code> maximum is <code>Short.MAX_SIZE</code> and it includes 1 byte type
-075 * length and actual tag bytes length.
-076 */
-077@InterfaceAudience.Private
-078public class KeyValue implements ExtendedCell {
-079  private static final ArrayList<Tag> EMPTY_ARRAY_LIST = new ArrayList<>();
-080
-081  private static final Log LOG = LogFactory.getLog(KeyValue.class);
-082
-083  public static final long FIXED_OVERHEAD = ClassSize.OBJECT + // the KeyValue object itself
-084      ClassSize.REFERENCE + // pointer to "bytes"
-085      2 * Bytes.SIZEOF_INT + // offset, length
-086      Bytes.SIZEOF_LONG;// memstoreTS
-087
-088  /**
-089   * Colon character in UTF-8
-090   */
-091  public static final char COLUMN_FAMILY_DELIMITER = ':';
-092
-093  public static final byte[] COLUMN_FAMILY_DELIM_ARRAY =
-094    new byte[]{COLUMN_FAMILY_DELIMITER};
-095
-096  /**
-097   * Comparator for plain key/values; i.e. non-catalog table key/values. Works on Key portion
-098   * of KeyValue only.
-099   * @deprecated Use {@link CellComparator#getInstance()} instead. Deprecated for hbase 2.0, remove for hbase 3.0.
-100   */
-101  @Deprecated
-102  public static final KVComparator COMPARATOR = new KVComparator();
-103  /**
-104   * A {@link KVComparator} for <code>hbase:meta</code> catalog table
-105   * {@link KeyValue}s.
-106   * @deprecated Use {@link CellComparatorImpl#META_COMPARATOR} instead. Deprecated for hbase 2.0, remove for hbase 3.0.
-107   */
-108  @Deprecated
-109  public static final KVComparator META_COMPARATOR = new MetaComparator();
-110
-111  /** Size of the key length field in bytes*/
-112  public static final int KEY_LENGTH_SIZE = Bytes.SIZEOF_INT;
-113
-114  

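The KeyValue class comment in the hunk above fully specifies the serialized layout: <keylength> <valuelength> <key> <value>, with the key decomposed into <rowlength> <row> <columnfamilylength> <columnfamily> <columnqualifier> <timestamp> <keytype>, and optional tags appended after the value. A sketch that assembles a byte[] in that layout follows; field widths are taken from the comment (int key/value lengths per KEY_LENGTH_SIZE = Bytes.SIZEOF_INT, short row length, byte family length, long timestamp, byte key type), the Put type code of 4 is an assumption, and the encoder is illustrative rather than HBase's own.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class KeyValueLayoutSketch {

  // Builds <keylength><valuelength><key><value> as the class comment describes.
  static byte[] encode(byte[] row, byte[] family, byte[] qualifier,
      long timestamp, byte keyType, byte[] value) {
    int keyLength = 2 + row.length      // <rowlength> + <row>
        + 1 + family.length             // <columnfamilylength> + <columnfamily>
        + qualifier.length              // <columnqualifier>
        + 8 + 1;                        // <timestamp> + <keytype>
    ByteBuffer buf = ByteBuffer.allocate(4 + 4 + keyLength + value.length);
    buf.putInt(keyLength);              // <keylength>
    buf.putInt(value.length);           // <valuelength>
    buf.putShort((short) row.length);   // <rowlength>
    buf.put(row);                       // <row>
    buf.put((byte) family.length);      // <columnfamilylength>
    buf.put(family);                    // <columnfamily>
    buf.put(qualifier);                 // <columnqualifier>
    buf.putLong(timestamp);             // <timestamp>
    buf.put(keyType);                   // <keytype>
    buf.put(value);                     // <value>
    return buf.array();
  }

  public static void main(String[] args) {
    byte[] kv = encode(
        "row1".getBytes(StandardCharsets.UTF_8),
        "cf".getBytes(StandardCharsets.UTF_8),
        "q".getBytes(StandardCharsets.UTF_8),
        1546300800000L,
        (byte) 4, // assumed Put type code
        "v".getBytes(StandardCharsets.UTF_8));
    System.out.println("serialized length = " + kv.length); // 28
  }
}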