[11/51] [partial] hbase-site git commit: Published site at 620d70d6186fb800299bcc62ad7179fccfd1be41.

2019-01-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa3fb87f/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServer.html b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServer.html
index ab92b9d..99959bb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/thrift/ThriftServer.html
@@ -149,655 +149,666 @@
 141import org.slf4j.Logger;
 142import org.slf4j.LoggerFactory;
 143
-144import org.apache.hbase.thirdparty.com.google.common.base.Joiner;
-145import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
-146import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-147import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
-148import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLineParser;
-149import org.apache.hbase.thirdparty.org.apache.commons.cli.DefaultParser;
-150import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
-151import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
-152
-153/**
-154 * ThriftServer- this class starts up a Thrift server which implements the
-155 * Hbase API specified in the Hbase.thrift IDL file. The server runs in an
-156 * independent process.
-157 */
-158@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-159public class ThriftServer extends Configured implements Tool {
-160
-161  private static final Logger LOG = LoggerFactory.getLogger(ThriftServer.class);
-162
+144import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+145import org.apache.hbase.thirdparty.com.google.common.base.Joiner;
+146import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
+147import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+148import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
+149import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLineParser;
+150import org.apache.hbase.thirdparty.org.apache.commons.cli.DefaultParser;
+151import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
+152import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
+153
+154/**
+155 * ThriftServer- this class starts up a Thrift server which implements the
+156 * Hbase API specified in the Hbase.thrift IDL file. The server runs in an
+157 * independent process.
+158 */
+159@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
+160public class ThriftServer extends Configured implements Tool {
+161
+162  private static final Logger LOG = LoggerFactory.getLogger(ThriftServer.class);
 163
 164
-165  protected Configuration conf;
-166
-167  protected InfoServer infoServer;
-168
-169  protected TProcessor processor;
-170
-171  protected ThriftMetrics metrics;
-172  protected HBaseServiceHandler hbaseServiceHandler;
-173  protected UserGroupInformation serviceUGI;
-174  protected boolean httpEnabled;
-175
-176  protected SaslUtil.QualityOfProtection qop;
-177  protected String host;
-178  protected int listenPort;
-179
+165
+166  protected Configuration conf;
+167
+168  protected InfoServer infoServer;
+169
+170  protected TProcessor processor;
+171
+172  protected ThriftMetrics metrics;
+173  protected HBaseServiceHandler hbaseServiceHandler;
+174  protected UserGroupInformation serviceUGI;
+175  protected boolean httpEnabled;
+176
+177  protected SaslUtil.QualityOfProtection qop;
+178  protected String host;
+179  protected int listenPort;
 180
-181  protected boolean securityEnabled;
-182  protected boolean doAsEnabled;
-183
-184  protected JvmPauseMonitor pauseMonitor;
-185
-186  protected volatile TServer tserver;
-187  protected volatile Server httpServer;
-188
+181
+182  protected boolean securityEnabled;
+183  protected boolean doAsEnabled;
+184
+185  protected JvmPauseMonitor pauseMonitor;
+186
+187  protected volatile TServer tserver;
+188  protected volatile Server httpServer;
 189
-190  //
-191  // Main program and support routines
-192  //
-193
-194  public ThriftServer(Configuration conf) {
-195    this.conf = HBaseConfiguration.create(conf);
-196  }
-197
-198  protected void setupParamters() throws IOException {
-199    // login the server principal (if using secure Hadoop)
-200    UserProvider userProvider = UserProvider.instantiate(conf);
-201    securityEnabled = userProvider.isHadoopSecurityEnabled()
-202      && userProvider.isHBaseSecurityEnabled();
-203    if (securityEnabled) {
-204      host = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
-205        conf.get(THRIFT_DNS_INTERFACE_KEY, "default"),
-206        conf.get(THRIFT_DNS_NAMESERVER_KEY, "default")));
-207      userProvider.login(THRIFT_KEYTAB_FILE_KEY,

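The re-added declaration above keeps ThriftServer on Hadoop's standard Configured/Tool pattern, with the setupParamters block wiring Kerberos login through UserProvider. A minimal sketch of the Tool pattern itself, using only stock Hadoop APIs (DemoServerTool and its body are hypothetical):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class DemoServerTool extends Configured implements Tool {
  @Override
  public int run(String[] args) throws Exception {
    Configuration conf = getConf(); // populated by ToolRunner before run() is called
    // ... parse args, start the server, block until shutdown ...
    return 0;
  }

  public static void main(String[] args) throws Exception {
    // ToolRunner strips generic Hadoop options (-D key=value, -conf file) first.
    System.exit(ToolRunner.run(new Configuration(), new DemoServerTool(), args));
  }
}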
[11/51] [partial] hbase-site git commit: Published site at 281d6429e55149cc4c05430dcc1d1dc136d8b245.

2019-01-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/901d593a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureStateSerializer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureStateSerializer.html b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureStateSerializer.html
index 4eaf179..6fc7bf8 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureStateSerializer.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureStateSerializer.html
@@ -266,12 +266,20 @@
 
 
 protected void
-CreateTableProcedure.deserializeStateData(ProcedureStateSerializer serializer)
+SwitchRpcThrottleRemoteProcedure.deserializeStateData(ProcedureStateSerializer serializer)
 
 
 protected void
+CreateTableProcedure.deserializeStateData(ProcedureStateSerializer serializer)
+
+
+protected void
 ModifyNamespaceProcedure.deserializeStateData(ProcedureStateSerializer serializer)
 
+
+protected void
+SplitWALRemoteProcedure.deserializeStateData(ProcedureStateSerializer serializer)
+
 
 protected void
 TruncateTableProcedure.deserializeStateData(ProcedureStateSerializer serializer)
@@ -306,16 +314,24 @@
 
 
 protected void
-ModifyTableProcedure.deserializeStateData(ProcedureStateSerializer serializer)
+SplitWALProcedure.deserializeStateData(ProcedureStateSerializer serializer)
 
 
 protected void
-InitMetaProcedure.deserializeStateData(ProcedureStateSerializer serializer)
+ModifyTableProcedure.deserializeStateData(ProcedureStateSerializer serializer)
 
 
 protected void
+InitMetaProcedure.deserializeStateData(ProcedureStateSerializer serializer)
+
+
+protected void
 ReopenTableRegionsProcedure.deserializeStateData(ProcedureStateSerializer serializer)
 
+
+protected void
+SwitchRpcThrottleProcedure.deserializeStateData(ProcedureStateSerializer serializer)
+
 
 protected void
 DeleteNamespaceProcedure.deserializeStateData(ProcedureStateSerializer serializer)
@@ -332,12 +348,20 @@
 
 
 protected void
-CreateTableProcedure.serializeStateData(ProcedureStateSerializer serializer)
+SwitchRpcThrottleRemoteProcedure.serializeStateData(ProcedureStateSerializer serializer)
 
 
 protected void
+CreateTableProcedure.serializeStateData(ProcedureStateSerializer serializer)
+
+
+protected void
 ModifyNamespaceProcedure.serializeStateData(ProcedureStateSerializer serializer)
 
+
+protected void
+SplitWALRemoteProcedure.serializeStateData(ProcedureStateSerializer serializer)
+
 
 protected void
 TruncateTableProcedure.serializeStateData(ProcedureStateSerializer serializer)
@@ -372,16 +396,24 @@
 
 
 protected void
-ModifyTableProcedure.serializeStateData(ProcedureStateSerializer serializer)
+SplitWALProcedure.serializeStateData(ProcedureStateSerializer serializer)
 
 
 protected void
-InitMetaProcedure.serializeStateData(ProcedureStateSerializer serializer)
+ModifyTableProcedure.serializeStateData(ProcedureStateSerializer serializer)
 
 
 protected void
+InitMetaProcedure.serializeStateData(ProcedureStateSerializer serializer)
+
+
+protected void
 ReopenTableRegionsProcedure.serializeStateData(ProcedureStateSerializer serializer)
 
+
+protected void
+SwitchRpcThrottleProcedure.serializeStateData(ProcedureStateSerializer serializer)
+
 
 protected void
 DeleteNamespaceProcedure.serializeStateData(ProcedureStateSerializer serializer)
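The renumbered rows above come from new implementors of Procedure's persistence hooks. A minimal sketch of such a pair, assuming ProcedureStateSerializer exposes serialize(Message)/deserialize(Class) as in the procedure2 module; MyStateData is a hypothetical protobuf message and targetServer a hypothetical field:

@Override
protected void serializeStateData(ProcedureStateSerializer serializer) throws IOException {
  // Persist just enough state to resume after a master restart.
  serializer.serialize(MyStateData.newBuilder()
      .setTargetServer(targetServer.getServerName()).build());
}

@Override
protected void deserializeStateData(ProcedureStateSerializer serializer) throws IOException {
  MyStateData state = serializer.deserialize(MyStateData.class);
  targetServer = ServerName.valueOf(state.getTargetServer()); // restore in-memory fields
}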

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/901d593a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureSuspendedException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureSuspendedException.html b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureSuspendedException.html
index 0aeeb85..7f8754f 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureSuspendedException.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/ProcedureSuspendedException.html
@@ -83,22 +83,26 @@
 
 
 
-org.apache.hadoop.hbase.master.assignment
+org.apache.hadoop.hbase.master
 
 
 
-org.apache.hadoop.hbase.master.locking
+org.apache.hadoop.hbase.master.assignment
 
 
 
-org.apache.hadoop.hbase.master.procedure
+org.apache.hadoop.hbase.master.locking
 
 
 
-org.apache.hadoop.hbase.master.replication
+org.apache.hadoop.hbase.master.procedure
 
 
 
+org.apache.hadoop.hbase.master.replication
+
+
+
 org.apache.hadoop.hbase.procedure2
 
 
@@ -107,6 +111,26 @@
 
 
 
+
+
+
+Uses of ProcedureSuspendedException in org.apache.hadoop.hbase.master
+
+Methods in org.apache.hadoop.hbase.master that throw ProcedureSuspendedException
+
+Modifier and Type
+Method and Description
+
+
+
+ServerName
+SplitWALManager.acquireSplitWALWorker(Procedure<?> procedure)
+try to acquire a worker from online servers which is executing
+
+
+
+
+
 
 
 
@@ -200,6 +224,19 @@
 
 Uses of ProcedureSuspendedException in 

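The new SplitWALManager.acquireSplitWALWorker row above reflects the usual suspension idiom: a procedure that cannot obtain a resource throws ProcedureSuspendedException instead of blocking a scheduler thread. A sketch of that idiom against the procedure2 execute() contract; tryAcquireWorker and the wake-up mechanics are hypothetical:

@Override
protected Procedure<MasterProcedureEnv>[] execute(MasterProcedureEnv env)
    throws ProcedureSuspendedException {
  ServerName worker = tryAcquireWorker(env); // hypothetical helper; null when all busy
  if (worker == null) {
    // Park this procedure; the framework re-runs it once a worker-released
    // event wakes it, instead of burning a thread on polling.
    throw new ProcedureSuspendedException();
  }
  // ... dispatch the WAL-splitting work to the acquired worker ...
  return null; // no child procedures to spawn
}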
[11/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
index 736388b..197b99d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AdminRpcCall.html
@@ -26,3624 +26,3599 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-021
-022import com.google.protobuf.Message;
-023import com.google.protobuf.RpcChannel;
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.Collections;
-028import java.util.EnumSet;
-029import java.util.HashMap;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Optional;
-033import java.util.Set;
-034import java.util.concurrent.CompletableFuture;
-035import java.util.concurrent.ConcurrentHashMap;
-036import java.util.concurrent.TimeUnit;
-037import java.util.concurrent.atomic.AtomicReference;
-038import java.util.function.BiConsumer;
-039import java.util.function.Function;
-040import java.util.function.Supplier;
-041import java.util.regex.Pattern;
-042import java.util.stream.Collectors;
-043import java.util.stream.Stream;
-044import org.apache.commons.io.IOUtils;
-045import org.apache.hadoop.conf.Configuration;
-046import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-047import org.apache.hadoop.hbase.CacheEvictionStats;
-048import org.apache.hadoop.hbase.CacheEvictionStatsAggregator;
-049import org.apache.hadoop.hbase.ClusterMetrics;
-050import org.apache.hadoop.hbase.ClusterMetrics.Option;
-051import org.apache.hadoop.hbase.ClusterMetricsBuilder;
-052import org.apache.hadoop.hbase.HConstants;
-053import org.apache.hadoop.hbase.HRegionLocation;
-054import org.apache.hadoop.hbase.MetaTableAccessor;
-055import org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-056import org.apache.hadoop.hbase.NamespaceDescriptor;
-057import org.apache.hadoop.hbase.RegionLocations;
-058import org.apache.hadoop.hbase.RegionMetrics;
-059import org.apache.hadoop.hbase.RegionMetricsBuilder;
-060import org.apache.hadoop.hbase.ServerName;
-061import org.apache.hadoop.hbase.TableExistsException;
-062import org.apache.hadoop.hbase.TableName;
-063import org.apache.hadoop.hbase.TableNotDisabledException;
-064import org.apache.hadoop.hbase.TableNotEnabledException;
-065import org.apache.hadoop.hbase.TableNotFoundException;
-066import org.apache.hadoop.hbase.UnknownRegionException;
-067import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-068import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-069import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder;
-070import org.apache.hadoop.hbase.client.Scan.ReadType;
-071import org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-072import org.apache.hadoop.hbase.client.replication.TableCFs;
-073import org.apache.hadoop.hbase.client.security.SecurityCapability;
-074import org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import org.apache.hadoop.hbase.replication.ReplicationException;
-080import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import org.apache.hadoop.hbase.replication.SyncReplicationState;
-083import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-084import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-085import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-086import org.apache.hadoop.hbase.util.Bytes;
-087import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-088import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-089import org.apache.yetus.audience.InterfaceAudience;
-090import org.slf4j.Logger;
-091import org.slf4j.LoggerFactory;
-092
-093import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-094import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-095import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
-096import org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-097import org.apache.hbase.thirdparty.io.netty.util.Timeout;
-098import

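The import churn above belongs to RawAsyncHBaseAdmin, the CompletableFuture-based admin implementation behind AsyncAdmin. A minimal caller-side sketch against the public async API (the table name is illustrative):

import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class AsyncAdminDemo {
  public static void main(String[] args) throws Exception {
    try (AsyncConnection conn =
        ConnectionFactory.createAsyncConnection(HBaseConfiguration.create()).get()) {
      // Every admin call returns a CompletableFuture instead of blocking the caller.
      CompletableFuture<Boolean> exists =
          conn.getAdmin().tableExists(TableName.valueOf("demo_table"));
      exists.thenAccept(b -> System.out.println("demo_table exists: " + b)).join();
    }
  }
}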
[11/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.THBaseServiceMetricsProxy.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.THBaseServiceMetricsProxy.html b/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.THBaseServiceMetricsProxy.html
deleted file mode 100644
index 1a0f64e..000
--- a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.THBaseServiceMetricsProxy.html
+++ /dev/null
@@ -1,935 +0,0 @@
-001/**
-002 *
-003 * Licensed to the Apache Software Foundation (ASF) under one
-004 * or more contributor license agreements.  See the NOTICE file
-005 * distributed with this work for additional information
-006 * regarding copyright ownership.  The ASF licenses this file
-007 * to you under the Apache License, Version 2.0 (the
-008 * "License"); you may not use this file except in compliance
-009 * with the License.  You may obtain a copy of the License at
-010 *
-011 *     http://www.apache.org/licenses/LICENSE-2.0
-012 *
-013 * Unless required by applicable law or agreed to in writing, software
-014 * distributed under the License is distributed on an "AS IS" BASIS,
-015 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-016 * See the License for the specific language governing permissions and
-017 * limitations under the License.
-018 */
-019package org.apache.hadoop.hbase.thrift2;
-020
-021import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.appendFromThrift;
-022import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.columnFamilyDescriptorFromThrift;
-023import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.compareOpFromThrift;
-024import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.deleteFromThrift;
-025import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.deletesFromThrift;
-026import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.getFromThrift;
-027import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.getsFromThrift;
-028import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.incrementFromThrift;
-029import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.namespaceDescriptorFromHBase;
-030import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.namespaceDescriptorFromThrift;
-031import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.namespaceDescriptorsFromHBase;
-032import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.putFromThrift;
-033import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.putsFromThrift;
-034import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.resultFromHBase;
-035import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.resultsFromHBase;
-036import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.rowMutationsFromThrift;
-037import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.scanFromThrift;
-038import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.splitKeyFromThrift;
-039import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableDescriptorFromHBase;
-040import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableDescriptorFromThrift;
-041import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableDescriptorsFromHBase;
-042import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableNameFromThrift;
-043import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableNamesFromHBase;
-044import static org.apache.thrift.TBaseHelper.byteBufferToByteArray;
-045
-046import java.io.IOException;
-047import java.lang.reflect.InvocationHandler;
-048import java.lang.reflect.InvocationTargetException;
-049import java.lang.reflect.Method;
-050import java.lang.reflect.Proxy;
-051import java.nio.ByteBuffer;
-052import java.util.ArrayList;
-053import java.util.Collections;
-054import java.util.List;
-055import java.util.Map;
-056import java.util.concurrent.ConcurrentHashMap;
-057import java.util.concurrent.atomic.AtomicInteger;
-058import java.util.regex.Pattern;
-059
-060import org.apache.hadoop.conf.Configuration;
-061import org.apache.hadoop.hbase.DoNotRetryIOException;
-062import org.apache.hadoop.hbase.HRegionLocation;
-063import org.apache.hadoop.hbase.NamespaceDescriptor;
-064import org.apache.hadoop.hbase.TableName;
-065import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-066import org.apache.hadoop.hbase.client.RegionLocator;
-067import org.apache.hadoop.hbase.client.ResultScanner;
-068import org.apache.hadoop.hbase.client.Table;
-069import org.apache.hadoop.hbase.client.TableDescriptor;
-070import org.apache.hadoop.hbase.security.UserProvider;
-071import org.apache.hadoop.hbase.thrift.ThriftMetrics;

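The deleted handler's reflection imports (InvocationHandler, Method, Proxy) and the THBaseServiceMetricsProxy name point at a JDK dynamic proxy that times every Thrift handler call. A self-contained sketch of that technique; the Service interface stands in for the real Thrift-generated interface:

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Proxy;

public final class MetricsProxyDemo {
  interface Service { String get(String row); } // stand-in for the Thrift Iface

  static <T> T withTiming(Class<T> iface, T target) {
    InvocationHandler h = (proxy, method, args) -> {
      long start = System.nanoTime();
      try {
        return method.invoke(target, args);
      } catch (InvocationTargetException e) {
        throw e.getCause(); // unwrap so callers see the real exception
      } finally {
        // A real implementation would feed a metrics sink instead of stdout.
        System.out.printf("%s took %d ns%n", method.getName(), System.nanoTime() - start);
      }
    };
    return iface.cast(Proxy.newProxyInstance(iface.getClassLoader(), new Class<?>[] { iface }, h));
  }

  public static void main(String[] args) {
    Service timed = withTiming(Service.class, row -> "value-for-" + row);
    System.out.println(timed.get("r1"));
  }
}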
[11/51] [partial] hbase-site git commit: Published site at 3ab895979b643a2980bcdb7fee2078f14b614210.

2019-01-01 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/ipc/CellBlockBuilder.OutputStreamSupplier.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/CellBlockBuilder.OutputStreamSupplier.html b/devapidocs/org/apache/hadoop/hbase/ipc/CellBlockBuilder.OutputStreamSupplier.html
index e39bb4d..d353d02 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/CellBlockBuilder.OutputStreamSupplier.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/CellBlockBuilder.OutputStreamSupplier.html
@@ -238,6 +238,6 @@ var activeTableTab = "activeTableTab";
 
 
 
-Copyright © 2007–2018 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.
+Copyright © 2007–2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/ipc/CellBlockBuilder.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/CellBlockBuilder.html b/devapidocs/org/apache/hadoop/hbase/ipc/CellBlockBuilder.html
index 00bf3c1..d3780f5 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/CellBlockBuilder.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/CellBlockBuilder.html
@@ -626,6 +626,6 @@ extends java.lang.Object
 
 
 
-Copyright © 2007–2018 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.
+Copyright © 2007–2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.html b/devapidocs/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.html
index e48a2b8..df3c69d 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.html
@@ -270,6 +270,6 @@ extends 
 
 
-Copyright © 2007–2018 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.
+Copyright © 2007–2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/ipc/ConnectionId.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/ConnectionId.html b/devapidocs/org/apache/hadoop/hbase/ipc/ConnectionId.html
index 431190b..47aba39 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/ConnectionId.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/ConnectionId.html
@@ -447,6 +447,6 @@ extends java.lang.Object
 
 
 
-Copyright © 2007–2018 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.
+Copyright © 2007–2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.html b/devapidocs/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.html
index 6cc89d7..062c301 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.html
@@ -205,6 +205,6 @@ extends com.google.protobuf.RpcChannel, com.google.protobuf.BlockingRpcChannel
 
 
-Copyright © 2007–2018 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.
+Copyright © 2007–2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.BlockingRpcCallback.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.BlockingRpcCallback.html b/devapidocs/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.BlockingRpcCallback.html
index 63d7ff1..d0913d0 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.BlockingRpcCallback.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.BlockingRpcCallback.html
@@ -369,6 +369,6 @@ implements com.google.protobuf.RpcCallback<R>
 
 
 
-Copyright © 2007–2018 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.
+Copyright © 2007–2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.
 
 


[11/51] [partial] hbase-site git commit: Published site at 7820ba1dbdba58b1002cdfde08eb21aa7a0bb6da.

2018-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/09ea0d5f/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
index 0f5a095..50bf692 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
@@ -78,8712 +78,8714 @@
 070import java.util.concurrent.locks.ReadWriteLock;
 071import java.util.concurrent.locks.ReentrantReadWriteLock;
 072import java.util.function.Function;
-073import org.apache.hadoop.conf.Configuration;
-074import org.apache.hadoop.fs.FileStatus;
-075import org.apache.hadoop.fs.FileSystem;
-076import org.apache.hadoop.fs.LocatedFileStatus;
-077import org.apache.hadoop.fs.Path;
-078import org.apache.hadoop.hbase.Cell;
-079import org.apache.hadoop.hbase.CellBuilderType;
-080import org.apache.hadoop.hbase.CellComparator;
-081import org.apache.hadoop.hbase.CellComparatorImpl;
-082import org.apache.hadoop.hbase.CellScanner;
-083import org.apache.hadoop.hbase.CellUtil;
-084import org.apache.hadoop.hbase.CompareOperator;
-085import org.apache.hadoop.hbase.CompoundConfiguration;
-086import org.apache.hadoop.hbase.DoNotRetryIOException;
-087import org.apache.hadoop.hbase.DroppedSnapshotException;
-088import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
-089import org.apache.hadoop.hbase.HConstants;
-090import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
-091import org.apache.hadoop.hbase.HDFSBlocksDistribution;
-092import org.apache.hadoop.hbase.KeyValue;
-093import org.apache.hadoop.hbase.KeyValueUtil;
-094import org.apache.hadoop.hbase.NamespaceDescriptor;
-095import org.apache.hadoop.hbase.NotServingRegionException;
-096import org.apache.hadoop.hbase.PrivateCellUtil;
-097import org.apache.hadoop.hbase.RegionTooBusyException;
-098import org.apache.hadoop.hbase.Tag;
-099import org.apache.hadoop.hbase.TagUtil;
-100import org.apache.hadoop.hbase.UnknownScannerException;
-101import org.apache.hadoop.hbase.client.Append;
-102import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-103import org.apache.hadoop.hbase.client.CompactionState;
-104import org.apache.hadoop.hbase.client.Delete;
-105import org.apache.hadoop.hbase.client.Durability;
-106import org.apache.hadoop.hbase.client.Get;
-107import org.apache.hadoop.hbase.client.Increment;
-108import org.apache.hadoop.hbase.client.IsolationLevel;
-109import org.apache.hadoop.hbase.client.Mutation;
-110import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;
-111import org.apache.hadoop.hbase.client.Put;
-112import org.apache.hadoop.hbase.client.RegionInfo;
-113import org.apache.hadoop.hbase.client.RegionInfoBuilder;
-114import org.apache.hadoop.hbase.client.RegionReplicaUtil;
-115import org.apache.hadoop.hbase.client.Result;
-116import org.apache.hadoop.hbase.client.RowMutations;
-117import org.apache.hadoop.hbase.client.Scan;
-118import org.apache.hadoop.hbase.client.TableDescriptor;
-119import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-120import org.apache.hadoop.hbase.conf.ConfigurationManager;
-121import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
-122import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-123import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-124import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
-125import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-126import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-127import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-128import org.apache.hadoop.hbase.filter.ByteArrayComparable;
-129import org.apache.hadoop.hbase.filter.FilterWrapper;
-130import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
-131import org.apache.hadoop.hbase.io.HFileLink;
-132import org.apache.hadoop.hbase.io.HeapSize;
-133import org.apache.hadoop.hbase.io.TimeRange;
-134import org.apache.hadoop.hbase.io.hfile.BlockCache;
-135import org.apache.hadoop.hbase.io.hfile.HFile;
-136import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
-137import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-138import org.apache.hadoop.hbase.ipc.RpcCall;
-139import org.apache.hadoop.hbase.ipc.RpcServer;
-140import org.apache.hadoop.hbase.mob.MobFileCache;
-141import org.apache.hadoop.hbase.monitoring.MonitoredTask;
-142import org.apache.hadoop.hbase.monitoring.TaskMonitor;
-143import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
-144import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;
-145import

[11/51] [partial] hbase-site git commit: Published site at c448604ceb987d113913f0583452b2abce04db0d.

2018-12-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f8b8424/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.ByteArrayKeyBlockIndexReader.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.ByteArrayKeyBlockIndexReader.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.ByteArrayKeyBlockIndexReader.html
index 1124f8b..0c29054 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.ByteArrayKeyBlockIndexReader.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.ByteArrayKeyBlockIndexReader.html
@@ -,587 +,592 @@
 1103      blockStream.write(midKeyMetadata);
 1104      blockWriter.writeHeaderAndData(out);
 1105      if (cacheConf != null) {
-1106        HFileBlock blockForCaching = blockWriter.getBlockForCaching(cacheConf);
-1107        cacheConf.getBlockCache().cacheBlock(new BlockCacheKey(nameForCaching,
-1108          rootLevelIndexPos, true, blockForCaching.getBlockType()), blockForCaching);
-1109      }
-1110    }
-1111
-1112      // Add root index block size
-1113      totalBlockOnDiskSize += blockWriter.getOnDiskSizeWithoutHeader();
-1114      totalBlockUncompressedSize +=
-1115          blockWriter.getUncompressedSizeWithoutHeader();
-1116
-1117      if (LOG.isTraceEnabled()) {
-1118        LOG.trace("Wrote a " + numLevels + "-level index with root level at pos "
-1119          + rootLevelIndexPos + ", " + rootChunk.getNumEntries()
-1120          + " root-level entries, " + totalNumEntries + " total entries, "
-1121          + StringUtils.humanReadableInt(this.totalBlockOnDiskSize) +
-1122          " on-disk size, "
-1123          + StringUtils.humanReadableInt(totalBlockUncompressedSize) +
-1124          " total uncompressed size.");
-1125      }
-1126      return rootLevelIndexPos;
-1127    }
-1128
-1129    /**
-1130     * Writes the block index data as a single level only. Does not do any
-1131     * block framing.
-1132     *
-1133     * @param out the buffered output stream to write the index to. Typically a
-1134     *          stream writing into an {@link HFile} block.
-1135     * @param description a short description of the index being written. Used
-1136     *          in a log message.
-1137     * @throws IOException
-1138     */
-1139    public void writeSingleLevelIndex(DataOutput out, String description)
-1140        throws IOException {
-1141      expectNumLevels(1);
-1142
-1143      if (!singleLevelOnly)
-1144        throw new IOException("Single-level mode is turned off");
-1145
-1146      if (rootChunk.getNumEntries() > 0)
-1147        throw new IOException("Root-level entries already added in " +
-1148            "single-level mode");
-1149
-1150      rootChunk = curInlineChunk;
-1151      curInlineChunk = new BlockIndexChunk();
-1152
-1153      if (LOG.isTraceEnabled()) {
-1154        LOG.trace("Wrote a single-level " + description + " index with "
-1155          + rootChunk.getNumEntries() + " entries, " + rootChunk.getRootSize()
-1156          + " bytes");
-1157      }
-1158      rootChunk.writeRoot(out);
-1159    }
-1160
-1161    /**
-1162     * Split the current level of the block index into intermediate index
-1163     * blocks of permitted size and write those blocks to disk. Return the next
-1164     * level of the block index referencing those intermediate-level blocks.
-1165     *
-1166     * @param out
-1167     * @param currentLevel the current level of the block index, such as the a
-1168     *          chunk referencing all leaf-level index blocks
-1169     * @return the parent level block index, which becomes the root index after
-1170     *         a few (usually zero) iterations
-1171     * @throws IOException
-1172     */
-1173    private BlockIndexChunk writeIntermediateLevel(FSDataOutputStream out,
-1174        BlockIndexChunk currentLevel) throws IOException {
-1175      // Entries referencing intermediate-level blocks we are about to create.
-1176      BlockIndexChunk parent = new BlockIndexChunk();
-1177
-1178      // The current intermediate-level block index chunk.
-1179      BlockIndexChunk curChunk = new BlockIndexChunk();
-1180
-1181      for (int i = 0; i < currentLevel.getNumEntries(); ++i) {
-1182        curChunk.add(currentLevel.getBlockKey(i),
-1183          currentLevel.getBlockOffset(i), currentLevel.getOnDiskDataSize(i));
-1184
-1185        // HBASE-16288: We have to have at least minIndexNumEntries(16) items in the index so that
-1186        // we won't end up with too-many levels for a index with very large rowKeys. Also, if the
-1187        // first key is larger than maxChunkSize this will cause infinite recursion.
-1188        if (i >= minIndexNumEntries && curChunk.getRootSize() >= maxChunkSize) {
-1189          writeIntermediateBlock(out, parent, curChunk);
-1190

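The HBASE-16288 guard above flushes an intermediate index chunk only once it holds at least minIndexNumEntries entries AND exceeds maxChunkSize, so huge row keys cannot drive unbounded index depth. A generic sketch of that size-plus-minimum chunking rule, detached from the HFile specifics:

import java.util.ArrayList;
import java.util.List;

public final class ChunkingDemo {
  // Flush a chunk once it holds >= minEntries entries and >= maxBytes bytes,
  // mirroring the writeIntermediateLevel guard.
  static List<List<byte[]>> chunk(List<byte[]> entries, int minEntries, long maxBytes) {
    List<List<byte[]>> chunks = new ArrayList<>();
    List<byte[]> cur = new ArrayList<>();
    long curBytes = 0;
    for (byte[] e : entries) {
      cur.add(e);
      curBytes += e.length;
      if (cur.size() >= minEntries && curBytes >= maxBytes) {
        chunks.add(cur);
        cur = new ArrayList<>();
        curBytes = 0;
      }
    }
    if (!cur.isEmpty()) {
      chunks.add(cur); // trailing partial chunk
    }
    return chunks;
  }
}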
[11/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

2018-12-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
@@ -142,5192 +142,5186 @@
 134import org.apache.hadoop.hbase.wal.WAL;
 135import org.apache.hadoop.hbase.wal.WALFactory;
 136import org.apache.hadoop.hbase.wal.WALSplitter;
-137import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-138import org.apache.hadoop.hbase.zookeeper.ZKUtil;
-139import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-140import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-141import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-142import org.apache.hadoop.ipc.RemoteException;
-143import org.apache.hadoop.security.UserGroupInformation;
-144import org.apache.hadoop.util.ReflectionUtils;
-145import org.apache.hadoop.util.Tool;
-146import org.apache.hadoop.util.ToolRunner;
-147import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-148import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
-149import org.apache.yetus.audience.InterfaceAudience;
-150import org.apache.yetus.audience.InterfaceStability;
-151import org.apache.zookeeper.KeeperException;
-152import org.slf4j.Logger;
-153import org.slf4j.LoggerFactory;
-154
-155import org.apache.hbase.thirdparty.com.google.common.base.Joiner;
-156import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-157import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;
-158import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-159import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;
-160import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;
-161import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;
-162
-163import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-164import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;
-165
-166/**
-167 * HBaseFsck (hbck) is a tool for checking and repairing region consistency and
-168 * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not
-169 * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.
-170 * See hbck2 (HBASE-19121) for a hbck tool for hbase2.
-171 *
-172 * <p>
-173 * Region consistency checks verify that hbase:meta, region deployment on region
-174 * servers and the state of data in HDFS (.regioninfo files) all are in
-175 * accordance.
-176 * <p>
-177 * Table integrity checks verify that all possible row keys resolve to exactly
-178 * one region of a table.  This means there are no individual degenerate
-179 * or backwards regions; no holes between regions; and that there are no
-180 * overlapping regions.
-181 * <p>
-182 * The general repair strategy works in two phases:
-183 * <ol>
-184 * <li> Repair Table Integrity on HDFS. (merge or fabricate regions)
-185 * <li> Repair Region Consistency with hbase:meta and assignments
-186 * </ol>
-187 * <p>
-188 * For table integrity repairs, the tables' region directories are scanned
-189 * for .regioninfo files.  Each table's integrity is then verified.  If there
-190 * are any orphan regions (regions with no .regioninfo files) or holes, new
-191 * regions are fabricated.  Backwards regions are sidelined as well as empty
-192 * degenerate (endkey==startkey) regions.  If there are any overlapping regions,
-193 * a new region is created and all data is merged into the new region.
-194 * <p>
-195 * Table integrity repairs deal solely with HDFS and could potentially be done
-196 * offline -- the hbase region servers or master do not need to be running.
-197 * This phase can eventually be used to completely reconstruct the hbase:meta table in
-198 * an offline fashion.
-199 * <p>
-200 * Region consistency requires three conditions -- 1) valid .regioninfo file
-201 * present in an HDFS region dir,  2) valid row with .regioninfo data in META,
-202 * and 3) a region is deployed only at the regionserver that was assigned to
-203 * with proper state in the master.
-204 * <p>
-205 * Region consistency repairs require hbase to be online so that hbck can
-206 * contact the HBase master and region servers.  The hbck#connect() method must
-207 * first be called successfully.  Much of the region consistency information
-208 * is transient and less risky to repair.
-209 * <p>
-210 * If hbck is run from the command line, there are a handful of arguments that
-211 * can be used to limit

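The integrity rules in this javadoc (no degenerate endkey==startkey regions, no holes, no overlaps) reduce to byte-wise comparisons over sorted region boundaries. A toy check using HBase's Bytes helper, assuming regions sorted by start key with an empty byte[] as the open-ended final end key:

import org.apache.hadoop.hbase.util.Bytes;

// boundaries[i] = { startKey, endKey } for region i, sorted by startKey.
static void checkBoundaries(byte[][][] boundaries) {
  for (int i = 0; i < boundaries.length; i++) {
    boolean last = i == boundaries.length - 1;
    if (!last && Bytes.equals(boundaries[i][0], boundaries[i][1])) {
      System.out.println("degenerate region at " + i); // endkey == startkey
    }
    if (!last) {
      int cmp = Bytes.compareTo(boundaries[i][1], boundaries[i + 1][0]);
      if (cmp < 0) System.out.println("hole after region " + i);
      if (cmp > 0) System.out.println("overlap after region " + i);
    }
  }
}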
[11/51] [partial] hbase-site git commit: Published site at 1acbd36c903b048141866b143507bfce124a5c5f.

2018-11-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5299e667/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html b/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html
index 34b9e2c..183427a 100644
--- a/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-final class RSGroupInfoManagerImpl
+final class RSGroupInfoManagerImpl
 extends java.lang.Object
 implements RSGroupInfoManager
 This is an implementation of RSGroupInfoManager which makes
@@ -467,7 +467,7 @@ implements 
 
 LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG
 
 
 
@@ -476,7 +476,7 @@ implements 
 
 RSGROUP_TABLE_DESC
-private static final HTableDescriptor RSGROUP_TABLE_DESC
+private static final HTableDescriptor RSGROUP_TABLE_DESC
 Table descriptor for hbase:rsgroup catalog 
table
 
 
@@ -486,7 +486,7 @@ implements 
 
 rsGroupMap
-private volatile Map<String, RSGroupInfo> rsGroupMap
+private volatile Map<String, RSGroupInfo> rsGroupMap
 
 
 
@@ -495,7 +495,7 @@ implements 
 
 tableMap
-private volatile Map<TableName, String> tableMap
+private volatile Map<TableName, String> tableMap
 
 
 
@@ -504,7 +504,7 @@ implements 
 
 masterServices
-private final MasterServices masterServices
+private final MasterServices masterServices
 
 
 
@@ -513,7 +513,7 @@ implements 
 
 rsGroupTable
-private Table rsGroupTable
+private Table rsGroupTable
 
 
 
@@ -522,7 +522,7 @@ implements 
 
 conn
-private final ClusterConnection conn
+private final ClusterConnection conn
 
 
 
@@ -531,7 +531,7 @@ implements 
 
 watcher
-private final ZKWatcher watcher
+private final ZKWatcher watcher
 
 
 
@@ -540,7 +540,7 @@ implements 
 
 rsGroupStartupWorker
-private final RSGroupInfoManagerImpl.RSGroupStartupWorker rsGroupStartupWorker
+private final RSGroupInfoManagerImpl.RSGroupStartupWorker rsGroupStartupWorker
 
 
 
@@ -549,7 +549,7 @@ implements 
 
 prevRSGroups
-private Set<String> prevRSGroups
+private Set<String> prevRSGroups
 
 
 
@@ -558,7 +558,7 @@ implements 
 
 serverEventsListenerThread
-private final RSGroupInfoManagerImpl.ServerEventsListenerThread serverEventsListenerThread
+private final RSGroupInfoManagerImpl.ServerEventsListenerThread serverEventsListenerThread
 
 
 
@@ -567,7 +567,7 @@ implements 
 
 failedOpenUpdaterThread
-private RSGroupInfoManagerImpl.FailedOpenUpdaterThread failedOpenUpdaterThread
+private RSGroupInfoManagerImpl.FailedOpenUpdaterThread failedOpenUpdaterThread
 
 
 
@@ -584,7 +584,7 @@ implements 
 
 RSGroupInfoManagerImpl
-private RSGroupInfoManagerImpl(MasterServices masterServices)
+private RSGroupInfoManagerImpl(MasterServices masterServices)
 throws java.io.IOException
 
 Throws:
@@ -606,7 +606,7 @@ implements 
 
 init
-private void init()
+private void init()
 throws java.io.IOException
 
 Throws:
@@ -620,7 +620,7 @@ implements 
 
 getInstance

[11/51] [partial] hbase-site git commit: Published site at 130057f13774f6b213cdb06952c805a29d59396e.

2018-11-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/68eae623/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.CompletedSnaphotDirectoriesFilter.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.CompletedSnaphotDirectoriesFilter.html b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.CompletedSnaphotDirectoriesFilter.html
index 63a00a7..32d6965 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.CompletedSnaphotDirectoriesFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.CompletedSnaphotDirectoriesFilter.html
@@ -43,7 +43,7 @@
 035import org.apache.hadoop.hbase.security.User;
 036import org.apache.hadoop.hbase.security.access.AccessControlLists;
 037import org.apache.hadoop.hbase.security.access.ShadedAccessControlUtil;
-038import org.apache.hadoop.hbase.security.access.TablePermission;
+038import org.apache.hadoop.hbase.security.access.UserPermission;
 039import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 040import org.apache.hadoop.hbase.util.FSUtils;
 041import org.apache.yetus.audience.InterfaceAudience;
@@ -443,10 +443,10 @@
 435
 436  private static SnapshotDescription writeAclToSnapshotDescription(SnapshotDescription snapshot,
 437      Configuration conf) throws IOException {
-438    ListMultimap<String, TablePermission> perms =
-439        User.runAsLoginUser(new PrivilegedExceptionAction<ListMultimap<String, TablePermission>>() {
+438    ListMultimap<String, UserPermission> perms =
+439        User.runAsLoginUser(new PrivilegedExceptionAction<ListMultimap<String, UserPermission>>() {
 440          @Override
-441          public ListMultimap<String, TablePermission> run() throws Exception {
+441          public ListMultimap<String, UserPermission> run() throws Exception {
 442            return AccessControlLists.getTablePermissions(conf,
 443              TableName.valueOf(snapshot.getTable()));
 444          }

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/68eae623/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.html b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.html
index 63a00a7..32d6965 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.html
@@ -43,7 +43,7 @@
 035import org.apache.hadoop.hbase.security.User;
 036import org.apache.hadoop.hbase.security.access.AccessControlLists;
 037import org.apache.hadoop.hbase.security.access.ShadedAccessControlUtil;
-038import org.apache.hadoop.hbase.security.access.TablePermission;
+038import org.apache.hadoop.hbase.security.access.UserPermission;
 039import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 040import org.apache.hadoop.hbase.util.FSUtils;
 041import org.apache.yetus.audience.InterfaceAudience;
@@ -443,10 +443,10 @@
 435
 436  private static SnapshotDescription writeAclToSnapshotDescription(SnapshotDescription snapshot,
 437      Configuration conf) throws IOException {
-438    ListMultimap<String, TablePermission> perms =
-439        User.runAsLoginUser(new PrivilegedExceptionAction<ListMultimap<String, TablePermission>>() {
+438    ListMultimap<String, UserPermission> perms =
+439        User.runAsLoginUser(new PrivilegedExceptionAction<ListMultimap<String, UserPermission>>() {
 440          @Override
-441          public ListMultimap<String, TablePermission> run() throws Exception {
+441          public ListMultimap<String, UserPermission> run() throws Exception {
 442            return AccessControlLists.getTablePermissions(conf,
 443              TableName.valueOf(snapshot.getTable()));
 444          }

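The hunk above wraps the ACL read in User.runAsLoginUser so the lookup executes as the server's login principal rather than the remote caller. A minimal sketch of that pattern; loadAcl is a hypothetical helper:

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.hbase.security.User;

// Run a privileged lookup as the process's login user (e.g. the HBase service principal).
String owner = User.runAsLoginUser(new PrivilegedExceptionAction<String>() {
  @Override
  public String run() throws Exception {
    return loadAcl(); // hypothetical helper reading protected state
  }
});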
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/68eae623/downloads.html
--
diff --git a/downloads.html b/downloads.html
index 5c0d243..405ed9e 100644
--- a/downloads.html
+++ b/downloads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Apache HBase Downloads
 
@@ -461,7 +461,7 @@ under the License. -->
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
 All rights reserved.
 
-      Last Published: 2018-11-14
+      Last Published: 2018-11-15
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/68eae623/export_control.html
--
diff --git a/export_control.html b/export_control.html
index 

[11/51] [partial] hbase-site git commit: Published site at d5e4faacc354c1bc4d93efa71ca97ee3a056123e.

2018-10-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b5e107c3/testdevapidocs/org/apache/hadoop/hbase/master/procedure/TestSchedulerQueueDeadLock.TableExclusiveProcedureWithId.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/master/procedure/TestSchedulerQueueDeadLock.TableExclusiveProcedureWithId.html b/testdevapidocs/org/apache/hadoop/hbase/master/procedure/TestSchedulerQueueDeadLock.TableExclusiveProcedureWithId.html
new file mode 100644
index 000..0097e60
--- /dev/null
+++ b/testdevapidocs/org/apache/hadoop/hbase/master/procedure/TestSchedulerQueueDeadLock.TableExclusiveProcedureWithId.html
@@ -0,0 +1,357 @@
+TestSchedulerQueueDeadLock.TableExclusiveProcedureWithId (Apache HBase 3.0.0-SNAPSHOT Test API)
+org.apache.hadoop.hbase.master.procedure
+Class TestSchedulerQueueDeadLock.TableExclusiveProcedureWithId
+
+
+
+java.lang.Object
+
+
+org.apache.hadoop.hbase.procedure2.Procedure<TEnv>
+
+
+org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.NoopProcedure<TestSchedulerQueueDeadLock.TestEnv>
+
+
+org.apache.hadoop.hbase.master.procedure.TestSchedulerQueueDeadLock.TableExclusiveProcedure
+
+
+org.apache.hadoop.hbase.master.procedure.TestSchedulerQueueDeadLock.TableExclusiveProcedureWithId
+
+
+
+
+
+
+
+
+
+
+
+
+
+All Implemented Interfaces:
+https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in 
java.lang">Comparableorg.apache.hadoop.hbase.procedure2.ProcedureTestSchedulerQueueDeadLock.TestEnv,
 org.apache.hadoop.hbase.master.procedure.TableProcedureInterface
+
+
+Enclosing class:
+TestSchedulerQueueDeadLock
+
+
+
+public static final class TestSchedulerQueueDeadLock.TableExclusiveProcedureWithId
+extends TestSchedulerQueueDeadLock.TableExclusiveProcedure
+
+
+
+
+
+
+
+
+
+
+
+Nested Class Summary
+
+
+
+
+Nested classes/interfaces inherited from class org.apache.hadoop.hbase.procedure2.Procedure
+org.apache.hadoop.hbase.procedure2.Procedure.LockState
+
+
+
+
+
+Nested classes/interfaces inherited from interface org.apache.hadoop.hbase.master.procedure.TableProcedureInterface
+org.apache.hadoop.hbase.master.procedure.TableProcedureInterface.TableOperationType
+
+
+
+
+
+
+
+
+Field Summary
+
+
+
+
+Fields inherited from class org.apache.hadoop.hbase.procedure2.Procedure
+NO_PROC_ID, NO_TIMEOUT
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Constructor and Description
+
+
+TableExclusiveProcedureWithId()
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All Methods Instance Methods Concrete Methods
+
+Modifier and Type
+Method and Description
+
+
+protected void
+setProcId(long procId)
+
+
+
+
+
+
+Methods inherited from class org.apache.hadoop.hbase.master.procedure.TestSchedulerQueueDeadLock.TableExclusiveProcedure
+acquireLock, execute, getTableName, getTableOperationType, holdLock, releaseLock
+
+
+
+
+
+Methods inherited from class org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.NoopProcedure
+abort, deserializeStateData, rollback, serializeStateData
+
+
+
+
+
+Methods inherited from class org.apache.hadoop.hbase.procedure2.Procedure
+addStackIndex, afterReplay, beforeReplay, bypass, compareTo, completionCleanup, doExecute, doRollback, elapsedTime, getChildrenLatch, getException, getLastUpdate, getNonceKey, getOwner, getParentProcId, getProcedureMetrics, getProcId, getProcIdHashCode, getProcName, getResult, getRootProcedureId, getRootProcId, getStackIndexes, getState, getSubmittedTime, getTimeout, getTimeoutTimestamp, hasChildren, hasException, hasLock, hasOwner, hasParent, hasTimeout, haveSameParent, incChildrenLatch, isBypass, isFailed, isFinished, isInitializing, isLockedWhenLoading, isRunnable, isSuccess,
[11/51] [partial] hbase-site git commit: Published site at 3fe8649b2c9ba1271c25e8f476548907e4c7a90d.
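TableExclusiveProcedureWithId overrides only setProcId, inheriting no-op lifecycle methods from ProcedureTestingUtility.NoopProcedure. A sketch of the same test idiom (a stub procedure that hooks one lifecycle point), with hypothetical names; TestEnv stands in for the suite's environment type:

import java.util.concurrent.CountDownLatch;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;

// Hypothetical stub mirroring how TableExclusiveProcedureWithId hooks setProcId.
public static final class NotifyingProcedure
    extends ProcedureTestingUtility.NoopProcedure<TestEnv> {
  private final CountDownLatch idAssigned = new CountDownLatch(1);

  @Override
  protected void setProcId(long procId) {
    super.setProcId(procId);
    idAssigned.countDown(); // the test thread can now safely read getProcId()
  }

  void awaitId() throws InterruptedException {
    idAssigned.await();
  }
}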

2018-10-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8f09a71d/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/BitSetNode.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/BitSetNode.html b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/BitSetNode.html
index be5c3fc..839d12b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/BitSetNode.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/BitSetNode.html
@@ -139,294 +139,296 @@
 131
 132  public BitSetNode(BitSetNode other, boolean resetDelete) {
 133    this.start = other.start;
-134    this.partial = other.partial;
-135    this.modified = other.modified.clone();
-136    // The resetDelete will be set to true when building cleanup tracker.
-137    // The intention here is that, if a procedure is not modified in this tracker, then we do not
-138    // need to take care of it, so we will set deleted to true for these bits, i.e, if modified is
-139    // 0, then we set deleted to 1, otherwise keep it as is. So here, the equation is
-140    // deleted |= ~modified, i.e,
-141    if (resetDelete) {
-142      this.deleted = new long[other.deleted.length];
-143      for (int i = 0; i < this.deleted.length; ++i) {
-144        this.deleted[i] |= ~(other.modified[i]);
-145      }
-146    } else {
-147      this.deleted = other.deleted.clone();
-148    }
-149  }
-150
-151  public void insertOrUpdate(final long procId) {
-152    updateState(procId, false);
-153  }
-154
-155  public void delete(final long procId) {
-156    updateState(procId, true);
-157  }
-158
-159  public long getStart() {
-160    return start;
-161  }
-162
-163  public long getEnd() {
-164    return start + (modified.length << ADDRESS_BITS_PER_WORD) - 1;
-165  }
-166
-167  public boolean contains(final long procId) {
-168    return start <= procId && procId <= getEnd();
-169  }
-170
-171  public DeleteState isDeleted(final long procId) {
-172    int bitmapIndex = getBitmapIndex(procId);
-173    int wordIndex = bitmapIndex >> ADDRESS_BITS_PER_WORD;
-174    if (wordIndex >= deleted.length) {
-175      return DeleteState.MAYBE;
-176    }
-177    return (deleted[wordIndex] & (1L << bitmapIndex)) != 0 ? DeleteState.YES : DeleteState.NO;
-178  }
-179
-180  public boolean isModified(long procId) {
-181    int bitmapIndex = getBitmapIndex(procId);
-182    int wordIndex = bitmapIndex >> ADDRESS_BITS_PER_WORD;
-183    if (wordIndex >= modified.length) {
-184      return false;
-185    }
-186    return (modified[wordIndex] & (1L << bitmapIndex)) != 0;
-187  }
-188
-189  /**
-190   * @return true, if all the procedures has been modified.
-191   */
-192  public boolean isAllModified() {
-193    // TODO: cache the value
-194    for (int i = 0; i < modified.length; ++i) {
-195      if ((modified[i] | deleted[i]) != WORD_MASK) {
-196        return false;
-197      }
-198    }
-199    return true;
-200  }
-201
-202  /**
-203   * @return all the active procedure ids in this bit set.
-204   */
-205  public long[] getActiveProcIds() {
-206    List<Long> procIds = new ArrayList<>();
-207    for (int wordIndex = 0; wordIndex < modified.length; wordIndex++) {
-208      if (deleted[wordIndex] == WORD_MASK || modified[wordIndex] == 0) {
-209        // This should be the common case, where most procedures has been deleted.
-210        continue;
-211      }
-212      long baseProcId = getStart() + (wordIndex << ADDRESS_BITS_PER_WORD);
-213      for (int i = 0; i < (1 << ADDRESS_BITS_PER_WORD); i++) {
-214        long mask = 1L << i;
-215        if ((deleted[wordIndex] & mask) == 0 && (modified[wordIndex] & mask) != 0) {
-216          procIds.add(baseProcId + i);
-217        }
-218      }
-219    }
-220    return procIds.stream().mapToLong(Long::longValue).toArray();
-221  }
-222
-223  /**
-224   * @return true, if there are no active procedures in this BitSetNode, else false.
-225   */
-226  public boolean isEmpty() {
-227    // TODO: cache the value
-228    for (int i = 0; i < deleted.length; ++i) {
-229      if (deleted[i] != WORD_MASK) {
-230        return false;
-231      }
-232    }
-233    return true;
-234  }
-235
-236  public void resetModified() {
-237    Arrays.fill(modified, 0);
-238  }
-239
-240  public void unsetPartialFlag() {
-241    partial = false;
-242    for (int i = 0; i < modified.length; ++i) {
-243      for (int j = 0; j < BITS_PER_WORD; ++j) {
-244        if ((modified[i] & (1L << j)) == 0) {
-245          deleted[i] |= (1L << j);
-246        }
-247      }
-248    }
-249  }
-250
-251  /**
-252   * Convert to
-253   * org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode
-254   * protobuf.
-255   */
-256  public ProcedureProtos.ProcedureStoreTracker.TrackerNode convert() {
-257    ProcedureProtos.ProcedureStoreTracker.TrackerNode.Builder builder =
-258
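The comment block at source lines 136-140 reduces to a single bit identity, deleted |= ~modified. A stand-alone sketch of that rule (a hypothetical demo class, not HBase code; note the real constructor applies the OR to a freshly zeroed array):

public final class CleanupMaskDemo {

  // Any bit not modified in this tracker is forced to "deleted".
  static long[] toCleanupDeleted(long[] modified, long[] deleted) {
    long[] out = new long[deleted.length];
    for (int i = 0; i < out.length; ++i) {
      out[i] = deleted[i] | ~modified[i]; // deleted |= ~modified
    }
    return out;
  }

  public static void main(String[] args) {
    long[] modified = { 0b1010L }; // procs 1 and 3 were modified
    long[] deleted  = { 0b0001L }; // proc 0 already deleted
    long result = toCleanupDeleted(modified, deleted)[0] & 0xFL; // low 4 bits for display
    System.out.println(Long.toBinaryString(result)); // prints 101: procs 0 and 2 now deleted
  }
}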

[11/51] [partial] hbase-site git commit: Published site at 7adf590106826b9e4432cfeee06acdc0ccff8c6e.

2018-10-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/425db230/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.html
index 8c65786..3a1281c 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.html
@@ -28,976 +28,860 @@
 020import static org.junit.Assert.assertEquals;
 021import static org.junit.Assert.assertFalse;
 022import static org.junit.Assert.assertTrue;
-023import static org.junit.Assert.fail;
-024
-025import java.io.FileNotFoundException;
-026import java.io.IOException;
-027import java.io.InputStream;
-028import java.io.OutputStream;
-029import java.util.ArrayList;
-030import java.util.Arrays;
-031import java.util.Comparator;
-032import java.util.HashSet;
-033import java.util.Set;
-034import org.apache.hadoop.conf.Configuration;
-035import org.apache.hadoop.fs.FileStatus;
-036import org.apache.hadoop.fs.FileSystem;
-037import org.apache.hadoop.fs.Path;
-038import org.apache.hadoop.hbase.HBaseClassTestRule;
-039import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
-040import org.apache.hadoop.hbase.procedure2.Procedure;
-041import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
-042import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
-043import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.LoadCounter;
-044import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.TestProcedure;
-045import org.apache.hadoop.hbase.procedure2.SequentialProcedure;
-046import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
-047import org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator;
-048import org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker;
-049import org.apache.hadoop.hbase.testclassification.MasterTests;
-050import org.apache.hadoop.hbase.testclassification.SmallTests;
-051import org.apache.hadoop.io.IOUtils;
-052import org.junit.After;
-053import org.junit.Before;
-054import org.junit.ClassRule;
-055import org.junit.Test;
-056import org.junit.experimental.categories.Category;
-057import org.mockito.Mockito;
-058import org.mockito.invocation.InvocationOnMock;
-059import org.mockito.stubbing.Answer;
-060import org.slf4j.Logger;
-061import org.slf4j.LoggerFactory;
+023
+024import java.io.FileNotFoundException;
+025import java.io.IOException;
+026import java.io.InputStream;
+027import java.io.OutputStream;
+028import java.util.ArrayList;
+029import java.util.Arrays;
+030import java.util.Comparator;
+031import java.util.HashSet;
+032import java.util.Set;
+033import org.apache.hadoop.conf.Configuration;
+034import org.apache.hadoop.fs.FileStatus;
+035import org.apache.hadoop.fs.FileSystem;
+036import org.apache.hadoop.fs.Path;
+037import org.apache.hadoop.hbase.HBaseClassTestRule;
+038import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
+039import org.apache.hadoop.hbase.procedure2.Procedure;
+040import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
+041import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
+042import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.LoadCounter;
+043import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.TestProcedure;
+044import org.apache.hadoop.hbase.procedure2.SequentialProcedure;
+045import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
+046import org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker;
+047import org.apache.hadoop.hbase.testclassification.MasterTests;
+048import org.apache.hadoop.hbase.testclassification.SmallTests;
+049import org.apache.hadoop.io.IOUtils;
+050import org.junit.After;
+051import org.junit.Before;
+052import org.junit.ClassRule;
+053import org.junit.Test;
+054import org.junit.experimental.categories.Category;
+055import org.mockito.Mockito;
+056import org.mockito.invocation.InvocationOnMock;
+057import org.mockito.stubbing.Answer;
+058import org.slf4j.Logger;
+059import org.slf4j.LoggerFactory;
+060
+061import org.apache.hbase.thirdparty.com.google.protobuf.Int64Value;
 062
-063import org.apache.hbase.thirdparty.com.google.protobuf.Int64Value;
-064
-065@Category({MasterTests.class, SmallTests.class})
-066public class TestWALProcedureStore {
-067
-068  @ClassRule
-069  public static final HBaseClassTestRule CLASS_RULE =
-070      HBaseClassTestRule.forClass(TestWALProcedureStore.class);
+063@Category({MasterTests.class, SmallTests.class})
+064public class TestWALProcedureStore {
+065
+066  @ClassRule
+067  public static final HBaseClassTestRule CLASS_RULE =
+068
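The visible change in this hunk is import ordering: the shaded Int64Value import moves into a trailing group of its own. Illustrated below under the assumption that this follows HBase's checkstyle import-order rule (static imports first, then regular imports, then shaded thirdparty imports, blank lines between groups):

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.junit.Test;

import org.apache.hbase.thirdparty.com.google.protobuf.Int64Value;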

[11/51] [partial] hbase-site git commit: Published site at 5fbb227deb365fe812d433fe39b85ac4b0ddee20.

2018-10-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c9ebe686/devapidocs/org/apache/hadoop/hbase/procedure2/TwoPhaseProcedure.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/TwoPhaseProcedure.html b/devapidocs/org/apache/hadoop/hbase/procedure2/TwoPhaseProcedure.html
index 773eb00..fa380ee 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/TwoPhaseProcedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/TwoPhaseProcedure.html
@@ -180,7 +180,7 @@ extends Procedure
-abort, acquireLock, addStackIndex, afterReplay, beforeReplay, bypass, compareTo, completionCleanup, deserializeStateData, doAcquireLock, doExecute, doReleaseLock, doRollback, elapsedTime, execute, getChildrenLatch, getException, getLastUpdate, getNonceKey, getOwner, getParentProcId, getProcedureMetrics, getProcId, getProcIdHashCode, getProcName, getResult, getRootProcedureId, getRootProcId, getStackIndexes, getState, getSubmittedTime, getTimeout, getTimeoutTimestamp, hasChildren, hasException, hasLock, hasOwner, hasParent, hasTimeout, haveSameParent, holdLock, incChildrenLatch, isBypass, isFailed, isFinished, isInitializing, isRunnable, isSuccess, isWaiting, isYieldAfterExecutionStep, lockedWhenLoading, needPersistence, releaseLock, removeStackIndex, resetPersistence, restoreLock, rollback, serializeStateData, setAbortFailure, setChildrenLatch, setFailure, setFailure, setLastUpdate, setNonceKey, setOwner, setOwner, setParentProcId, setProcId, setResult, setRootProcId, setStackIndexes, setState, setSubmittedTime, setTimeout, setTimeoutFailure, shouldWaitClientAck, skipPersistence, toString, toStringClass, toStringClassDetails, toStringDetails, toStringSimpleSB, toStringState, tryRunnable, updateMetricsOnFinish, updateMetricsOnSubmit, updateTimestamp, waitInitialized, wasExecuted
+abort, acquireLock, addStackIndex, afterReplay, beforeReplay, bypass, compareTo, completionCleanup, deserializeStateData, doAcquireLock, doExecute, doReleaseLock, doRollback, elapsedTime, execute, getChildrenLatch, getException, getLastUpdate, getNonceKey, getOwner, getParentProcId, getProcedureMetrics, getProcId, getProcIdHashCode, getProcName, getResult, getRootProcedureId, getRootProcId, getStackIndexes, getState, getSubmittedTime, getTimeout, getTimeoutTimestamp, hasChildren, hasException, hasLock, hasOwner, hasParent, hasTimeout, haveSameParent, holdLock, incChildrenLatch, isBypass, isFailed, isFinished, isInitializing, isRunnable, isSuccess, isWaiting, isYieldAfterExecutionStep, lockedWhenLoading, needPersistence, releaseLock, removeStackIndex, resetPersistence, restoreLock, rollback, serializeStateData, setAbortFailure, setChildrenLatch, setFailure, setFailure, setLastUpdate, setNonceKey, setOwner, setOwner, setParentProcId, setProcId, setResult, setRootProcId, setStackIndexes, setState, setSubmittedTime, setTimeout, setTimeoutFailure, shouldWaitClientAck, skipPersistence, toString, toStringClass, toStringClassDetails, toStringDetails, toStringSimpleSB, toStringState, tryRunnable, updateMetricsOnFinish, updateMetricsOnSubmit, updateTimestamp, waitInitialized, wasExecuted
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c9ebe686/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html
index adb8b5d..dfd7844 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/class-use/Procedure.html
@@ -652,22 +652,28 @@
 Procedure<?> proc)

+static IOException
+MasterProcedureUtil.unwrapRemoteIOException(Procedure proc)
+This is a version of
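The javadoc for unwrapRemoteIOException is truncated above. As a hedged sketch of the unwrap pattern the name suggests (this is not the actual MasterProcedureUtil body), recovering the original IOException from a Hadoop RemoteException wrapper looks roughly like:

import java.io.IOException;
import org.apache.hadoop.ipc.RemoteException;

final class RemoteIOExceptionUnwrapSketch {
  // RemoteException#unwrapRemoteException re-instantiates the server-side
  // exception class locally when it is known; otherwise it returns itself.
  static IOException unwrap(Exception failure) {
    Throwable cause = failure.getCause() != null ? failure.getCause() : failure;
    if (cause instanceof RemoteException) {
      return ((RemoteException) cause).unwrapRemoteException();
    }
    return cause instanceof IOException ? (IOException) cause : new IOException(cause);
  }
}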

[11/51] [partial] hbase-site git commit: Published site at 821e4d7de2d576189f4288d1c2acf9e9a9471f5c.

2018-10-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/323b17d9/testdevapidocs/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.html b/testdevapidocs/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.html
index 5348cd0..6ba8c94 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -49,7 +49,7 @@ var activeTableTab = "activeTableTab";
-Prev Class
+Prev Class
 Next Class
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
-public class TestProcedureStoreTracker
+public class TestProcedureStoreTracker
 extends Object
@@ -179,26 +179,30 @@ extends Object
 void
-testGetActiveProcIds()
+testGetActiveMinProcId()

 void
-testLoad()
+testGetActiveProcIds()

 void
-testPartialTracker()
+testLoad()

 void
-testRandLoad()
+testPartialTracker()

 void
-testSeqInsertAndDelete()
+testRandLoad()

 void
+testSeqInsertAndDelete()
+
+void
 testSetDeletedIfModified()
@@ -229,7 +233,7 @@ extends Object
 CLASS_RULE
-public static final HBaseClassTestRule CLASS_RULE
+public static final HBaseClassTestRule CLASS_RULE
@@ -238,7 +242,7 @@ extends Object
 LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG
@@ -255,7 +259,7 @@ extends Object
 TestProcedureStoreTracker
-public TestProcedureStoreTracker()
+public TestProcedureStoreTracker()
@@ -272,7 +276,7 @@ extends Object
 testSeqInsertAndDelete
-public void testSeqInsertAndDelete()
+public void testSeqInsertAndDelete()
@@ -281,7 +285,7 @@ extends Object
 testPartialTracker
-public void testPartialTracker()
+public void testPartialTracker()
@@ -290,7 +294,7 @@ extends Object
 testBasicCRUD
-public void testBasicCRUD()
+public void testBasicCRUD()
@@ -299,7 +303,7 @@ extends Object
 testRandLoad
-public void testRandLoad()
+public void testRandLoad()
@@ -308,7 +312,7 @@ extends Object
 testLoad
-public void testLoad()
+public void testLoad()
@@ -317,7 +321,7 @@ extends Object
 testDelete
-public void testDelete()
+public void testDelete()
@@ -326,16 +330,25 @@ extends Object
 testSetDeletedIfModified
-public void testSetDeletedIfModified()
+public void testSetDeletedIfModified()

 testGetActiveProcIds
-public void testGetActiveProcIds()
+public void testGetActiveProcIds()
+
+testGetActiveMinProcId
+public void testGetActiveMinProcId()
@@ -366,7 +379,7 @@ extends Object
-Prev Class
+Prev Class
 Next Class
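The new method in this page is testGetActiveMinProcId. A hedged sketch of what such a test plausibly asserts (the assertions are assumptions, not the committed test body):

import static org.junit.Assert.assertEquals;

import org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker;
import org.junit.Test;

public class ActiveMinProcIdSketch {
  @Test
  public void testGetActiveMinProcId() {
    ProcedureStoreTracker tracker = new ProcedureStoreTracker();
    tracker.insert(10);
    tracker.insert(100);
    tracker.insert(1000);
    assertEquals(10, tracker.getActiveMinProcId()); // smallest live proc id
    tracker.delete(10);
    assertEquals(100, tracker.getActiveMinProcId()); // minimum moves up after delete
  }
}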
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/323b17d9/testdevapidocs/org/apache/hadoop/hbase/procedure2/store/class-use/TestBitSetNode.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/procedure2/store/class-use/TestBitSetNode.html b/testdevapidocs/org/apache/hadoop/hbase/procedure2/store/class-use/TestBitSetNode.html
new file mode 100644
index 000..1297c80
--- /dev/null
+++ b/testdevapidocs/org/apache/hadoop/hbase/procedure2/store/class-use/TestBitSetNode.html
@@ -0,0 +1,125 @@
+Uses of Class org.apache.hadoop.hbase.procedure2.store.TestBitSetNode (Apache HBase 3.0.0-SNAPSHOT Test API)
+(standard javadoc page scaffolding and navigation links omitted)
+org.apache.hadoop.hbase
+Class StartMiniClusterOption.Builder
+
+java.lang.Object
+  org.apache.hadoop.hbase.StartMiniClusterOption.Builder
+
+Enclosing class:
+StartMiniClusterOption
+
+public static final class StartMiniClusterOption.Builder
+extends Object
+Builder pattern for creating an StartMiniClusterOption.
+
+ The default values of its fields should be considered public and constant. Changing the default
+ values may cause other tests to fail.
+
+Field Summary
+
+Modifier and Type        Field and Description
+private boolean          createRootDir
+private boolean          createWALDir
+private String[]         dataNodeHosts
+private Class<? extends org.apache.hadoop.hbase.master.HMaster>   masterClass
+private int              numDataNodes
+private int              numMasters
+private int              numRegionServers
+private int              numZkServers
+private Class<? extends MiniHBaseCluster.MiniHBaseClusterRegionServer>   rsClass
+private List<Integer>    rsPorts
+
+Constructor Summary
+
+Modifier   Constructor and Description
+private    Builder()
+
+Method Summary
+
+All Methods  Instance Methods  Concrete Methods
+
+Modifier and Type                 Method and Description
+StartMiniClusterOption            build()
+StartMiniClusterOption.Builder    createRootDir(boolean createRootDir)
+StartMiniClusterOption.Builder    createWALDir(boolean createWALDir)
+StartMiniClusterOption.Builder    dataNodeHosts(String[] dataNodeHosts)
+StartMiniClusterOption.Builder    masterClass(Class<? extends org.apache.hadoop.hbase.master.HMaster> masterClass)
+StartMiniClusterOption.Builder    numDataNodes(int numDataNodes)
+StartMiniClusterOption.Builder    numMasters(int numMasters)
+StartMiniClusterOption.Builder    numRegionServers(int numRegionServers)
+StartMiniClusterOption.Builder    numZkServers(int numZkServers)
+StartMiniClusterOption.Builder    rsClass(Class<? extends
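(The rsClass row above is cut off in the archive.) Since this Builder's constructor is private, callers go through a static factory; a typical test-side use, sketched under the assumption that StartMiniClusterOption.builder() is that factory:

StartMiniClusterOption option = StartMiniClusterOption.builder()
    .numMasters(2)
    .numRegionServers(3)
    .numDataNodes(3)        // commonly matched to the region server count
    .createWALDir(true)
    .build();
// A test utility would then start the cluster with it, e.g.
// TEST_UTIL.startMiniCluster(option) (call site assumed, not shown in this page).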

[11/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html b/devapidocs/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html
index dec3733..037ad83 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":9,"i3":10,"i4":10,"i5":42,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":42,"i14":10,"i15":10,"i16":10,"i17":9,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10};
+var methods = {"i0":10,"i1":10,"i2":9,"i3":10,"i4":10,"i5":10,"i6":42,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":42,"i15":10,"i16":10,"i17":10,"i18":10,"i19":9,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -124,7 +124,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class SingleColumnValueFilter
+public class SingleColumnValueFilter
 extends FilterBase
 This filter is used to filter cells based on value. It takes a CompareFilter.CompareOp
 operator (equal, greater, not equal, etc), and either a byte [] value or
@@ -337,22 +337,26 @@ extends FilterBase
 createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
+boolean
+equals(Object obj)
+
 Filter.ReturnCode
 filterCell(Cell c)
 A way to filter based on the column family, column qualifier and/or the column value.

 private boolean
 filterColumnValue(Cell cell)

 Filter.ReturnCode
 filterKeyValue(Cell c)
 Deprecated.

 boolean
 filterRow()
 Filters that never filter by rows based on previously gathered state from
@@ -360,37 +364,37 @@ extends FilterBase

 boolean
 filterRowKey(Cell cell)
 Filters a row based on the row key.

 ByteArrayComparable
 getComparator()

 CompareOperator
 getCompareOperator()

 byte[]
 getFamily()

 boolean
 getFilterIfMissing()
 Get whether entire row should be filtered if column is not found.

 boolean
 getLatestVersionOnly()
 Get whether only the latest version of the column value should be compared.

 CompareFilter.CompareOp
 getOperator()
 Deprecated.
@@ -398,53 +402,57 @@ extends FilterBase

 byte[]
 getQualifier()

 boolean
 hasFilterRow()
 Filters that never filter by modifying the returned List of Cells can
 inherit this implementation that does nothing.

+int
+hashCode()
+
 boolean
 isFamilyEssential(byte[] name)
 The only CF this filter needs is given column family.

 static SingleColumnValueFilter
 parseFrom(byte[] pbBytes)

 void
 reset()
 Filters that are purely stateless and do nothing in their reset() methods can inherit
 this null/empty implementation.

 void
 setFilterIfMissing(boolean filterIfMissing)
 Set whether entire row should be filtered if column is not found.

 void
 setLatestVersionOnly(boolean latestVersionOnly)
 Set whether only the latest version of the column value should be compared.

 byte[]
 toByteArray()
 Return length 0 byte array for Filters that don't require special serialization

 String
 toString()
 Return filter's info for debugging and logging purpose.
@@ -470,7 +478,7 @@ extends FilterBase

 Methods inherited from class java.lang.Object
-clone, equals, finalize, getClass, hashCode, notify,
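Since this is a public-audience filter, a short client-side example fits here (column and value names are placeholders):

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

// Keep only rows whose cf:q cell equals "expected"; rows missing the column
// are dropped because filterIfMissing is set.
SingleColumnValueFilter filter = new SingleColumnValueFilter(
    Bytes.toBytes("cf"), Bytes.toBytes("q"),
    CompareOperator.EQUAL, Bytes.toBytes("expected"));
filter.setFilterIfMissing(true);
filter.setLatestVersionOnly(true);
Scan scan = new Scan().setFilter(filter);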
 

[11/51] [partial] hbase-site git commit: Published site at 6a5b4f2a5c188f8eef4f2250b8b7db7dd1e750e4.

2018-08-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1ff05a18/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.html
index 2709ea3..4a11f27 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.html
@@ -37,309 +37,299 @@
 029import org.apache.hadoop.hbase.executor.EventType;
 030import org.apache.hadoop.hbase.regionserver.HRegion;
 031import org.apache.hadoop.hbase.regionserver.Region;
-032import org.apache.hadoop.hbase.regionserver.RegionServerAccounting;
-033import org.apache.hadoop.hbase.regionserver.RegionServerServices;
-034import org.apache.hadoop.hbase.regionserver.RegionServerServices.PostOpenDeployContext;
-035import org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTransitionContext;
-036import org.apache.hadoop.hbase.util.CancelableProgressable;
-037import org.apache.yetus.audience.InterfaceAudience;
-038import org.slf4j.Logger;
-039import org.slf4j.LoggerFactory;
-040import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
-041/**
-042 * Handles opening of a region on a region server.
-043 * <p>
-044 * This is executed after receiving an OPEN RPC from the master or client.
-045 */
-046@InterfaceAudience.Private
-047public class OpenRegionHandler extends EventHandler {
-048  private static final Logger LOG = LoggerFactory.getLogger(OpenRegionHandler.class);
-049
-050  protected final RegionServerServices rsServices;
-051
-052  private final RegionInfo regionInfo;
-053  private final TableDescriptor htd;
-054  private final long masterSystemTime;
-055
-056  public OpenRegionHandler(final Server server,
-057      final RegionServerServices rsServices, RegionInfo regionInfo,
-058      TableDescriptor htd, long masterSystemTime) {
-059    this(server, rsServices, regionInfo, htd, masterSystemTime, EventType.M_RS_OPEN_REGION);
-060  }
-061
-062  protected OpenRegionHandler(final Server server,
-063      final RegionServerServices rsServices, final RegionInfo regionInfo,
-064      final TableDescriptor htd, long masterSystemTime, EventType eventType) {
-065    super(server, eventType);
-066    this.rsServices = rsServices;
-067    this.regionInfo = regionInfo;
-068    this.htd = htd;
-069    this.masterSystemTime = masterSystemTime;
-070  }
-071
-072  public RegionInfo getRegionInfo() {
-073    return regionInfo;
-074  }
-075
-076  @Override
-077  public void process() throws IOException {
-078    boolean openSuccessful = false;
-079    final String regionName = regionInfo.getRegionNameAsString();
-080    HRegion region = null;
-081
-082    try {
-083      if (this.server.isStopped() || this.rsServices.isStopping()) {
-084        return;
-085      }
-086      final String encodedName = regionInfo.getEncodedName();
-087
-088      // 2 different difficult situations can occur
-089      // 1) The opening was cancelled. This is an expected situation
-090      // 2) The region is now marked as online while we're suppose to open. This would be a bug.
-091
-092      // Check that this region is not already online
-093      if (this.rsServices.getRegion(encodedName) != null) {
-094        LOG.error("Region " + encodedName +
-095            " was already online when we started processing the opening. " +
-096            "Marking this new attempt as failed");
-097        return;
-098      }
-099
-100      // Check that we're still supposed to open the region.
-101      // If fails, just return.  Someone stole the region from under us.
-102      if (!isRegionStillOpening()){
-103        LOG.error("Region " + encodedName + " opening cancelled");
-104        return;
-105      }
-106
-107      // Open region.  After a successful open, failures in subsequent
-108      // processing needs to do a close as part of cleanup.
-109      region = openRegion();
-110      if (region == null) {
-111        return;
-112      }
-113
-114      if (!updateMeta(region, masterSystemTime) || this.server.isStopped() ||
-115          this.rsServices.isStopping()) {
-116        return;
-117      }
-118
-119      if (!isRegionStillOpening()) {
-120        return;
-121      }
-122
-123      // Successful region open, and add it to MutableOnlineRegions
-124      this.rsServices.addRegion(region);
-125      openSuccessful = true;
-126
-127      // Done!  Successful region open
-128      LOG.debug("Opened " + regionName + " on " + this.server.getServerName());
-129    } finally {
-130      // Do all clean up here
-131      if

[11/51] [partial] hbase-site git commit: Published site at 63f2d3cbdc8151f5f61f33e0a078c51b9ac076a5.

2018-08-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7ae6a80c/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
index cfe74f4..7d4b201 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
@@ -153,11 +153,18 @@
 
 
 
+private void
+HMaster.checkUnsupportedProcedure(Map<Class<? extends Procedure>, List<Procedure<MasterProcedureEnv>>> procsByType)
+In HBASE-20811, we have introduced a new TRSP to assign/unassign/move regions, and it is
+ incompatible with the old AssignProcedure/UnassignProcedure/MoveRegionProcedure.
+
 void
 MasterCoprocessorHost.preAbortProcedure(ProcedureExecutor<MasterProcedureEnv> procEnv, long procId)

 private long
 ClusterSchemaServiceImpl.submitProcedure(Procedure<MasterProcedureEnv> procedure, NonceKey nonceKey)
@@ -191,45 +198,58 @@

 boolean
-MoveRegionProcedure.abort(MasterProcedureEnv env)
+MoveRegionProcedure.abort(MasterProcedureEnv env)
+Deprecated.

 protected boolean
-SplitTableRegionProcedure.abort(MasterProcedureEnv env)
+RegionRemoteProcedureBase.abort(MasterProcedureEnv env)

 protected boolean
-MergeTableRegionsProcedure.abort(MasterProcedureEnv env)
+SplitTableRegionProcedure.abort(MasterProcedureEnv env)

 protected boolean
-RegionTransitionProcedure.abort(MasterProcedureEnv env)
+MergeTableRegionsProcedure.abort(MasterProcedureEnv env)

+protected boolean
+RegionTransitionProcedure.abort(MasterProcedureEnv env)
+Deprecated.
+
 protected Procedure.LockState
 MergeTableRegionsProcedure.acquireLock(MasterProcedureEnv env)

+static TransitRegionStateProcedure
+TransitRegionStateProcedure.assign(MasterProcedureEnv env, RegionInfo region, ServerName targetServer)
+
-protected Procedure.LockState
-RegionTransitionProcedure.acquireLock(MasterProcedureEnv env)
+(package private) static void
+AssignmentManagerUtil.checkClosedRegion(MasterProcedureEnv env, RegionInfo regionInfo)

-protected boolean
-RegionTransitionProcedure.addToRemoteDispatcher(MasterProcedureEnv env, ServerName targetServer)
-Be careful! At the end of this method, the procedure has either succeeded
- and this procedure has been set into a suspended state OR, we failed and
- this procedure has been put back on the scheduler ready for another worker
- to pick it up.
+private void
+SplitTableRegionProcedure.checkClosedRegions(MasterProcedureEnv env)

+private void
+MergeTableRegionsProcedure.checkClosedRegions(MasterProcedureEnv env)
+
 private static void
 MergeTableRegionsProcedure.checkRegionsToMerge(MasterProcedureEnv env, RegionInfo[] regionsToMerge, boolean forcible)

 private static void
 MergeTableRegionsProcedure.checkRegionsToMerge(MasterProcedureEnv env, RegionInfo regionToMergeA,
@@ -238,7 +258,7 @@
 One time checks.

 private void
 SplitTableRegionProcedure.checkSplittable(MasterProcedureEnv env, RegionInfo regionToSplit,
@@ -246,21 +266,48 @@
 Check whether the region is splittable

 private void
 MergeTableRegionsProcedure.cleanupMergedRegion(MasterProcedureEnv env)
 Clean up a merged region

+private void
+TransitRegionStateProcedure.closeRegion(MasterProcedureEnv env, RegionStateNode regionNode)
+
+private StateMachineProcedure.Flow
+TransitRegionStateProcedure.confirmClosed(MasterProcedureEnv env, RegionStateNode regionNode)
+
+private StateMachineProcedure.Flow
+TransitRegionStateProcedure.confirmOpened(MasterProcedureEnv env, RegionStateNode regionNode)
+
+private TransitRegionStateProcedure[]
+SplitTableRegionProcedure.createAssignProcedures(MasterProcedureEnv env)
+
+private TransitRegionStateProcedure[]
+MergeTableRegionsProcedure.createAssignProcedures(MasterProcedureEnv env)

-private AssignProcedure[]
-SplitTableRegionProcedure.createAssignProcedures(MasterProcedureEnv env, int regionReplication)
+private static TransitRegionStateProcedure[]
+AssignmentManagerUtil.createAssignProcedures(MasterProcedureEnv env, Stream<

[11/51] [partial] hbase-site git commit: Published site at 092efb42749bf7fc6ad338c96aae8e7b9d3a2c74.

2018-08-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f3d62514/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.OutputSink.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.OutputSink.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.OutputSink.html
index 63e4b46..514f830 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.OutputSink.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.OutputSink.html
@@ -468,15 +468,15 @@
 460   * creating it if necessary.
 461   * @param logEntry
 462   * @param fileNameBeingSplit the file being split currently. Used to generate tmp file name.
-463   * @param conf
-464   * @return Path to file into which to dump split log edits.
-465   * @throws IOException
-466   */
-467  @SuppressWarnings("deprecation")
-468  @VisibleForTesting
-469  static Path getRegionSplitEditsPath(final Entry logEntry, String fileNameBeingSplit,
-470      Configuration conf)
-471  throws IOException {
+463   * @param tmpDirName of the directory used to sideline old recovered edits file
+464   * @param conf
+465   * @return Path to file into which to dump split log edits.
+466   * @throws IOException
+467   */
+468  @SuppressWarnings("deprecation")
+469  @VisibleForTesting
+470  static Path getRegionSplitEditsPath(final Entry logEntry, String fileNameBeingSplit,
+471      String tmpDirName, Configuration conf) throws IOException {
 472    FileSystem fs = FileSystem.get(conf);
 473    Path rootDir = FSUtils.getRootDir(conf);
 474    Path tableDir = FSUtils.getTableDir(rootDir, logEntry.getKey().getTableName());
@@ -491,7 +491,7 @@
 483      return null;
 484    }
 485    if (fs.exists(dir) && fs.isFile(dir)) {
-486      Path tmp = new Path("/tmp");
+486      Path tmp = new Path(tmpDirName);
 487      if (!fs.exists(tmp)) {
 488        fs.mkdirs(tmp);
 489      }
@@ -1520,411 +1520,413 @@
 1512 * @return a path with a write for that path. caller should close.
 1513 */
 1514WriterAndPath createWAP(byte[] region, Entry entry) throws IOException {
-1515  Path regionedits = getRegionSplitEditsPath(entry,
-1516      fileBeingSplit.getPath().getName(), conf);
-1517  if (regionedits == null) {
-1518    return null;
-1519  }
-1520  FileSystem rootFs = FileSystem.get(conf);
-1521  if (rootFs.exists(regionedits)) {
-1522    LOG.warn("Found old edits file. It could be the "
-1523        + "result of a previous failed split attempt. Deleting " + regionedits + ", length="
-1524        + rootFs.getFileStatus(regionedits).getLen());
-1525    if (!rootFs.delete(regionedits, false)) {
-1526      LOG.warn("Failed delete of old {}", regionedits);
-1527    }
-1528  }
-1529  Writer w = createWriter(regionedits);
-1530  LOG.debug("Creating writer path={}", regionedits);
-1531  return new WriterAndPath(regionedits, w, entry.getKey().getSequenceId());
-1532}
-1533
-1534void filterCellByStore(Entry logEntry) {
-1535  Map<byte[], Long> maxSeqIdInStores =
-1536      regionMaxSeqIdInStores.get(Bytes.toString(logEntry.getKey().getEncodedRegionName()));
-1537  if (MapUtils.isEmpty(maxSeqIdInStores)) {
-1538    return;
-1539  }
-1540  // Create the array list for the cells that aren't filtered.
-1541  // We make the assumption that most cells will be kept.
-1542  ArrayList<Cell> keptCells = new ArrayList<>(logEntry.getEdit().getCells().size());
-1543  for (Cell cell : logEntry.getEdit().getCells()) {
-1544    if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) {
-1545      keptCells.add(cell);
-1546    } else {
-1547      byte[] family = CellUtil.cloneFamily(cell);
-1548      Long maxSeqId = maxSeqIdInStores.get(family);
-1549      // Do not skip cell even if maxSeqId is null. Maybe we are in a rolling upgrade,
-1550      // or the master was crashed before and we can not get the information.
-1551      if (maxSeqId == null || maxSeqId.longValue() < logEntry.getKey().getSequenceId()) {
-1552        keptCells.add(cell);
-1553      }
-1554    }
-1555  }
-1556
-1557  // Anything in the keptCells array list is still live.
-1558  // So rather than removing the cells from the array list
-1559  // which would be an O(n^2) operation, we just replace the list
-1560  logEntry.getEdit().setCells(keptCells);
-1561}
-1562
-1563@Override
-1564public void append(RegionEntryBuffer buffer) throws IOException {
-1565  appendBuffer(buffer, true);
-1566}
-1567
-1568WriterAndPath appendBuffer(RegionEntryBuffer buffer, boolean reusable) throws IOException{
-1569  List<Entry> entries = buffer.entryBuffer;
-1570  if (entries.isEmpty()) {
-1571    LOG.warn("got an empty buffer, skipping");
-1572    return null;
-1573  }
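The comment at source lines 1557-1559 names the design choice in filterCellByStore: copy survivors into a new list and swap it in, rather than deleting in place. The same pattern in isolation (a generic sketch, not HBase code):

import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;

final class KeepListSketch {
  // O(n) survivor copy; repeated ArrayList.remove() calls would degrade to
  // O(n^2), since each removal shifts the tail of the backing array.
  static <T> List<T> keepMatching(List<T> items, Predicate<T> keep) {
    List<T> kept = new ArrayList<>(items.size()); // assume most items survive
    for (T item : items) {
      if (keep.test(item)) {
        kept.add(item);
      }
    }
    return kept; // the caller swaps this in wholesale, as setCells(keptCells) does above
  }
}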

[11/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

2018-08-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
index f2fd195..b293714 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
@@ -619,1696 +619,1698 @@
 611    try {
 612      long procId =
 613          master.createTable(tableDescriptor, splitKeys, req.getNonceGroup(), req.getNonce());
-614      return CreateTableResponse.newBuilder().setProcId(procId).build();
-615    } catch (IOException ioe) {
-616      throw new ServiceException(ioe);
-617    }
-618  }
-619
-620  @Override
-621  public DeleteColumnResponse deleteColumn(RpcController controller,
-622      DeleteColumnRequest req) throws ServiceException {
-623    try {
-624      long procId = master.deleteColumn(
-625        ProtobufUtil.toTableName(req.getTableName()),
-626        req.getColumnName().toByteArray(),
-627        req.getNonceGroup(),
-628        req.getNonce());
-629      if (procId == -1) {
-630        // This mean operation was not performed in server, so do not set any procId
-631        return DeleteColumnResponse.newBuilder().build();
-632      } else {
-633        return DeleteColumnResponse.newBuilder().setProcId(procId).build();
-634      }
-635    } catch (IOException ioe) {
-636      throw new ServiceException(ioe);
-637    }
-638  }
-639
-640  @Override
-641  public DeleteNamespaceResponse deleteNamespace(RpcController controller,
-642      DeleteNamespaceRequest request) throws ServiceException {
-643    try {
-644      long procId = master.deleteNamespace(
-645        request.getNamespaceName(),
-646        request.getNonceGroup(),
-647        request.getNonce());
-648      return DeleteNamespaceResponse.newBuilder().setProcId(procId).build();
-649    } catch (IOException e) {
-650      throw new ServiceException(e);
-651    }
-652  }
-653
-654  /**
-655   * Execute Delete Snapshot operation.
-656   * @return DeleteSnapshotResponse (a protobuf wrapped void) if the snapshot existed and was
-657   *    deleted properly.
-658   * @throws ServiceException wrapping SnapshotDoesNotExistException if specified snapshot did not
-659   *    exist.
-660   */
-661  @Override
-662  public DeleteSnapshotResponse deleteSnapshot(RpcController controller,
-663      DeleteSnapshotRequest request) throws ServiceException {
-664    try {
-665      master.checkInitialized();
-666      master.snapshotManager.checkSnapshotSupport();
-667
-668      LOG.info(master.getClientIdAuditPrefix() + " delete " + request.getSnapshot());
-669      master.snapshotManager.deleteSnapshot(request.getSnapshot());
-670      return DeleteSnapshotResponse.newBuilder().build();
-671    } catch (IOException e) {
-672      throw new ServiceException(e);
-673    }
-674  }
-675
-676  @Override
-677  public DeleteTableResponse deleteTable(RpcController controller,
-678      DeleteTableRequest request) throws ServiceException {
-679    try {
-680      long procId = master.deleteTable(ProtobufUtil.toTableName(
-681          request.getTableName()), request.getNonceGroup(), request.getNonce());
-682      return DeleteTableResponse.newBuilder().setProcId(procId).build();
-683    } catch (IOException ioe) {
-684      throw new ServiceException(ioe);
-685    }
-686  }
-687
-688  @Override
-689  public TruncateTableResponse truncateTable(RpcController controller, TruncateTableRequest request)
-690      throws ServiceException {
-691    try {
-692      long procId = master.truncateTable(
-693        ProtobufUtil.toTableName(request.getTableName()),
-694        request.getPreserveSplits(),
-695        request.getNonceGroup(),
-696        request.getNonce());
-697      return TruncateTableResponse.newBuilder().setProcId(procId).build();
-698    } catch (IOException ioe) {
-699      throw new ServiceException(ioe);
-700    }
-701  }
-702
-703  @Override
-704  public DisableTableResponse disableTable(RpcController controller,
-705      DisableTableRequest request) throws ServiceException {
-706    try {
-707      long procId = master.disableTable(
-708        ProtobufUtil.toTableName(request.getTableName()),
-709        request.getNonceGroup(),
-710        request.getNonce());
-711      return DisableTableResponse.newBuilder().setProcId(procId).build();
-712    } catch (IOException ioe) {
-713      throw new ServiceException(ioe);
-714    }
-715  }
-716
-717  @Override
-718  public EnableCatalogJanitorResponse enableCatalogJanitor(RpcController c,
-719      EnableCatalogJanitorRequest req) throws ServiceException {
-720    rpcPreCheck("enableCatalogJanitor");
-721    return
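Every RPC method in this hunk repeats one shape: run the master operation, wrap its procedure id into the protobuf response, and rewrap IOException as ServiceException. Purely as an illustration of that shape (no such helper exists in MasterRpcServices), it could be factored as:

import java.io.IOException;
import java.util.function.LongFunction;
import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;

final class RpcWrapSketch {
  @FunctionalInterface
  interface MasterCall {
    long call() throws IOException; // returns the procedure id
  }

  static <R> R submit(MasterCall op, LongFunction<R> toResponse) throws ServiceException {
    try {
      return toResponse.apply(op.call());
    } catch (IOException ioe) {
      throw new ServiceException(ioe); // protobuf RPC cannot surface IOException directly
    }
  }
}
// usage (sketch): return submit(
//     () -> master.deleteTable(tableName, nonceGroup, nonce),
//     procId -> DeleteTableResponse.newBuilder().setProcId(procId).build());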

[11/51] [partial] hbase-site git commit: Published site at ba5d1c1f28301adc99019d9d6c4a04fac98ae511.

2018-07-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/804782f0/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.html b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.html
index c5acc87..638f1aa 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.html
@@ -25,239 +25,240 @@
 017 */
 018package org.apache.hadoop.hbase.ipc;
 019
-020import org.apache.hbase.thirdparty.com.google.protobuf.Message;
-021import org.apache.hbase.thirdparty.com.google.protobuf.Message.Builder;
-022import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;
-023
-024import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;
-025import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufInputStream;
-026import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufOutputStream;
-027import org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler;
-028import org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext;
-029import org.apache.hbase.thirdparty.io.netty.channel.ChannelPromise;
-030import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateEvent;
-031import org.apache.hbase.thirdparty.io.netty.util.concurrent.PromiseCombiner;
-032
-033import java.io.IOException;
-034import java.util.HashMap;
-035import java.util.Map;
-036
-037import org.apache.hadoop.hbase.CellScanner;
-038import org.apache.yetus.audience.InterfaceAudience;
-039import org.slf4j.Logger;
-040import org.slf4j.LoggerFactory;
-041import org.apache.hadoop.hbase.codec.Codec;
-042import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta;
-043import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse;
-044import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
-045import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader;
-046import org.apache.hadoop.io.compress.CompressionCodec;
-047import org.apache.hadoop.ipc.RemoteException;
-048
-049/**
-050 * The netty rpc handler.
-051 * @since 2.0.0
-052 */
-053@InterfaceAudience.Private
-054class NettyRpcDuplexHandler extends ChannelDuplexHandler {
-055
-056  private static final Logger LOG = LoggerFactory.getLogger(NettyRpcDuplexHandler.class);
-057
-058  private final NettyRpcConnection conn;
-059
-060  private final CellBlockBuilder cellBlockBuilder;
-061
-062  private final Codec codec;
-063
-064  private final CompressionCodec compressor;
-065
-066  private final Map<Integer, Call> id2Call = new HashMap<>();
-067
-068  public NettyRpcDuplexHandler(NettyRpcConnection conn, CellBlockBuilder cellBlockBuilder,
-069      Codec codec, CompressionCodec compressor) {
-070    this.conn = conn;
-071    this.cellBlockBuilder = cellBlockBuilder;
-072    this.codec = codec;
-073    this.compressor = compressor;
-074
-075  }
-076
-077  private void writeRequest(ChannelHandlerContext ctx, Call call, ChannelPromise promise)
-078      throws IOException {
-079    id2Call.put(call.id, call);
-080    ByteBuf cellBlock = cellBlockBuilder.buildCellBlock(codec, compressor, call.cells, ctx.alloc());
-081    CellBlockMeta cellBlockMeta;
-082    if (cellBlock != null) {
-083      CellBlockMeta.Builder cellBlockMetaBuilder = CellBlockMeta.newBuilder();
-084      cellBlockMetaBuilder.setLength(cellBlock.writerIndex());
-085      cellBlockMeta = cellBlockMetaBuilder.build();
-086    } else {
-087      cellBlockMeta = null;
-088    }
-089    RequestHeader requestHeader = IPCUtil.buildRequestHeader(call, cellBlockMeta);
-090    int sizeWithoutCellBlock = IPCUtil.getTotalSizeWhenWrittenDelimited(requestHeader, call.param);
-091    int totalSize = cellBlock != null ? sizeWithoutCellBlock + cellBlock.writerIndex()
-092        : sizeWithoutCellBlock;
-093    ByteBuf buf = ctx.alloc().buffer(sizeWithoutCellBlock + 4);
-094    buf.writeInt(totalSize);
-095    try (ByteBufOutputStream bbos = new ByteBufOutputStream(buf)) {
-096      requestHeader.writeDelimitedTo(bbos);
-097      if (call.param != null) {
-098        call.param.writeDelimitedTo(bbos);
-099      }
-100      if (cellBlock != null) {
-101        ChannelPromise withoutCellBlockPromise = ctx.newPromise();
-102        ctx.write(buf, withoutCellBlockPromise);
-103        ChannelPromise cellBlockPromise = ctx.newPromise();
-104        ctx.write(cellBlock, cellBlockPromise);
-105        PromiseCombiner combiner = new PromiseCombiner();
-106        combiner.addAll(withoutCellBlockPromise, cellBlockPromise);
-107        combiner.finish(promise);
-108      } else {
-109        ctx.write(buf, promise);
-110      }
-111    }
-112  }
-113
-114  @Override
-115  public void write(ChannelHandlerContext ctx, Object msg,
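writeRequest above frames each request as a 4-byte total length followed by varint-delimited protobuf messages (header, then param), with an optional cell block appended. The same framing with plain java.io in place of Netty buffers, a sketch using unshaded protobuf rather than the HBase implementation:

import com.google.protobuf.Message;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

final class FramingSketch {
  // Wire layout: [int32 totalSize][varint len][header bytes][varint len][param bytes]
  static byte[] frame(Message header, Message param) throws IOException {
    ByteArrayOutputStream body = new ByteArrayOutputStream();
    header.writeDelimitedTo(body);   // varint length prefix + message bytes
    if (param != null) {
      param.writeDelimitedTo(body);
    }
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(out);
    dos.writeInt(body.size());       // total size, read first by the peer
    body.writeTo(dos);
    dos.flush();
    return out.toByteArray();
  }
}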

[11/51] [partial] hbase-site git commit: Published site at b4759ce6e72f50ccd9d410bd5917dc5a515414f1.

2018-07-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/df8fd1d3/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RegionReplicaRackCostFunction.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RegionReplicaRackCostFunction.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RegionReplicaRackCostFunction.html
index 233dba3..91b9055 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RegionReplicaRackCostFunction.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RegionReplicaRackCostFunction.html
@@ -540,1205 +540,1204 @@
 532      sm.getRegionMetrics().forEach((byte[] regionName, RegionMetrics rm) -> {
 533        Deque<BalancerRegionLoad> rLoads = oldLoads.get(Bytes.toString(regionName));
 534        if (rLoads == null) {
-535          // There was nothing there
-536          rLoads = new ArrayDeque<>();
-537        } else if (rLoads.size() >= numRegionLoadsToRemember) {
-538          rLoads.remove();
-539        }
-540        rLoads.add(new BalancerRegionLoad(rm));
-541        loads.put(Bytes.toString(regionName), rLoads);
-542      });
-543    });
-544
-545    for(CostFromRegionLoadFunction cost : regionLoadFunctions) {
-546      cost.setLoads(loads);
-547    }
-548  }
-549
-550  protected void initCosts(Cluster cluster) {
-551    for (CostFunction c:costFunctions) {
-552      c.init(cluster);
-553    }
-554  }
-555
-556  protected void updateCostsWithAction(Cluster cluster, Action action) {
-557    for (CostFunction c : costFunctions) {
-558      c.postAction(action);
-559    }
-560  }
-561
-562  /**
-563   * Get the names of the cost functions
-564   */
-565  public String[] getCostFunctionNames() {
-566    if (costFunctions == null) return null;
-567    String[] ret = new String[costFunctions.length];
-568    for (int i = 0; i < costFunctions.length; i++) {
-569      CostFunction c = costFunctions[i];
-570      ret[i] = c.getClass().getSimpleName();
-571    }
-572
-573    return ret;
-574  }
-575
-576  /**
-577   * This is the main cost function.  It will compute a cost associated with a proposed cluster
-578   * state.  All different costs will be combined with their multipliers to produce a double cost.
-579   *
-580   * @param cluster The state of the cluster
-581   * @param previousCost the previous cost. This is used as an early out.
-582   * @return a double of a cost associated with the proposed cluster state.  This cost is an
-583   *     aggregate of all individual cost functions.
-584   */
-585  protected double computeCost(Cluster cluster, double previousCost) {
-586    double total = 0;
-587
-588    for (int i = 0; i < costFunctions.length; i++) {
-589      CostFunction c = costFunctions[i];
-590      this.tempFunctionCosts[i] = 0.0;
-591
-592      if (c.getMultiplier() <= 0) {
-593        continue;
-594      }
-595
-596      Float multiplier = c.getMultiplier();
-597      Double cost = c.cost();
-598
-599      this.tempFunctionCosts[i] = multiplier*cost;
-600      total += this.tempFunctionCosts[i];
-601
-602      if (total > previousCost) {
-603        break;
-604      }
-605    }
-606
-607    return total;
-608  }
-609
-610  /** Generates a candidate action to be applied to the cluster for cost function search */
-611  abstract static class CandidateGenerator {
-612    abstract Cluster.Action generate(Cluster cluster);
-613
-614    /**
-615     * From a list of regions pick a random one. Null can be returned which
-616     * {@link StochasticLoadBalancer#balanceCluster(Map)} recognize as signal to try a region move
-617     * rather than swap.
-618     *
-619     * @param cluster The state of the cluster
-620     * @param server index of the server
-621     * @param chanceOfNoSwap Chance that this will decide to try a move rather
-622     *   than a swap.
-623     * @return a random {@link RegionInfo} or null if an asymmetrical move is
-624     *   suggested.
-625     */
-626    protected int pickRandomRegion(Cluster cluster, int server, double chanceOfNoSwap) {
-627      // Check to see if this is just a move.
-628      if (cluster.regionsPerServer[server].length == 0 || RANDOM.nextFloat() < chanceOfNoSwap) {
-629        // signal a move only.
-630        return -1;
-631      }
-632      int rand = RANDOM.nextInt(cluster.regionsPerServer[server].length);
-633      return cluster.regionsPerServer[server][rand];
-634
-635    }
-636    protected int pickRandomServer(Cluster cluster) {
-637      if (cluster.numServers < 1) {
-638        return -1;
-639      }
-640
-641      return RANDOM.nextInt(cluster.numServers);
-642    }
-643
-644    protected int pickRandomRack(Cluster cluster) {
-645      if (cluster.numRacks < 1) {
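computeCost above depends on every cost function returning a non-negative, scaled value; that is what makes the early-out sound, since a partial sum that already exceeds previousCost can only grow. Reduced to its core (a sketch, not the balancer's code):

// multipliers[i] <= 0 disables a function; costs are assumed scaled to [0, 1].
static double computeCost(double[] multipliers, double[] costs, double previousCost) {
  double total = 0;
  for (int i = 0; i < multipliers.length; i++) {
    if (multipliers[i] <= 0) {
      continue;
    }
    total += multipliers[i] * costs[i];
    if (total > previousCost) {
      break; // early out: this candidate is already worse than the current plan
    }
  }
  return total;
}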

[11/51] [partial] hbase-site git commit: Published site at e66a6603e36ecd67237ca16acd5e2de03f0d372d.

2018-07-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0c6f447e/apidocs/org/apache/hadoop/hbase/class-use/CacheEvictionStats.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/CacheEvictionStats.html b/apidocs/org/apache/hadoop/hbase/class-use/CacheEvictionStats.html
index 08f91e2..166d35e 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/CacheEvictionStats.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/CacheEvictionStats.html
@@ -1,10 +1,10 @@
 <!DOCTYPE html>
-类 org.apache.hadoop.hbase.CacheEvictionStats的使用 (Apache HBase 3.0.0-SNAPSHOT API)
+Uses of Class org.apache.hadoop.hbase.CacheEvictionStats (Apache HBase 3.0.0-SNAPSHOT API)
@@ -12,7 +12,7 @@
-您的浏览器已禁用 JavaScript。
+JavaScript is disabled on your browser.
-跳过导航链接 / 概览 / 程序包 / 类 / 使用 / 树 / 已过时 / 索引 / 帮助 / 上一个 / 下一个 / 框架 / 无框架 / 所有类
+Skip navigation links / Overview / Package / Class / Use / Tree / Deprecated / Index / Help / Prev / Next / Frames / No Frames / All Classes
-类的使用 org.apache.hadoop.hbase.CacheEvictionStats
+Uses of Class org.apache.hadoop.hbase.CacheEvictionStats
@@ -106,32 +96,32 @@
-使用CacheEvictionStats的程序包 (程序包 / 说明)
+Packages that use CacheEvictionStats (Package / Description)
 org.apache.hadoop.hbase.client
-Provides HBase Client - Table of Contents - Overview - Example API Usage - Overview: To administer HBase, create and drop tables, list and alter tables, use Admin.
+Provides HBase Client
-org.apache.hadoop.hbase.client中CacheEvictionStats的使用
+Uses of CacheEvictionStats in org.apache.hadoop.hbase.client
-返回CacheEvictionStats的org.apache.hadoop.hbase.client中的方法
    [11/51] [partial] hbase-site git commit: Published site at e66a6603e36ecd67237ca16acd5e2de03f0d372d.
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5427a45e/apidocs/org/apache/hadoop/hbase/UnknownRegionException.html
    --
diff --git a/apidocs/org/apache/hadoop/hbase/UnknownRegionException.html b/apidocs/org/apache/hadoop/hbase/UnknownRegionException.html
    index 215bfca..63a5803 100644
    --- a/apidocs/org/apache/hadoop/hbase/UnknownRegionException.html
    +++ b/apidocs/org/apache/hadoop/hbase/UnknownRegionException.html
    @@ -1,6 +1,6 @@
 <!DOCTYPE html>
@@ -20,38 +20,38 @@
-JavaScript is disabled on your browser.
+您的浏览器已禁用 JavaScript。
-Skip navigation links / Overview / Package / Class / Use / Tree / Deprecated / Index / Help / Prev Class / Next Class / Frames / No Frames / All Classes
+跳过导航链接 / 概览 / 程序包 / 类 / 使用 / 树 / 已过时 / 索引 / 帮助 / 上一个类 / 下一个类 / 框架 / 无框架 / 所有类
 org.apache.hadoop.hbase
-Class UnknownRegionException
+类 UnknownRegionException


    [11/51] [partial] hbase-site git commit: Published site at 0f23784182ab88649de340d75804e0ff20dcd0fc.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bcb555af/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html
    index 05e032c..40ef9f4 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html
    @@ -25,767 +25,805 @@
     017 */
     018package 
    org.apache.hadoop.hbase.io.asyncfs;
     019
    -020import static 
    org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleState.READER_IDLE;
    -021import static 
    org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY;
    +020import static 
    org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY;
    +021import static 
    org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleState.READER_IDLE;
     022
    -023import 
    org.apache.hbase.thirdparty.com.google.common.base.Charsets;
    -024import 
    org.apache.hbase.thirdparty.com.google.common.base.Throwables;
    -025import 
    org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
    -026import 
    org.apache.hbase.thirdparty.com.google.common.collect.Maps;
    -027import 
    com.google.protobuf.CodedOutputStream;
    -028
    -029import 
    org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;
    -030import 
    org.apache.hbase.thirdparty.io.netty.buffer.ByteBufOutputStream;
    -031import 
    org.apache.hbase.thirdparty.io.netty.buffer.CompositeByteBuf;
    -032import 
    org.apache.hbase.thirdparty.io.netty.buffer.Unpooled;
    -033import 
    org.apache.hbase.thirdparty.io.netty.channel.Channel;
    -034import 
    org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler;
    -035import 
    org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext;
    -036import 
    org.apache.hbase.thirdparty.io.netty.channel.ChannelOutboundHandlerAdapter;
    -037import 
    org.apache.hbase.thirdparty.io.netty.channel.ChannelPipeline;
    -038import 
    org.apache.hbase.thirdparty.io.netty.channel.ChannelPromise;
    -039import 
    org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler;
    -040import 
    org.apache.hbase.thirdparty.io.netty.handler.codec.LengthFieldBasedFrameDecoder;
    -041import 
    org.apache.hbase.thirdparty.io.netty.handler.codec.MessageToByteEncoder;
    -042import 
    org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufDecoder;
    -043import 
    org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
    -044import 
    org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateEvent;
    -045import 
    org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateHandler;
    -046import 
    org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise;
    -047
    -048import java.io.IOException;
    -049import java.lang.reflect.Field;
    -050import 
    java.lang.reflect.InvocationTargetException;
    -051import java.lang.reflect.Method;
    -052import java.net.InetAddress;
    -053import java.net.InetSocketAddress;
    -054import java.nio.ByteBuffer;
    -055import 
    java.security.GeneralSecurityException;
    -056import java.util.Arrays;
    -057import java.util.Collections;
    -058import java.util.List;
    -059import java.util.Map;
    -060import java.util.Set;
    -061import java.util.concurrent.TimeUnit;
    -062import 
    java.util.concurrent.atomic.AtomicBoolean;
    -063
    -064import 
    javax.security.auth.callback.Callback;
    -065import 
    javax.security.auth.callback.CallbackHandler;
    -066import 
    javax.security.auth.callback.NameCallback;
    -067import 
    javax.security.auth.callback.PasswordCallback;
    -068import 
    javax.security.auth.callback.UnsupportedCallbackException;
    -069import 
    javax.security.sasl.RealmCallback;
    -070import 
    javax.security.sasl.RealmChoiceCallback;
    -071import javax.security.sasl.Sasl;
    -072import javax.security.sasl.SaslClient;
    -073import 
    javax.security.sasl.SaslException;
    -074
    -075import 
    org.apache.commons.codec.binary.Base64;
    -076import 
    org.apache.commons.lang3.StringUtils;
    -077import 
    org.apache.hadoop.conf.Configuration;
    -078import 
    org.apache.hadoop.crypto.CipherOption;
    -079import 
    org.apache.hadoop.crypto.CipherSuite;
    -080import 
    org.apache.hadoop.crypto.CryptoCodec;
    -081import 
    org.apache.hadoop.crypto.Decryptor;
    -082import 
    org.apache.hadoop.crypto.Encryptor;
    -083import 
    org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
    -084import 
    org.apache.hadoop.fs.FileEncryptionInfo;
    -085import 
    org.apache.yetus.audience.InterfaceAudience;
    -086import org.slf4j.Logger;
    -087import org.slf4j.LoggerFactory;
    -088
    -089import com.google.protobuf.ByteString;
    -090import 
    org.apache.hadoop.hdfs.DFSClient;
    -091import 
    org.apache.hadoop.hdfs.protocol.DatanodeInfo;
    -092import 
    org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
    

    [11/51] [partial] hbase-site git commit: Published site at 85b41f36e01214b6485c9352875c84ebf877dab3.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a5c66de0/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html
    index c10cfbf..a3e2f4a 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html
    @@ -3371,7 +3371,7 @@
     3363private V result = null;
     3364
     3365private final HBaseAdmin admin;
    -3366private final Long procId;
    +3366protected final Long procId;
     3367
     3368public ProcedureFuture(final 
    HBaseAdmin admin, final Long procId) {
     3369  this.admin = admin;
    @@ -3653,653 +3653,651 @@
     3645 * @return a description of the 
    operation
     3646 */
     3647protected String getDescription() 
    {
    -3648  return "Operation: " + 
    getOperationType() + ", "
    -3649  + "Table Name: " + 
    tableName.getNameWithNamespaceInclAsString();
    -3650
    -3651}
    -3652
    -3653protected abstract class 
    TableWaitForStateCallable implements WaitForStateCallable {
    -3654  @Override
    -3655  public void 
    throwInterruptedException() throws InterruptedIOException {
    -3656throw new 
    InterruptedIOException("Interrupted while waiting for operation: "
    -3657+ getOperationType() + " on 
    table: " + tableName.getNameWithNamespaceInclAsString());
    -3658  }
    -3659
    -3660  @Override
    -3661  public void 
    throwTimeoutException(long elapsedTime) throws TimeoutException {
    -3662throw new TimeoutException("The 
    operation: " + getOperationType() + " on table: " +
    -3663tableName.getNameAsString() 
    + " has not completed after " + elapsedTime + "ms");
    -3664  }
    -3665}
    -3666
    -3667@Override
    -3668protected V 
    postOperationResult(final V result, final long deadlineTs)
    -3669throws IOException, 
    TimeoutException {
    -3670  LOG.info(getDescription() + " 
    completed");
    -3671  return 
    super.postOperationResult(result, deadlineTs);
    -3672}
    -3673
    -3674@Override
    -3675protected V 
    postOperationFailure(final IOException exception, final long deadlineTs)
    -3676throws IOException, 
    TimeoutException {
    -3677  LOG.info(getDescription() + " 
    failed with " + exception.getMessage());
    -3678  return 
    super.postOperationFailure(exception, deadlineTs);
    -3679}
    -3680
    -3681protected void 
    waitForTableEnabled(final long deadlineTs)
    -3682throws IOException, 
    TimeoutException {
    -3683  waitForState(deadlineTs, new 
    TableWaitForStateCallable() {
    -3684@Override
    -3685public boolean checkState(int 
    tries) throws IOException {
    -3686  try {
    -3687if 
    (getAdmin().isTableAvailable(tableName)) {
    -3688  return true;
    -3689}
    -3690  } catch 
    (TableNotFoundException tnfe) {
    -3691LOG.debug("Table " + 
    tableName.getNameWithNamespaceInclAsString()
    -3692+ " was not enabled, 
    sleeping. tries=" + tries);
    -3693  }
    -3694  return false;
    -3695}
    -3696  });
    -3697}
    -3698
    -3699protected void 
    waitForTableDisabled(final long deadlineTs)
    -3700throws IOException, 
    TimeoutException {
    -3701  waitForState(deadlineTs, new 
    TableWaitForStateCallable() {
    -3702@Override
    -3703public boolean checkState(int 
    tries) throws IOException {
    -3704  return 
    getAdmin().isTableDisabled(tableName);
    -3705}
    -3706  });
    -3707}
    -3708
    -3709protected void 
    waitTableNotFound(final long deadlineTs)
    -3710throws IOException, 
    TimeoutException {
    -3711  waitForState(deadlineTs, new 
    TableWaitForStateCallable() {
    -3712@Override
    -3713public boolean checkState(int 
    tries) throws IOException {
    -3714  return 
    !getAdmin().tableExists(tableName);
    -3715}
    -3716  });
    -3717}
    -3718
    -3719protected void 
    waitForSchemaUpdate(final long deadlineTs)
    -3720throws IOException, 
    TimeoutException {
    -3721  waitForState(deadlineTs, new 
    TableWaitForStateCallable() {
    -3722@Override
    -3723public boolean checkState(int 
    tries) throws IOException {
    -3724  return 
    getAdmin().getAlterStatus(tableName).getFirst() == 0;
    -3725}
    -3726  });
    -3727}
    -3728
    -3729protected void 
    waitForAllRegionsOnline(final long deadlineTs, final byte[][] splitKeys)
    -3730throws IOException, 
    TimeoutException {
    -3731  final TableDescriptor desc = 
    getTableDescriptor();
    -3732  final AtomicInteger actualRegCount 
    = new AtomicInteger(0);
    -3733  final MetaTableAccessor.Visitor 
    visitor = new MetaTableAccessor.Visitor() {
    -3734@Override
    -3735public boolean visit(Result 
    rowResult) throws IOException {
    -3736  
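
The excerpt above shows HBaseAdmin's ProcedureFuture/TableWaitForStateCallable pattern: poll a state check until a deadline, translating interruption and timeout into dedicated exceptions. A minimal JDK-only sketch of that polling loop, for orientation (the StateCheck interface and the 100 ms pause are illustrative, not HBase API):

    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;

    public final class WaitForStateSketch {
      /** Illustrative stand-in for HBase's WaitForStateCallable. */
      interface StateCheck {
        boolean check(int tries) throws Exception;
      }

      /** Poll until the check passes or the deadline (epoch millis) expires. */
      static void waitForState(long deadlineMs, StateCheck check) throws Exception {
        int tries = 0;
        while (System.currentTimeMillis() < deadlineMs) {
          if (check.check(tries++)) {
            return;
          }
          TimeUnit.MILLISECONDS.sleep(100); // pause between polls
        }
        throw new TimeoutException("state not reached after " + tries + " tries");
      }

      public static void main(String[] args) throws Exception {
        long deadline = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(5);
        waitForState(deadline, tries -> tries >= 3); // passes on the fourth poll
        System.out.println("state reached");
      }
    }

In the real class, checkState() wraps calls such as getAdmin().isTableAvailable(tableName), and the timeout is reported through throwTimeoutException with the operation type and table name.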

    [11/51] [partial] hbase-site git commit: Published site at 6198e1fc7dfa85c3bc6b2855f9a5fb5f4b2354ff.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb5d2c62/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html 
    b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
    index 47f5b30..e42a6a6 100644
    --- a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
    +++ b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
    @@ -18,7 +18,7 @@
     catch(err) {
     }
     //-->
    -var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10};
    +var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10};
     var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
    Methods"],8:["t4","Concrete Methods"]};
     var altColor = "altColor";
     var rowColor = "rowColor";
    @@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
     
     
     @InterfaceAudience.Private
    -public class MasterRpcServices
    +public class MasterRpcServices
     extends RSRpcServices
     implements 
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface,
     
    org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService.BlockingInterface,
     
    org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService.BlockingInterface
     Implements the master RPC services.
    @@ -744,16 +744,21 @@ implements 
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
synchronousBalanceSwitch(boolean b)
     
     
    +org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.TransitReplicationPeerSyncReplicationStateResponse
+transitReplicationPeerSyncReplicationState(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
  org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.TransitReplicationPeerSyncReplicationStateRequest request)
    +
    +
     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse
truncateTable(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
  org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest request)
     
    -
    +
     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse
unassignRegion(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
  org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest req)
     
    -
    +
     org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse
updateReplicationPeerConfig(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
  org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest request)
    @@ -793,7 +798,7 @@ implements 
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
     
     
     LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG
     
     
     
    @@ -802,7 +807,7 @@ implements 
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
     
     
     master
-private final HMaster master
+private final HMaster master
     
     
     
    @@ -819,7 +824,7 @@ 

    [11/51] [partial] hbase-site git commit: Published site at 14087cc919da9f2e0b1a68f701f6365ad9d1d71f.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55ce8d97/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.RegionServerMetricsWrapperRunnable.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.RegionServerMetricsWrapperRunnable.html
     
    b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.RegionServerMetricsWrapperRunnable.html
    index 186e42e..c89b09b 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.RegionServerMetricsWrapperRunnable.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.RegionServerMetricsWrapperRunnable.html
    @@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
     
     
     
    -public class MetricsRegionServerWrapperImpl.RegionServerMetricsWrapperRunnable
    +public class MetricsRegionServerWrapperImpl.RegionServerMetricsWrapperRunnable
extends java.lang.Object
implements java.lang.Runnable
     This is the runnable that will be executed on the executor 
    every PERIOD number of seconds
    @@ -213,7 +213,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable
     
     
     lastRan
-private long lastRan
+private long lastRan
     
     
     
    @@ -222,7 +222,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable
     
     
     lastRequestCount
-private long lastRequestCount
+private long lastRequestCount
     
     
     
    @@ -239,7 +239,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable
     
     
     RegionServerMetricsWrapperRunnable
-public RegionServerMetricsWrapperRunnable()
+public RegionServerMetricsWrapperRunnable()
     
     
     
    @@ -256,7 +256,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable
     
     
     run
-public void run()
+public void run()
     
     Specified by:
run in interface java.lang.Runnable
    
    
    

    [11/51] [partial] hbase-site git commit: Published site at 72784c2d836a4b977667449d3adec5e8d15453f5.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2b11656f/devapidocs/src-html/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.html
    index 205572e..f6a9063 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.html
    @@ -125,75 +125,74 @@
     117String s = 
    getDesc(classifyExs(exceptions));
     118StringBuilder addrs = new 
    StringBuilder(s);
     119addrs.append("servers with issues: 
    ");
-120Set<String> uniqAddr = new HashSet<>();
-121uniqAddr.addAll(hostnamePort);
    -122
    -123for (String addr : uniqAddr) {
    -124  addrs.append(addr).append(", ");
    -125}
    -126return uniqAddr.isEmpty() ? 
    addrs.toString() : addrs.substring(0, addrs.length() - 2);
    -127  }
    -128
    -129  public String 
    getExhaustiveDescription() {
    -130StringWriter errorWriter = new 
    StringWriter();
    -131PrintWriter pw = new 
    PrintWriter(errorWriter);
-132for (int i = 0; i < this.exceptions.size(); ++i) {
    -133  Throwable t = 
    this.exceptions.get(i);
    -134  Row action = this.actions.get(i);
    -135  String server = 
    this.hostnameAndPort.get(i);
    -136  pw.append("exception");
-137  if (this.exceptions.size() > 1) {
    -138pw.append(" #" + i);
    -139  }
    -140  pw.append(" from " + server + " for 
    "
    -141+ ((action == null) ? "unknown 
    key" : Bytes.toStringBinary(action.getRow(;
    -142  if (t != null) {
    -143pw.println();
    -144t.printStackTrace(pw);
    -145  }
    -146}
    -147pw.flush();
    -148return errorWriter.toString();
    -149  }
+120Set<String> uniqAddr = new HashSet<>(hostnamePort);
    +121
    +122for (String addr : uniqAddr) {
    +123  addrs.append(addr).append(", ");
    +124}
    +125return uniqAddr.isEmpty() ? 
    addrs.toString() : addrs.substring(0, addrs.length() - 2);
    +126  }
    +127
    +128  public String 
    getExhaustiveDescription() {
    +129StringWriter errorWriter = new 
    StringWriter();
    +130PrintWriter pw = new 
    PrintWriter(errorWriter);
+131for (int i = 0; i < this.exceptions.size(); ++i) {
    +132  Throwable t = 
    this.exceptions.get(i);
    +133  Row action = this.actions.get(i);
    +134  String server = 
    this.hostnameAndPort.get(i);
    +135  pw.append("exception");
+136  if (this.exceptions.size() > 1) {
    +137pw.append(" #" + i);
    +138  }
    +139  pw.append(" from " + server + " for 
    "
    +140+ ((action == null) ? "unknown 
    key" : Bytes.toStringBinary(action.getRow(;
    +141  if (t != null) {
    +142pw.println();
    +143t.printStackTrace(pw);
    +144  }
    +145}
    +146pw.flush();
    +147return errorWriter.toString();
    +148  }
    +149
     150
    -151
-152  public static Map<String, Integer> classifyExs(List<Throwable> ths) {
-153Map<String, Integer> cls = new HashMap<>();
    -154for (Throwable t : ths) {
    -155  if (t == null) continue;
    -156  String name = "";
    -157  if (t instanceof 
    DoNotRetryIOException ||
    -158  t instanceof 
    RegionTooBusyException) {
    -159// If RegionTooBusyException, 
    print message since it has Region name in it.
    -160// RegionTooBusyException message 
    was edited to remove variance. Has regionname, server,
    -161// and why the exception; no 
    longer has duration it waited on lock nor current memsize.
    -162name = t.getMessage();
    -163  } else {
    -164name = 
    t.getClass().getSimpleName();
    -165  }
    -166  Integer i = cls.get(name);
    -167  if (i == null) {
    -168i = 0;
    -169  }
    -170  i += 1;
    -171  cls.put(name, i);
    -172}
    -173return cls;
    -174  }
    -175
-176  public static String getDesc(Map<String,Integer> classificaton) {
-177StringBuilder classificatons = new StringBuilder(11);
-178for (Map.Entry<String, Integer> e : classificaton.entrySet()) {
    -179  
    classificatons.append(e.getKey());
    -180  classificatons.append(": ");
    -181  
    classificatons.append(e.getValue());
    -182  classificatons.append(" time");
    -183  
    classificatons.append(pluralize(e.getValue()));
    -184  classificatons.append(", ");
    -185}
    -186return classificatons.toString();
    -187  }
    -188}
+151  public static Map<String, Integer> classifyExs(List<Throwable> ths) {
+152Map<String, Integer> cls = new HashMap<>();
    +153for (Throwable t : ths) {
    +154  if (t == null) continue;
    +155  String name = "";
    +156  if (t instanceof 
    DoNotRetryIOException ||
    +157  t instanceof 
    RegionTooBusyException) {
    +158// If RegionTooBusyException, 
    print message since it has Region name in it.
    +159// RegionTooBusyException message 
    was edited to remove variance. Has regionname, 
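
classifyExs in the diff above reduces a list of causes to a name-to-count map, keying on the message for RegionTooBusyException (whose message carries the region name) and on the simple class name otherwise. A condensed sketch of the same aggregation using Map.merge (class names and sample data are illustrative):

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public final class ExceptionClassifier {
      /** Count throwables by simple class name, skipping nulls. */
      public static Map<String, Integer> classify(List<Throwable> ths) {
        Map<String, Integer> cls = new HashMap<>();
        for (Throwable t : ths) {
          if (t == null) {
            continue;
          }
          cls.merge(t.getClass().getSimpleName(), 1, Integer::sum);
        }
        return cls;
      }

      public static void main(String[] args) {
        List<Throwable> ths = Arrays.asList(
            new IOException("a"), new IOException("b"), null,
            new IllegalStateException("c"));
        System.out.println(classify(ths)); // e.g. {IOException=2, IllegalStateException=1}
      }
    }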

    [11/51] [partial] hbase-site git commit: Published site at 9101fc246f86445006bfbcdfda5cc495016dc280.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/65565d77/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
    index eb16038..74bacd8 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
    @@ -6,7 +6,7 @@
     
     
     
    -001/*
    +001/**
     002 * Licensed to the Apache Software 
    Foundation (ASF) under one
     003 * or more contributor license 
    agreements.  See the NOTICE file
     004 * distributed with this work for 
    additional information
    @@ -48,692 +48,692 @@
     040import java.util.Map;
     041import java.util.Map.Entry;
     042import java.util.Objects;
    -043import java.util.Set;
    -044import 
    java.util.concurrent.ExecutionException;
    -045import java.util.concurrent.Future;
    -046import java.util.concurrent.TimeUnit;
    -047import 
    java.util.concurrent.TimeoutException;
    -048import 
    java.util.concurrent.atomic.AtomicInteger;
    -049import 
    java.util.concurrent.atomic.AtomicReference;
    -050import java.util.function.Function;
    -051import java.util.regex.Pattern;
    -052import java.util.stream.Collectors;
    -053import javax.servlet.ServletException;
    -054import javax.servlet.http.HttpServlet;
    -055import 
    javax.servlet.http.HttpServletRequest;
    -056import 
    javax.servlet.http.HttpServletResponse;
    -057import 
    org.apache.commons.lang3.StringUtils;
    -058import 
    org.apache.hadoop.conf.Configuration;
    -059import org.apache.hadoop.fs.Path;
    -060import 
    org.apache.hadoop.hbase.ClusterId;
    -061import 
    org.apache.hadoop.hbase.ClusterMetrics;
    -062import 
    org.apache.hadoop.hbase.ClusterMetrics.Option;
    -063import 
    org.apache.hadoop.hbase.ClusterMetricsBuilder;
    -064import 
    org.apache.hadoop.hbase.DoNotRetryIOException;
    -065import 
    org.apache.hadoop.hbase.HBaseIOException;
    -066import 
    org.apache.hadoop.hbase.HBaseInterfaceAudience;
    -067import 
    org.apache.hadoop.hbase.HConstants;
    -068import 
    org.apache.hadoop.hbase.InvalidFamilyOperationException;
    -069import 
    org.apache.hadoop.hbase.MasterNotRunningException;
    -070import 
    org.apache.hadoop.hbase.MetaTableAccessor;
    -071import 
    org.apache.hadoop.hbase.NamespaceDescriptor;
    -072import 
    org.apache.hadoop.hbase.PleaseHoldException;
    -073import 
    org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
    -074import 
    org.apache.hadoop.hbase.ScheduledChore;
    -075import 
    org.apache.hadoop.hbase.ServerName;
    -076import 
    org.apache.hadoop.hbase.TableDescriptors;
    -077import 
    org.apache.hadoop.hbase.TableName;
    -078import 
    org.apache.hadoop.hbase.TableNotDisabledException;
    -079import 
    org.apache.hadoop.hbase.TableNotFoundException;
    -080import 
    org.apache.hadoop.hbase.UnknownRegionException;
    -081import 
    org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
    -082import 
    org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    -083import 
    org.apache.hadoop.hbase.client.MasterSwitchType;
    -084import 
    org.apache.hadoop.hbase.client.RegionInfo;
    -085import 
    org.apache.hadoop.hbase.client.Result;
    -086import 
    org.apache.hadoop.hbase.client.TableDescriptor;
    -087import 
    org.apache.hadoop.hbase.client.TableDescriptorBuilder;
    -088import 
    org.apache.hadoop.hbase.client.TableState;
    -089import 
    org.apache.hadoop.hbase.client.VersionInfoUtil;
    -090import 
    org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
    -091import 
    org.apache.hadoop.hbase.exceptions.DeserializationException;
    -092import 
    org.apache.hadoop.hbase.exceptions.MergeRegionException;
    -093import 
    org.apache.hadoop.hbase.executor.ExecutorType;
    -094import 
    org.apache.hadoop.hbase.favored.FavoredNodesManager;
    -095import 
    org.apache.hadoop.hbase.favored.FavoredNodesPromoter;
    -096import 
    org.apache.hadoop.hbase.http.InfoServer;
    -097import 
    org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
    -098import 
    org.apache.hadoop.hbase.ipc.RpcServer;
    -099import 
    org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
    -100import 
    org.apache.hadoop.hbase.log.HBaseMarkers;
    -101import 
    org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode;
    -102import 
    org.apache.hadoop.hbase.master.assignment.AssignmentManager;
    -103import 
    org.apache.hadoop.hbase.master.assignment.MergeTableRegionsProcedure;
    -104import 
    org.apache.hadoop.hbase.master.assignment.RegionStates;
    -105import 
    org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
    -106import 
    org.apache.hadoop.hbase.master.balancer.BalancerChore;
    -107import 
    org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer;
    -108import 
    org.apache.hadoop.hbase.master.balancer.ClusterStatusChore;
    -109import 
    org.apache.hadoop.hbase.master.balancer.LoadBalancerFactory;
    -110import 
    org.apache.hadoop.hbase.master.cleaner.CleanerChore;
    -111import 
    org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
    -112import 
    

    [11/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvDecoder.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvDecoder.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvDecoder.html
    index 83c17c0..9df0225 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvDecoder.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvDecoder.html
    @@ -54,323 +54,362 @@
     046import org.apache.hadoop.io.IOUtils;
     047
     048import 
    org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
    -049
    +049import 
    org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
     050
    -051/**
    -052 * Compression in this class is lifted 
    off Compressor/KeyValueCompression.
    -053 * This is a pure coincidence... they are 
    independent and don't have to be compatible.
    -054 *
    -055 * This codec is used at server side for 
    writing cells to WAL as well as for sending edits
    -056 * as part of the distributed splitting 
    process.
    -057 */
    -058@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
    -059  HBaseInterfaceAudience.PHOENIX, 
    HBaseInterfaceAudience.CONFIG})
    -060public class WALCellCodec implements 
    Codec {
    -061  /** Configuration key for the class to 
    use when encoding cells in the WAL */
    -062  public static final String 
    WAL_CELL_CODEC_CLASS_KEY = "hbase.regionserver.wal.codec";
    -063
    -064  protected final CompressionContext 
    compression;
    -065  protected final ByteStringUncompressor 
    statelessUncompressor = new ByteStringUncompressor() {
    -066@Override
    -067public byte[] uncompress(ByteString 
    data, Dictionary dict) throws IOException {
    -068  return 
    WALCellCodec.uncompressByteString(data, dict);
    -069}
    -070  };
    -071
    -072  /**
-073   * <b>All subclasses must implement a no argument constructor</b>
    -074   */
    -075  public WALCellCodec() {
    -076this.compression = null;
    -077  }
    -078
    -079  /**
-080   * Default constructor - <b>all subclasses must implement a constructor with this signature</b>
-081   * if they are to be dynamically loaded from the {@link Configuration}.
-082   * @param conf configuration to configure <tt>this</tt>
-083   * @param compression compression the codec should support, can be <tt>null</tt> to indicate no
-084   *  compression
    -085   */
    -086  public WALCellCodec(Configuration conf, 
    CompressionContext compression) {
    -087this.compression = compression;
    -088  }
    -089
    -090  public static String 
    getWALCellCodecClass(Configuration conf) {
    -091return 
    conf.get(WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
    -092  }
    -093
    -094  /**
    -095   * Create and setup a {@link 
    WALCellCodec} from the {@code cellCodecClsName} and
    -096   * CompressionContext, if {@code 
    cellCodecClsName} is specified.
    -097   * Otherwise Cell Codec classname is 
    read from {@link Configuration}.
    -098   * Fully prepares the codec for use.
    -099   * @param conf {@link Configuration} to 
    read for the user-specified codec. If none is specified,
    -100   *  uses a {@link 
    WALCellCodec}.
    -101   * @param cellCodecClsName name of 
    codec
    -102   * @param compression compression the 
    codec should use
    -103   * @return a {@link WALCellCodec} ready 
    for use.
    -104   * @throws 
    UnsupportedOperationException if the codec cannot be instantiated
    -105   */
    -106
    -107  public static WALCellCodec 
    create(Configuration conf, String cellCodecClsName,
    -108  CompressionContext compression) 
    throws UnsupportedOperationException {
    -109if (cellCodecClsName == null) {
    -110  cellCodecClsName = 
    getWALCellCodecClass(conf);
    -111}
    -112return 
    ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
    -113{ Configuration.class, 
    CompressionContext.class }, new Object[] { conf, compression });
    -114  }
    -115
    -116  /**
    -117   * Create and setup a {@link 
    WALCellCodec} from the
    -118   * CompressionContext.
    -119   * Cell Codec classname is read from 
    {@link Configuration}.
    -120   * Fully prepares the codec for use.
    -121   * @param conf {@link Configuration} to 
    read for the user-specified codec. If none is specified,
    -122   *  uses a {@link 
    WALCellCodec}.
    -123   * @param compression compression the 
    codec should use
    -124   * @return a {@link WALCellCodec} ready 
    for use.
    -125   * @throws 
    UnsupportedOperationException if the codec cannot be instantiated
    -126   */
    -127  public static WALCellCodec 
    create(Configuration conf,
    -128  CompressionContext compression) 
    throws UnsupportedOperationException {
    -129String cellCodecClsName = 
    getWALCellCodecClass(conf);
    -130return 
    ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
    -131{ Configuration.class, 
    CompressionContext.class }, new Object[] { conf, compression });
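
The create() factories above read the codec class name from configuration and instantiate it reflectively through a (Configuration, CompressionContext) constructor, which is why subclasses must keep that exact signature. A small sketch of the same reflective-factory pattern, with a plain Map standing in for Hadoop's Configuration and invented Codec types:

    import java.lang.reflect.Constructor;
    import java.util.HashMap;
    import java.util.Map;

    public final class ReflectiveFactory {
      /** Stand-in for Hadoop's Configuration: key -> class name lookup. */
      static final Map<String, String> CONF = new HashMap<>();
      static final String CODEC_KEY = "demo.codec.class"; // illustrative key

      public static class Codec {
        public Codec(Map<String, String> conf) { }
        public String name() { return getClass().getSimpleName(); }
      }

      public static class FancyCodec extends Codec {
        public FancyCodec(Map<String, String> conf) { super(conf); }
      }

      /** Instantiate the configured codec via its (Map) constructor. */
      static Codec create(Map<String, String> conf) throws Exception {
        String clsName = conf.getOrDefault(CODEC_KEY, Codec.class.getName());
        Class<?> cls = Class.forName(clsName);
        Constructor<?> ctor = cls.getConstructor(Map.class); // the required signature
        return (Codec) ctor.newInstance(conf);
      }

      public static void main(String[] args) throws Exception {
        CONF.put(CODEC_KEY, FancyCodec.class.getName());
        System.out.println(create(CONF).name()); // FancyCodec
      }
    }

If the configured class lacks the expected constructor, getConstructor throws, which mirrors the UnsupportedOperationException contract documented in the Javadoc above.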
    

    [11/51] [partial] hbase-site git commit: Published site at 7d3750bd9fc9747623549c242cc4171e224b3eaf.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3469cbc0/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.html
    index 8b5905e..98e88a4 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.html
    @@ -35,465 +35,461 @@
     027import 
    org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
     028import 
    org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
     029import 
    org.apache.hadoop.hbase.master.procedure.TableProcedureInterface;
    -030import 
    org.apache.hadoop.hbase.procedure2.Procedure;
    -031import 
    org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
    -032import 
    org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
    -033import 
    org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteOperation;
    -034import 
    org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteProcedure;
    -035import 
    org.apache.hadoop.hbase.procedure2.RemoteProcedureException;
    -036import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
    -037import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RegionTransitionState;
    -038import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
    -039import 
    org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
    -040import 
    org.apache.yetus.audience.InterfaceAudience;
    -041import org.slf4j.Logger;
    -042import org.slf4j.LoggerFactory;
    -043
    -044/**
    -045 * Base class for the Assign and Unassign 
    Procedure.
    -046 *
    -047 * Locking:
    -048 * Takes exclusive lock on the region 
    being assigned/unassigned. Thus, there can only be one
    -049 * RegionTransitionProcedure per region 
    running at a time (see MasterProcedureScheduler).
    -050 *
-051 * <p>This procedure is asynchronous and responds to external events.
-052 * The AssignmentManager will notify this procedure when the RS completes
-053 * the operation and reports the transitioned state
-054 * (see the Assign and Unassign class for more detail).</p>
-055 *
-056 * <p>Procedures move from the REGION_TRANSITION_QUEUE state when they are
-057 * first submitted, to the REGION_TRANSITION_DISPATCH state when the request
-058 * to remote server is sent and the Procedure is suspended waiting on external
-059 * event to be woken again. Once the external event is triggered, Procedure
-060 * moves to the REGION_TRANSITION_FINISH state.</p>
-061 *
-062 * <p>NOTE: {@link AssignProcedure} and {@link UnassignProcedure} should not be thought of
-063 * as being asymmetric, at least currently.
-064 * <ul>
-065 * <li>{@link AssignProcedure} moves through all the above described states and implements methods
-066 * associated with each while {@link UnassignProcedure} starts at state
-067 * REGION_TRANSITION_DISPATCH and state REGION_TRANSITION_QUEUE is not supported.</li>
-068 *
-069 * <li>When any step in {@link AssignProcedure} fails, failure handler
-070 * AssignProcedure#handleFailure(MasterProcedureEnv, RegionStateNode) re-attempts the
-071 * assignment by setting the procedure state to REGION_TRANSITION_QUEUE and forces
-072 * assignment to a different target server by setting {@link AssignProcedure#forceNewPlan}. When
-073 * the number of attempts reaches threshold configuration 'hbase.assignment.maximum.attempts',
-074 * the procedure is aborted. For {@link UnassignProcedure}, similar re-attempts are
-075 * intentionally not implemented. It is a 'one shot' procedure. See its class doc for how it
-076 * handles failure.
-077 * </li>
-078 * <li>If we find a region in an 'unexpected' state, we'll complain and retry with backoff forever.
-079 * The 'unexpected' state needs to be fixed either by another running Procedure or by operator
-080 * intervention (Regions in 'unexpected' state indicates bug or unexpected transition type).
-081 * For this to work, subclasses need to persist the 'attempt' counter kept in this class when
-082 * they do serializeStateData and restore it inside their deserializeStateData, just as they do
-083 * for {@link #regionInfo}.
-084 * </li>
-085 * </ul>
-086 * </p>
-087 *
-088 * <p>TODO: Considering it is a priority doing all we can to get make a region available as soon as
-089 * possible, re-attempting with any target makes sense if specified target fails in case of
-090 * {@link AssignProcedure}. For {@link UnassignProcedure}, our concern is preventing data loss
-091 * on failed unassign. See class doc for explanation.
-092 */
    -093@InterfaceAudience.Private
    -094public abstract class 
    RegionTransitionProcedure
    -095
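
The class comment above describes a three-state flow (REGION_TRANSITION_QUEUE, then DISPATCH, then FINISH) with a persisted attempt counter that caps re-queues. A toy state machine capturing that shape (the states, retry limit, and step driver are illustrative; this is not the HBase procedure framework):

    public final class TransitionDemo {
      enum State { REGION_TRANSITION_QUEUE, REGION_TRANSITION_DISPATCH, REGION_TRANSITION_FINISH }

      static final int MAX_ATTEMPTS = 3; // stand-in for hbase.assignment.maximum.attempts

      private State state = State.REGION_TRANSITION_QUEUE;
      private int attempt = 0; // would be persisted in serializeStateData

      /** Advance one step; returns false once finished. */
      boolean step(boolean dispatchSucceeds) {
        switch (state) {
          case REGION_TRANSITION_QUEUE:
            state = State.REGION_TRANSITION_DISPATCH; // send request to the RS
            return true;
          case REGION_TRANSITION_DISPATCH:
            if (dispatchSucceeds) {
              state = State.REGION_TRANSITION_FINISH;
              return true;
            }
            if (++attempt >= MAX_ATTEMPTS) {
              throw new IllegalStateException("aborting after " + attempt + " attempts");
            }
            state = State.REGION_TRANSITION_QUEUE; // re-queue, force a new plan
            return true;
          default:
            return false; // FINISH: nothing left to do
        }
      }

      public static void main(String[] args) {
        TransitionDemo p = new TransitionDemo();
        // Fail the first dispatch, succeed the second.
        boolean[] outcomes = { true, false, true, true, true };
        int i = 0;
        while (p.step(outcomes[i++])) { System.out.println(p.state); }
      }
    }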

    [11/51] [partial] hbase-site git commit: Published site at 997747076d8ec0b4346d7cb99c4b0667a7c14905.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4df09ed9/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
    index d7e2bf4..dbfd9ee 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
    @@ -103,2497 +103,2454 @@
     095import 
    org.apache.hadoop.hbase.io.hfile.HFile;
     096import 
    org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
     097import 
    org.apache.hadoop.hbase.ipc.RpcServer;
    -098import 
    org.apache.hadoop.hbase.net.Address;
    -099import 
    org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    -100import 
    org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
    -101import 
    org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService;
    -102import 
    org.apache.hadoop.hbase.quotas.GlobalQuotaSettings;
    -103import 
    org.apache.hadoop.hbase.regionserver.BloomType;
    -104import 
    org.apache.hadoop.hbase.regionserver.FlushLifeCycleTracker;
    -105import 
    org.apache.hadoop.hbase.regionserver.InternalScanner;
    -106import 
    org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
    -107import 
    org.apache.hadoop.hbase.regionserver.Region;
    -108import 
    org.apache.hadoop.hbase.regionserver.RegionScanner;
    -109import 
    org.apache.hadoop.hbase.regionserver.ScanType;
    -110import 
    org.apache.hadoop.hbase.regionserver.ScannerContext;
    -111import 
    org.apache.hadoop.hbase.regionserver.Store;
    -112import 
    org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
    -113import 
    org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
    -114import 
    org.apache.hadoop.hbase.replication.ReplicationEndpoint;
    -115import 
    org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
    -116import 
    org.apache.hadoop.hbase.security.AccessDeniedException;
    -117import 
    org.apache.hadoop.hbase.security.Superusers;
    -118import 
    org.apache.hadoop.hbase.security.User;
    -119import 
    org.apache.hadoop.hbase.security.UserProvider;
    -120import 
    org.apache.hadoop.hbase.security.access.Permission.Action;
    -121import 
    org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
    -122import 
    org.apache.hadoop.hbase.util.ByteRange;
    -123import 
    org.apache.hadoop.hbase.util.Bytes;
    -124import 
    org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
    -125import 
    org.apache.hadoop.hbase.util.Pair;
    -126import 
    org.apache.hadoop.hbase.util.SimpleMutableByteRange;
    -127import 
    org.apache.hadoop.hbase.wal.WALEdit;
    -128import 
    org.apache.hadoop.hbase.zookeeper.ZKWatcher;
    -129import 
    org.apache.yetus.audience.InterfaceAudience;
    -130import org.slf4j.Logger;
    -131import org.slf4j.LoggerFactory;
    -132
    -133import 
    org.apache.hbase.thirdparty.com.google.common.collect.ArrayListMultimap;
    -134import 
    org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
    -135import 
    org.apache.hbase.thirdparty.com.google.common.collect.ListMultimap;
    -136import 
    org.apache.hbase.thirdparty.com.google.common.collect.Lists;
    -137import 
    org.apache.hbase.thirdparty.com.google.common.collect.MapMaker;
    -138import 
    org.apache.hbase.thirdparty.com.google.common.collect.Maps;
    -139import 
    org.apache.hbase.thirdparty.com.google.common.collect.Sets;
    -140
    -141/**
-142 * Provides basic authorization checks for data access and administrative
-143 * operations.
-144 *
-145 * <p>
-146 * {@code AccessController} performs authorization checks for HBase operations
-147 * based on:
-148 * </p>
-149 * <ul>
-150 *   <li>the identity of the user performing the operation</li>
-151 *   <li>the scope over which the operation is performed, in increasing
-152 *   specificity: global, table, column family, or qualifier</li>
-153 *   <li>the type of action being performed (as mapped to
-154 *   {@link Permission.Action} values)</li>
-155 * </ul>
-156 * <p>
-157 * If the authorization check fails, an {@link AccessDeniedException}
-158 * will be thrown for the operation.
-159 * </p>
-160 *
-161 * <p>
-162 * To perform authorization checks, {@code AccessController} relies on the
-163 * RpcServerEngine being loaded to provide
-164 * the user identities for remote requests.
-165 * </p>
-166 *
-167 * <p>
-168 * The access control lists used for authorization can be manipulated via the
-169 * exposed {@link AccessControlService} Interface implementation, and the associated
-170 * {@code grant}, {@code revoke}, and {@code user_permission} HBase shell
-171 * commands.
-172 * </p>
-173 */
    -174@CoreCoprocessor
    -175@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
    -176public class AccessController implements 
    MasterCoprocessor, RegionCoprocessor,
    -177RegionServerCoprocessor, 
    AccessControlService.Interface,
    -178MasterObserver, RegionObserver, 
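
The Javadoc above frames authorization as a function of user identity, scope (global, table, column family, qualifier), and requested action. A toy check illustrating that shape (the grants table and two-level scope model are invented for the example; the real AccessController semantics are much richer):

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    public final class AuthDemo {
      enum Action { READ, WRITE, ADMIN }

      /** grants: user -> scope -> allowed actions; "" is the global scope. */
      static final Map<String, Map<String, Set<Action>>> GRANTS = new HashMap<>();

      static void grant(String user, String scope, Action a) {
        GRANTS.computeIfAbsent(user, u -> new HashMap<>())
              .computeIfAbsent(scope, s -> new HashSet<>()).add(a);
      }

      /** Allowed if granted at the global scope or at the exact scope. */
      static boolean authorized(String user, String scope, Action a) {
        Map<String, Set<Action>> byScope = GRANTS.getOrDefault(user, Map.of());
        return byScope.getOrDefault("", Set.of()).contains(a)
            || byScope.getOrDefault(scope, Set.of()).contains(a);
      }

      public static void main(String[] args) {
        grant("alice", "table1", Action.READ);
        grant("bob", "", Action.ADMIN); // global grant
        System.out.println(authorized("alice", "table1", Action.READ));  // true
        System.out.println(authorized("alice", "table1", Action.WRITE)); // false
        System.out.println(authorized("bob", "table9", Action.ADMIN));   // true
      }
    }

On a failed check the real coprocessor throws AccessDeniedException rather than returning false.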
    

    [11/51] [partial] hbase-site git commit: Published site at f3d1c021de2264301f68eadb9ef126ff83d7ef53.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/883dde2f/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.ServerEventsListenerThread.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.ServerEventsListenerThread.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.ServerEventsListenerThread.html
    index 54b1f96..ed95cbf 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.ServerEventsListenerThread.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.ServerEventsListenerThread.html
    @@ -31,922 +31,906 @@
     023import java.io.ByteArrayInputStream;
     024import java.io.IOException;
     025import java.util.ArrayList;
    -026import java.util.Arrays;
    -027import java.util.Collections;
    -028import java.util.HashMap;
    -029import java.util.HashSet;
    -030import java.util.LinkedList;
    -031import java.util.List;
    -032import java.util.Map;
    -033import java.util.NavigableSet;
    -034import java.util.Set;
    -035import java.util.SortedSet;
    -036import java.util.TreeSet;
    -037import 
    java.util.concurrent.atomic.AtomicBoolean;
    -038
    -039import 
    org.apache.hadoop.conf.Configuration;
    -040import org.apache.hadoop.hbase.Cell;
    -041import 
    org.apache.hadoop.hbase.CellUtil;
    -042import 
    org.apache.hadoop.hbase.Coprocessor;
    -043import 
    org.apache.hadoop.hbase.DoNotRetryIOException;
    -044import 
    org.apache.hadoop.hbase.HColumnDescriptor;
    -045import 
    org.apache.hadoop.hbase.HConstants;
    -046import 
    org.apache.hadoop.hbase.HTableDescriptor;
    -047import 
    org.apache.hadoop.hbase.MetaTableAccessor;
    -048import 
    org.apache.hadoop.hbase.MetaTableAccessor.DefaultVisitorBase;
    -049import 
    org.apache.hadoop.hbase.ServerName;
    -050import 
    org.apache.hadoop.hbase.TableName;
    -051import 
    org.apache.hadoop.hbase.client.ClusterConnection;
    -052import 
    org.apache.hadoop.hbase.client.Delete;
    -053import 
    org.apache.hadoop.hbase.client.Get;
    -054import 
    org.apache.hadoop.hbase.client.Mutation;
    -055import 
    org.apache.hadoop.hbase.client.Put;
    -056import 
    org.apache.hadoop.hbase.client.RegionInfo;
    -057import 
    org.apache.hadoop.hbase.client.Result;
    -058import 
    org.apache.hadoop.hbase.client.Scan;
    -059import 
    org.apache.hadoop.hbase.client.Table;
    -060import 
    org.apache.hadoop.hbase.client.TableState;
    -061import 
    org.apache.hadoop.hbase.constraint.ConstraintException;
    -062import 
    org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
    -063import 
    org.apache.hadoop.hbase.exceptions.DeserializationException;
    -064import 
    org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
    -065import 
    org.apache.hadoop.hbase.master.MasterServices;
    -066import 
    org.apache.hadoop.hbase.master.ServerListener;
    -067import 
    org.apache.hadoop.hbase.master.TableStateManager;
    -068import 
    org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
    -069import 
    org.apache.hadoop.hbase.net.Address;
    -070import 
    org.apache.hadoop.hbase.procedure2.Procedure;
    -071import 
    org.apache.hadoop.hbase.protobuf.ProtobufMagic;
    -072import 
    org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    -073import 
    org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
    -074import 
    org.apache.hadoop.hbase.protobuf.generated.RSGroupProtos;
    -075import 
    org.apache.hadoop.hbase.quotas.QuotaTableUtil;
    -076import 
    org.apache.hadoop.hbase.quotas.QuotaUtil;
    -077import 
    org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
    -078import 
    org.apache.hadoop.hbase.security.access.AccessControlLists;
    -079import 
    org.apache.hadoop.hbase.util.Bytes;
    -080import 
    org.apache.hadoop.hbase.zookeeper.ZKUtil;
    -081import 
    org.apache.hadoop.hbase.zookeeper.ZKWatcher;
    -082import 
    org.apache.hadoop.hbase.zookeeper.ZNodePaths;
    -083import 
    org.apache.yetus.audience.InterfaceAudience;
    -084import 
    org.apache.zookeeper.KeeperException;
    -085import org.slf4j.Logger;
    -086import org.slf4j.LoggerFactory;
    -087
    -088import 
    org.apache.hbase.thirdparty.com.google.common.collect.Lists;
    -089import 
    org.apache.hbase.thirdparty.com.google.common.collect.Maps;
    -090import 
    org.apache.hbase.thirdparty.com.google.common.collect.Sets;
    -091import 
    org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
    -092import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
    -093
    -094/**
-095 * This is an implementation of {@link RSGroupInfoManager} which makes
-096 * use of an HBase table as the persistence store for the group information.
-097 * It also makes use of zookeeper to store group information needed
-098 * for bootstrapping during offline mode.
-099 *
-100 * <h2>Concurrency</h2>
-101 * RSGroup state is kept locally in Maps. There is a rsgroup name to cached
-102 * RSGroupInfo Map at {@link #rsGroupMap} and a Map of tables to the name of the
-103 * rsgroup they belong too (in {@link #tableMap}). These Maps are persisted to the
-104 * hbase:rsgroup table (and cached in zk) on each modification.
    -105 *
    -106 * 
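
Per the concurrency note above, group state lives in in-memory maps and every modification is written through to the hbase:rsgroup table and mirrored to ZooKeeper. A small write-through-cache sketch of that idea, with a plain map standing in for the persistent store (requires Java 10+ for Map.copyOf; names are invented):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    public final class WriteThroughGroups {
      /** Stand-in for the hbase:rsgroup table / zk mirror. */
      private final Map<String, String> store = new HashMap<>();
      /** Read-only snapshot handed out to readers; replaced wholesale on writes. */
      private volatile Map<String, String> cache = Collections.emptyMap();

      public synchronized void put(String group, String info) {
        store.put(group, info);    // persist first
        cache = Map.copyOf(store); // then publish a fresh immutable snapshot
      }

      public String get(String group) {
        return cache.get(group);   // lock-free reads against the snapshot
      }

      public static void main(String[] args) {
        WriteThroughGroups g = new WriteThroughGroups();
        g.put("default", "servers=3");
        System.out.println(g.get("default")); // servers=3
      }
    }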

    [11/51] [partial] hbase-site git commit: Published site at cf529f18a9959589fa635f78df4840472526ea2c.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7bcc960d/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncScanTest.html
    --
    diff --git 
    a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncScanTest.html
     
    b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncScanTest.html
    index 3f8844b..cdb9398 100644
    --- 
    a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncScanTest.html
    +++ 
    b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncScanTest.html
    @@ -140,2712 +140,2713 @@
     132public class PerformanceEvaluation 
    extends Configured implements Tool {
     133  static final String RANDOM_SEEK_SCAN = 
    "randomSeekScan";
     134  static final String RANDOM_READ = 
    "randomRead";
    -135  private static final Logger LOG = 
    LoggerFactory.getLogger(PerformanceEvaluation.class.getName());
    -136  private static final ObjectMapper 
    MAPPER = new ObjectMapper();
    -137  static {
    -138
    MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
    -139  }
    -140
    -141  public static final String TABLE_NAME = 
    "TestTable";
    -142  public static final String 
    FAMILY_NAME_BASE = "info";
    -143  public static final byte[] FAMILY_ZERO 
    = Bytes.toBytes("info0");
    -144  public static final byte[] COLUMN_ZERO 
    = Bytes.toBytes("" + 0);
    -145  public static final int 
    DEFAULT_VALUE_LENGTH = 1000;
    -146  public static final int ROW_LENGTH = 
    26;
    -147
    -148  private static final int ONE_GB = 1024 
    * 1024 * 1000;
    -149  private static final int 
    DEFAULT_ROWS_PER_GB = ONE_GB / DEFAULT_VALUE_LENGTH;
    -150  // TODO : should we make this 
    configurable
    -151  private static final int TAG_LENGTH = 
    256;
    -152  private static final DecimalFormat FMT 
    = new DecimalFormat("0.##");
    -153  private static final MathContext CXT = 
    MathContext.DECIMAL64;
    -154  private static final BigDecimal 
    MS_PER_SEC = BigDecimal.valueOf(1000);
    -155  private static final BigDecimal 
    BYTES_PER_MB = BigDecimal.valueOf(1024 * 1024);
    -156  private static final TestOptions 
    DEFAULT_OPTS = new TestOptions();
    -157
-158  private static Map<String, CmdDescriptor> COMMANDS = new TreeMap<>();
    -159  private static final Path PERF_EVAL_DIR 
    = new Path("performance_evaluation");
    -160
    -161  static {
    -162
    addCommandDescriptor(AsyncRandomReadTest.class, "asyncRandomRead",
    -163"Run async random read test");
    -164
    addCommandDescriptor(AsyncRandomWriteTest.class, "asyncRandomWrite",
    -165"Run async random write test");
    -166
    addCommandDescriptor(AsyncSequentialReadTest.class, "asyncSequentialRead",
    -167"Run async sequential read 
    test");
    -168
    addCommandDescriptor(AsyncSequentialWriteTest.class, "asyncSequentialWrite",
    -169"Run async sequential write 
    test");
    -170
    addCommandDescriptor(AsyncScanTest.class, "asyncScan",
    -171"Run async scan test (read every 
    row)");
    -172
    addCommandDescriptor(RandomReadTest.class, RANDOM_READ,
    -173  "Run random read test");
    -174
    addCommandDescriptor(RandomSeekScanTest.class, RANDOM_SEEK_SCAN,
    -175  "Run random seek and scan 100 
    test");
    -176
    addCommandDescriptor(RandomScanWithRange10Test.class, "scanRange10",
    -177  "Run random seek scan with both 
    start and stop row (max 10 rows)");
    -178
    addCommandDescriptor(RandomScanWithRange100Test.class, "scanRange100",
    -179  "Run random seek scan with both 
    start and stop row (max 100 rows)");
    -180
    addCommandDescriptor(RandomScanWithRange1000Test.class, "scanRange1000",
    -181  "Run random seek scan with both 
    start and stop row (max 1000 rows)");
    -182
    addCommandDescriptor(RandomScanWithRange1Test.class, "scanRange1",
    -183  "Run random seek scan with both 
    start and stop row (max 1 rows)");
    -184
    addCommandDescriptor(RandomWriteTest.class, "randomWrite",
    -185  "Run random write test");
    -186
    addCommandDescriptor(SequentialReadTest.class, "sequentialRead",
    -187  "Run sequential read test");
    -188
    addCommandDescriptor(SequentialWriteTest.class, "sequentialWrite",
    -189  "Run sequential write test");
    -190addCommandDescriptor(ScanTest.class, 
    "scan",
    -191  "Run scan test (read every 
    row)");
    -192
    addCommandDescriptor(FilteredScanTest.class, "filterScan",
    -193  "Run scan test using a filter to 
    find a specific row based on it's value " +
    -194  "(make sure to use --rows=20)");
    -195
    addCommandDescriptor(IncrementTest.class, "increment",
    -196  "Increment on each row; clients 
    overlap on keyspace so some concurrent operations");
    -197
    addCommandDescriptor(AppendTest.class, "append",
    -198  "Append on each row; clients 
    overlap on keyspace so some concurrent operations");
    -199
    addCommandDescriptor(CheckAndMutateTest.class, "checkAndMutate",
    -200  "CheckAndMutate on each row; 
    clients overlap on keyspace so some concurrent operations");
    -201
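For context, the static block above is a simple name-to-class command registry. A minimal free-standing sketch of that pattern follows; the exact fields of CmdDescriptor are assumed from the surrounding code, not copied from the HBase source:

import java.util.Map;
import java.util.TreeMap;

// Sketch of the command-registry idiom used above; field names are assumptions.
final class CommandRegistry {
  static final class CmdDescriptor {
    final Class<?> cmdClass;   // the test implementation to instantiate
    final String name;         // the name given on the command line
    final String description;  // help text printed by usage output
    CmdDescriptor(Class<?> cmdClass, String name, String description) {
      this.cmdClass = cmdClass;
      this.name = name;
      this.description = description;
    }
  }

  // A TreeMap keeps the usage listing alphabetically sorted by command name.
  private static final Map<String, CmdDescriptor> COMMANDS = new TreeMap<>();

  static void addCommandDescriptor(Class<?> cmdClass, String name, String description) {
    COMMANDS.put(name, new CmdDescriptor(cmdClass, name, description));
  }

  static CmdDescriptor lookup(String name) {
    return COMMANDS.get(name);
  }
}

The sorted map is what lets the tool print a deterministic, alphabetized command list in its --help output.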
    

    [11/51] [partial] hbase-site git commit: Published site at 021f66d11d2cbb7308308093e29e69d6e7661ee9.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/92a26cfb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushHandler.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushHandler.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushHandler.html
    index 4c42811..0bc3ddb 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushHandler.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushHandler.html
    @@ -563,381 +563,390 @@
 555    // If this is first time we've been put off, then emit a log message.
 556    if (fqe.getRequeueCount() <= 0) {
 557      // Note: We don't impose blockingStoreFiles constraint on meta regions
-558      LOG.warn("Region " + region.getRegionInfo().getEncodedName() + " has too many " +
-559        "store files; delaying flush up to " + this.blockingWaitTime + "ms");
-560      if (!this.server.compactSplitThread.requestSplit(region)) {
-561        try {
-562          this.server.compactSplitThread.requestSystemCompaction(region,
-563            Thread.currentThread().getName());
-564        } catch (IOException e) {
-565          e = e instanceof RemoteException ?
-566            ((RemoteException)e).unwrapRemoteException() : e;
-567          LOG.error("Cache flush failed for region " +
-568            Bytes.toStringBinary(region.getRegionInfo().getRegionName()), e);
-569        }
-570      }
-571    }
-572
-573    // Put back on the queue.  Have it come back out of the queue
-574    // after a delay of this.blockingWaitTime / 100 ms.
-575    this.flushQueue.add(fqe.requeue(this.blockingWaitTime / 100));
-576    // Tell a lie, it's not flushed but it's ok
-577    return true;
-578  }
-579}
-580    return flushRegion(region, false, fqe.isForceFlushAllStores(), fqe.getTracker());
-581  }
-582
-583  /**
-584   * Flush a region.
-585   * @param region Region to flush.
-586   * @param emergencyFlush Set if we are being force flushed. If true the region
-587   * needs to be removed from the flush queue. If false, we were called
-588   * from the main flusher run loop and got the entry to flush by calling
-589   * poll on the flush queue (which removed it).
-590   * @param forceFlushAllStores whether we want to flush all stores.
-591   * @return true if the region was successfully flushed, false otherwise. If
-592   * false, there will be accompanying log messages explaining why the region was
-593   * not flushed.
-594   */
-595  private boolean flushRegion(HRegion region, boolean emergencyFlush, boolean forceFlushAllStores,
-596      FlushLifeCycleTracker tracker) {
-597    synchronized (this.regionsInQueue) {
-598      FlushRegionEntry fqe = this.regionsInQueue.remove(region);
-599      // Use the start time of the FlushRegionEntry if available
-600      if (fqe != null && emergencyFlush) {
-601        // Need to remove region from delay queue. When NOT an
-602        // emergencyFlush, the item was removed via a flushQueue.poll.
-603        flushQueue.remove(fqe);
-604      }
-605    }
-606
-607    tracker.beforeExecution();
-608    lock.readLock().lock();
-609    try {
-610      notifyFlushRequest(region, emergencyFlush);
-611      FlushResult flushResult = region.flushcache(forceFlushAllStores, false, tracker);
-612      boolean shouldCompact = flushResult.isCompactionNeeded();
-613      // We just want to check the size
-614      boolean shouldSplit = region.checkSplit() != null;
-615      if (shouldSplit) {
-616        this.server.compactSplitThread.requestSplit(region);
-617      } else if (shouldCompact) {
-618        server.compactSplitThread.requestSystemCompaction(region, Thread.currentThread().getName());
-619      }
-620    } catch (DroppedSnapshotException ex) {
-621      // Cache flush can fail in a few places. If it fails in a critical
-622      // section, we get a DroppedSnapshotException and a replay of wal
-623      // is required. Currently the only way to do this is a restart of
-624      // the server. Abort because hdfs is probably bad (HBASE-644 is a case
-625      // where hdfs was bad but passed the hdfs check).
-626      server.abort("Replay of WAL required. Forcing server shutdown", ex);
-627      return false;
-628    } catch (IOException ex) {
-629      ex = ex instanceof RemoteException ? ((RemoteException) ex).unwrapRemoteException() : ex;
-630      LOG.error(
-631        "Cache flush failed"
-632        + (region != null ? (" for region " +
-633            Bytes.toStringBinary(region.getRegionInfo().getRegionName()))
-634          : ""), ex);
-635      if (!server.checkFileSystem()) {
-636        return false;
-637      }
-638    } finally
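The requeue logic above (delay a blocked flush entry and let it surface again later) is essentially the java.util.concurrent.DelayQueue pattern. A simplified, hypothetical sketch of such an entry, not the actual FlushRegionEntry implementation:

import java.util.concurrent.DelayQueue;
import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;

// Sketch of the "put back on the queue with a delay" idiom from the code above.
class FlushEntry implements Delayed {
  private long readyAtMs = System.currentTimeMillis(); // when the entry becomes poll-able
  private int requeueCount;

  FlushEntry requeue(long delayMs) { // mirrors fqe.requeue(blockingWaitTime / 100)
    this.readyAtMs = System.currentTimeMillis() + delayMs;
    this.requeueCount++;
    return this;
  }

  int getRequeueCount() { return requeueCount; }

  @Override public long getDelay(TimeUnit unit) {
    return unit.convert(readyAtMs - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
  }

  @Override public int compareTo(Delayed other) {
    return Long.compare(getDelay(TimeUnit.MILLISECONDS), other.getDelay(TimeUnit.MILLISECONDS));
  }
}

A DelayQueue<FlushEntry> only hands entries back to the flusher loop after the delay elapses, which is why the method can "tell a lie" and return true immediately: the work is deferred, not dropped.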

    [11/51] [partial] hbase-site git commit: Published site at acd0d1e446c164d9c54bfb461b2d449c8d717c07.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f2065178/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange100Test.html
    --
    diff --git 
    a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange100Test.html
     
    b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange100Test.html
    index 2510283..418c60c 100644
    --- 
    a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange100Test.html
    +++ 
    b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange100Test.html
    @@ -77,77 +77,77 @@
 069import org.apache.hadoop.hbase.client.RowMutations;
 070import org.apache.hadoop.hbase.client.Scan;
 071import org.apache.hadoop.hbase.client.Table;
-072import org.apache.hadoop.hbase.filter.BinaryComparator;
-073import org.apache.hadoop.hbase.filter.Filter;
-074import org.apache.hadoop.hbase.filter.FilterAllFilter;
-075import org.apache.hadoop.hbase.filter.FilterList;
-076import org.apache.hadoop.hbase.filter.PageFilter;
-077import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
-078import org.apache.hadoop.hbase.filter.WhileMatchFilter;
-079import org.apache.hadoop.hbase.io.compress.Compression;
-080import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-081import org.apache.hadoop.hbase.io.hfile.RandomDistribution;
-082import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
-083import org.apache.hadoop.hbase.regionserver.BloomType;
-084import org.apache.hadoop.hbase.regionserver.CompactingMemStore;
-085import org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
-086import org.apache.hadoop.hbase.trace.SpanReceiverHost;
-087import org.apache.hadoop.hbase.trace.TraceUtil;
-088import org.apache.hadoop.hbase.util.ByteArrayHashKey;
-089import org.apache.hadoop.hbase.util.Bytes;
-090import org.apache.hadoop.hbase.util.Hash;
-091import org.apache.hadoop.hbase.util.MurmurHash;
-092import org.apache.hadoop.hbase.util.Pair;
-093import org.apache.hadoop.hbase.util.YammerHistogramUtils;
-094import org.apache.hadoop.io.LongWritable;
-095import org.apache.hadoop.io.Text;
-096import org.apache.hadoop.mapreduce.Job;
-097import org.apache.hadoop.mapreduce.Mapper;
-098import org.apache.hadoop.mapreduce.lib.input.NLineInputFormat;
-099import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-100import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
-101import org.apache.hadoop.util.Tool;
-102import org.apache.hadoop.util.ToolRunner;
-103import org.apache.htrace.core.ProbabilitySampler;
-104import org.apache.htrace.core.Sampler;
-105import org.apache.htrace.core.TraceScope;
-106import org.apache.yetus.audience.InterfaceAudience;
-107import org.slf4j.Logger;
-108import org.slf4j.LoggerFactory;
-109import org.apache.hbase.thirdparty.com.google.common.base.MoreObjects;
-110import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-111
-112/**
-113 * Script used for evaluating HBase performance and scalability. Runs an HBase
-114 * client that steps through one of a set of hardcoded tests or 'experiments'
-115 * (e.g. a random reads test, a random writes test, etc.). Pass on the
-116 * command-line which test to run and how many clients are participating in
-117 * this experiment. Run {@code PerformanceEvaluation --help} to obtain usage.
-118 *
-119 * <p>This class sets up and runs the evaluation programs described in
-120 * Section 7, <i>Performance Evaluation</i>, of the
-121 * <a href="http://labs.google.com/papers/bigtable.html">Bigtable</a>
-122 * paper, pages 8-10.
-123 *
-124 * <p>By default, runs as a mapreduce job where each mapper runs a single test
-125 * client. Can also run as a non-mapreduce, multithreaded application by
-126 * specifying {@code --nomapred}. Each client does about 1GB of data, unless
-127 * specified otherwise.
-128 */
-129@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-130public class PerformanceEvaluation extends Configured implements Tool {
-131  static final String RANDOM_SEEK_SCAN = "randomSeekScan";
-132  static final String RANDOM_READ = "randomRead";
-133  private static final Logger LOG = LoggerFactory.getLogger(PerformanceEvaluation.class.getName());
-134  private static final ObjectMapper MAPPER = new ObjectMapper();
-135  static {
-136    MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
-137  }
-138
-139  public static final String TABLE_NAME = "TestTable";
-140  public static final byte[] FAMILY_NAME = Bytes.toBytes("info");
-141  public static final byte [] COLUMN_ZERO = Bytes.toBytes("" + 0);
-142  public static final byte [] QUALIFIER_NAME = COLUMN_ZERO;
+072import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+073import org.apache.hadoop.hbase.filter.BinaryComparator;
+074import
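Since the javadoc above notes that PerformanceEvaluation implements the Hadoop Tool interface, it would normally be launched through ToolRunner. A minimal sketch, assuming the (Configuration) constructor shown in these docs; the argument values are illustrative only:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.PerformanceEvaluation;
import org.apache.hadoop.util.ToolRunner;

public class RunPE {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration(); // HBaseConfiguration.create() in a real client
    // "--nomapred" selects the multithreaded (non-MapReduce) mode described in
    // the javadoc; "randomRead 4" asks for the randomRead command with 4 clients.
    int exitCode = ToolRunner.run(conf, new PerformanceEvaluation(conf),
        new String[] { "--nomapred", "randomRead", "4" });
    System.exit(exitCode);
  }
}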
    

    [11/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
    --
    diff --git 
    a/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
     
    b/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
    index 0f850d3..82cd795 100644
    --- 
    a/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
    +++ 
    b/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
@@ -137,7 +137,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class TestReplicationEndpoint.InterClusterReplicationEndpointForTest
+public static class TestReplicationEndpoint.InterClusterReplicationEndpointForTest
 extends org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
 
 
@@ -151,28 +151,6 @@ extends org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
 
 
 Nested Class Summary
-Nested Classes
-Modifier and Type / Class and Description
-protected class
-TestReplicationEndpoint.InterClusterReplicationEndpointForTest.DummyReplicator
-protected class
-TestReplicationEndpoint.InterClusterReplicationEndpointForTest.FailingDummyReplicator
-Nested classes/interfaces inherited from class org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
-org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint.Replicator
 
 
 
@@ -256,7 +234,7 @@ extends org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
 Method and Description
 
-protected org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint.Replicator
+protected java.util.concurrent.Callable<Integer>
 createReplicator(java.util.List<org.apache.hadoop.hbase.wal.WAL.Entry> entries, int ordinal)
 
@@ -270,7 +248,7 @@ extends org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
 
 Methods inherited from class org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
-doStop, init, isPeerEnabled, sleepForRetries
+doStop, init, isPeerEnabled, replicateEntries, sleepForRetries
 
 
@@ -327,7 +305,7 @@ extends org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
 
 replicateCount
-static java.util.concurrent.atomic.AtomicInteger replicateCount
+static java.util.concurrent.atomic.AtomicInteger replicateCount
 
 
@@ -336,7 +314,7 @@ extends org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
 
 failedOnce
-static boolean failedOnce
+static boolean failedOnce
 
 
@@ -353,7 +331,7 @@ extends org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
 
 InterClusterReplicationEndpointForTest
-public InterClusterReplicationEndpointForTest()
+public InterClusterReplicationEndpointForTest()
 
 
@@ -370,7 +348,7 @@ extends org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
 
 replicate
-public boolean replicate(org.apache.hadoop.hbase.replication.ReplicationEndpoint.ReplicateContext replicateContext)
+public boolean replicate(org.apache.hadoop.hbase.replication.ReplicationEndpoint.ReplicateContext replicateContext)
 
 Specified by:
 replicate in interface org.apache.hadoop.hbase.replication.ReplicationEndpoint
@@ -385,8 +363,8 @@ extends org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
 
 createReplicator
-protected org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint.Replicator createReplicator(java.util.List<org.apache.hadoop.hbase.wal.WAL.Entry> entries,
-
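The change visible above replaces the Replicator inner class with a plain Callable<Integer>. A sketch of what such an override could look like in a test endpoint; the failure-injection logic and the List element type are assumptions for illustration, not the actual test code:

import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicBoolean;

// Sketch: after the refactor, a test endpoint can return a Callable<Integer>
// (the ordinal of the completed batch) instead of a Replicator instance.
class SketchEndpoint {
  static final AtomicBoolean failedOnce = new AtomicBoolean(false);

  protected Callable<Integer> createReplicator(List<String> entries, int ordinal) {
    return () -> {
      // Fail the first batch once to exercise the retry path, then succeed.
      if (failedOnce.compareAndSet(false, true)) {
        throw new RuntimeException("injected failure for batch " + ordinal);
      }
      return ordinal; // report which batch finished
    };
  }
}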
    

    [11/51] [partial] hbase-site git commit: Published site at 2912c953551bedbfbf30c32c156ed7bb187d54c3.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d220bc5e/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html 
    b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
    index 8302e28..c370eb9 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
    @@ -2113,3031 +2113,3033 @@
 2105            errors.reportError(ERROR_CODE.ORPHAN_TABLE_STATE,
 2106                tableName + " unable to delete dangling table state " + tableState);
 2107          }
-2108        } else {
-2109          errors.reportError(ERROR_CODE.ORPHAN_TABLE_STATE,
-2110              tableName + " has dangling table state " + tableState);
-2111        }
-2112      }
-2113    }
-2114    // check that all tables have states
-2115    for (TableName tableName : tablesInfo.keySet()) {
-2116      if (isTableIncluded(tableName) && !tableStates.containsKey(tableName)) {
-2117        if (fixMeta) {
-2118          MetaTableAccessor.updateTableState(connection, tableName, TableState.State.ENABLED);
-2119          TableState newState = MetaTableAccessor.getTableState(connection, tableName);
-2120          if (newState == null) {
-2121            errors.reportError(ERROR_CODE.NO_TABLE_STATE,
-2122                "Unable to change state for table " + tableName + " in meta ");
-2123          }
-2124        } else {
-2125          errors.reportError(ERROR_CODE.NO_TABLE_STATE,
-2126              tableName + " has no state in meta ");
-2127        }
-2128      }
-2129    }
-2130  }
-2131
-2132  private void preCheckPermission() throws IOException, AccessDeniedException {
-2133    if (shouldIgnorePreCheckPermission()) {
-2134      return;
-2135    }
-2136
-2137    Path hbaseDir = FSUtils.getRootDir(getConf());
-2138    FileSystem fs = hbaseDir.getFileSystem(getConf());
-2139    UserProvider userProvider = UserProvider.instantiate(getConf());
-2140    UserGroupInformation ugi = userProvider.getCurrent().getUGI();
-2141    FileStatus[] files = fs.listStatus(hbaseDir);
-2142    for (FileStatus file : files) {
-2143      try {
-2144        FSUtils.checkAccess(ugi, file, FsAction.WRITE);
-2145      } catch (AccessDeniedException ace) {
-2146        LOG.warn("Got AccessDeniedException when preCheckPermission ", ace);
-2147        errors.reportError(ERROR_CODE.WRONG_USAGE, "Current user " + ugi.getUserName()
-2148          + " does not have write perms to " + file.getPath()
-2149          + ". Please rerun hbck as hdfs user " + file.getOwner());
-2150        throw ace;
-2151      }
-2152    }
-2153  }
-2154
-2155  /**
-2156   * Deletes region from meta table
-2157   */
-2158  private void deleteMetaRegion(HbckInfo hi) throws IOException {
-2159    deleteMetaRegion(hi.metaEntry.getRegionName());
-2160  }
-2161
-2162  /**
-2163   * Deletes region from meta table
-2164   */
-2165  private void deleteMetaRegion(byte[] metaKey) throws IOException {
-2166    Delete d = new Delete(metaKey);
-2167    meta.delete(d);
-2168    LOG.info("Deleted " + Bytes.toString(metaKey) + " from META" );
-2169  }
-2170
-2171  /**
-2172   * Reset the split parent region info in meta table
-2173   */
-2174  private void resetSplitParent(HbckInfo hi) throws IOException {
-2175    RowMutations mutations = new RowMutations(hi.metaEntry.getRegionName());
-2176    Delete d = new Delete(hi.metaEntry.getRegionName());
-2177    d.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITA_QUALIFIER);
-2178    d.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITB_QUALIFIER);
-2179    mutations.add(d);
-2180
-2181    RegionInfo hri = RegionInfoBuilder.newBuilder(hi.metaEntry)
-2182        .setOffline(false)
-2183        .setSplit(false)
-2184        .build();
-2185    Put p = MetaTableAccessor.makePutFromRegionInfo(hri, EnvironmentEdgeManager.currentTime());
-2186    mutations.add(p);
-2187
-2188    meta.mutateRow(mutations);
-2189    LOG.info("Reset split parent " + hi.metaEntry.getRegionNameAsString() + " in META" );
-2190  }
-2191
-2192  /**
-2193   * This is a backwards-compatibility wrapper for permanently offlining a region
-2194   * that should not be alive.  If the region server does not support the
-2195   * "offline" method, it will use the closest unassign method instead.  This
-2196   * will basically work until one attempts to disable or delete the affected
-2197   * table.  The problem has to do with in-memory only master state, so
-2198   * restarting the HMaster or failing over to another should fix this.
-2199   */
-2200  private void offline(byte[] regionName) throws IOException {
-2201    String regionString = Bytes.toStringBinary(regionName);
-2202    if (!rsSupportsOffline) {
-2203      LOG.warn("Using unassign region
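The resetSplitParent method above uses the standard client pattern for applying a Delete and a Put to the same row atomically via RowMutations. A generic sketch against the public client API; the column family, qualifiers, and value here are illustrative, not the catalog constants used above:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class AtomicRowUpdate {
  // Atomically apply a Delete and a Put to one row; HBase applies all
  // mutations in a RowMutations batch as a single atomic row operation.
  static void resetRow(Table table, byte[] row) throws IOException {
    RowMutations mutations = new RowMutations(row);

    Delete d = new Delete(row);
    d.addColumn(Bytes.toBytes("info"), Bytes.toBytes("splitA")); // illustrative qualifiers
    d.addColumn(Bytes.toBytes("info"), Bytes.toBytes("splitB"));
    mutations.add(d);

    Put p = new Put(row);
    p.addColumn(Bytes.toBytes("info"), Bytes.toBytes("state"), Bytes.toBytes("ONLINE"));
    mutations.add(p);

    table.mutateRow(mutations); // all-or-nothing within the row
  }
}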

    [11/51] [partial] hbase-site git commit: Published site at 2a2258656b2fcd92b967131b6c1f037363553bc4.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e0fb1fde/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html 
    b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html
    index 50caf18..61bf913 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html
    @@ -45,773 +45,774 @@
 037import java.util.TimeZone;
 038import java.util.concurrent.TimeUnit;
 039
-040import org.apache.commons.cli.CommandLine;
-041import org.apache.commons.cli.CommandLineParser;
-042import org.apache.commons.cli.HelpFormatter;
-043import org.apache.commons.cli.Option;
-044import org.apache.commons.cli.OptionGroup;
-045import org.apache.commons.cli.Options;
-046import org.apache.commons.cli.ParseException;
-047import org.apache.commons.cli.PosixParser;
-048import org.apache.commons.lang3.StringUtils;
-049import org.apache.hadoop.conf.Configuration;
-050import org.apache.hadoop.conf.Configured;
-051import org.apache.hadoop.fs.FileSystem;
-052import org.apache.hadoop.fs.Path;
-053import org.apache.hadoop.hbase.Cell;
-054import org.apache.hadoop.hbase.CellComparator;
-055import org.apache.hadoop.hbase.CellUtil;
-056import org.apache.hadoop.hbase.HBaseConfiguration;
-057import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-058import org.apache.hadoop.hbase.HConstants;
-059import org.apache.hadoop.hbase.HRegionInfo;
-060import org.apache.hadoop.hbase.KeyValue;
-061import org.apache.hadoop.hbase.KeyValueUtil;
-062import org.apache.hadoop.hbase.PrivateCellUtil;
-063import org.apache.hadoop.hbase.TableName;
-064import org.apache.hadoop.hbase.Tag;
-065import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-066import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
-067import org.apache.hadoop.hbase.mob.MobUtils;
-068import org.apache.hadoop.hbase.regionserver.HStoreFile;
-069import org.apache.hadoop.hbase.regionserver.TimeRangeTracker;
-070import org.apache.hadoop.hbase.util.BloomFilter;
-071import org.apache.hadoop.hbase.util.BloomFilterFactory;
-072import org.apache.hadoop.hbase.util.BloomFilterUtil;
-073import org.apache.hadoop.hbase.util.Bytes;
-074import org.apache.hadoop.hbase.util.FSUtils;
-075import org.apache.hadoop.hbase.util.HFileArchiveUtil;
-076import org.apache.hadoop.util.Tool;
-077import org.apache.hadoop.util.ToolRunner;
-078import org.apache.yetus.audience.InterfaceAudience;
-079import org.apache.yetus.audience.InterfaceStability;
-080import org.slf4j.Logger;
-081import org.slf4j.LoggerFactory;
-082
-083import com.codahale.metrics.ConsoleReporter;
-084import com.codahale.metrics.Counter;
-085import com.codahale.metrics.Gauge;
-086import com.codahale.metrics.Histogram;
-087import com.codahale.metrics.Meter;
-088import com.codahale.metrics.MetricFilter;
-089import com.codahale.metrics.MetricRegistry;
-090import com.codahale.metrics.ScheduledReporter;
-091import com.codahale.metrics.Snapshot;
-092import com.codahale.metrics.Timer;
-093
-094/**
-095 * Implements pretty-printing functionality for {@link HFile}s.
-096 */
-097@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-098@InterfaceStability.Evolving
-099public class HFilePrettyPrinter extends Configured implements Tool {
-100
-101  private static final Logger LOG = LoggerFactory.getLogger(HFilePrettyPrinter.class);
-102
-103  private Options options = new Options();
-104
-105  private boolean verbose;
-106  private boolean printValue;
-107  private boolean printKey;
-108  private boolean shouldPrintMeta;
-109  private boolean printBlockIndex;
-110  private boolean printBlockHeaders;
-111  private boolean printStats;
-112  private boolean checkRow;
-113  private boolean checkFamily;
-114  private boolean isSeekToRow = false;
-115  private boolean checkMobIntegrity = false;
-116  private Map<String, List<Path>> mobFileLocations;
-117  private static final int FOUND_MOB_FILES_CACHE_CAPACITY = 50;
-118  private static final int MISSING_MOB_FILES_CACHE_CAPACITY = 20;
-119  private PrintStream out = System.out;
-120  private PrintStream err = System.err;
-121
-122  /**
-123   * The row which the user wants to specify and print all the KeyValues for.
-124   */
-125  private byte[] row = null;
-126
-127  private List<Path> files = new ArrayList<>();
-128  private int count;
-129
-130  private static final String FOUR_SPACES = "    ";
-131
-132  public HFilePrettyPrinter() {
-133    super();
-134    init();
-135  }
-136
-137  public HFilePrettyPrinter(Configuration conf) {
-138    super(conf);
-139    init();
-140  }
-141
-142  private void init() {
-143    options.addOption("v", "verbose", false,
-144        "Verbose output; emits file and meta data delimiters");
-145    options.addOption("p",
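The init() method above builds a commons-cli Options set. The general parse-and-dispatch pattern looks roughly like this; the "-v" option is taken from the fragment, while the "-p" long name and description are assumptions:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;

public class CliSketch {
  public static void main(String[] args) {
    Options options = new Options();
    options.addOption("v", "verbose", false,
        "Verbose output; emits file and meta data delimiters");
    options.addOption("p", "printkv", false, "Print key/value pairs"); // name assumed

    CommandLineParser parser = new PosixParser(); // matches the import list above
    try {
      CommandLine cmd = parser.parse(options, args);
      boolean verbose = cmd.hasOption("v");
      System.out.println("verbose=" + verbose);
    } catch (ParseException e) {
      new HelpFormatter().printHelp("clisketch", options, true);
    }
  }
}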

    [11/51] [partial] hbase-site git commit: Published site at e468b4022f76688851b3e0c34722f01a56bd624f.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16541468/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html 
    b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
    index 1d50582..7eb7661 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
    @@ -104,14 +104,14 @@
     
     
 void
-HMaster.checkTableModifiable(TableName tableName)
-
-
-void
 MasterServices.checkTableModifiable(TableName tableName)
 Check table is modifiable; i.e.
 
 
+
+void
+HMaster.checkTableModifiable(TableName tableName)
+
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16541468/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html 
    b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
    index 6194cbc..25d874c 100644
    --- a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
    +++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
    @@ -170,14 +170,14 @@
     
     
 void
-HMaster.checkTableModifiable(TableName tableName)
-
-
-void
 MasterServices.checkTableModifiable(TableName tableName)
 Check table is modifiable; i.e.
 
 
+
+void
+HMaster.checkTableModifiable(TableName tableName)
+
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16541468/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html 
    b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
    index 067aa54..1b9fa6a 100644
    --- a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
    +++ b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
    @@ -415,17 +415,17 @@ Input/OutputFormats, a table indexing MapReduce job, and 
    utility methods.
     
     
     
-RawCellBuilder
-RawCellBuilder.setTags(List<Tag> tags)
-
-
 ExtendedCellBuilder
 ExtendedCellBuilder.setTags(List<Tag> tags)
 
-
+
 ExtendedCellBuilder
 ExtendedCellBuilderImpl.setTags(List<Tag> tags)
 
+
+RawCellBuilder
+RawCellBuilder.setTags(List<Tag> tags)
+
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16541468/devapidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
     
    b/devapidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
    index 9ebf945..0478aee 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
    @@ -164,7 +164,7 @@
     
     
     boolean
    -ConnectionImplementation.isMasterRunning()
    +ClusterConnection.isMasterRunning()
 Deprecated. this has been deprecated without a replacement
     
    @@ -172,7 +172,7 @@
     
     
     boolean
    -ClusterConnection.isMasterRunning()
    +ConnectionImplementation.isMasterRunning()
 Deprecated. this has been deprecated without a replacement
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16541468/devapidocs/org/apache/hadoop/hbase/client/Append.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/client/Append.html 
    b/devapidocs/org/apache/hadoop/hbase/client/Append.html
    index 8f79084..72170e4 100644
    --- a/devapidocs/org/apache/hadoop/hbase/client/Append.html
    +++ b/devapidocs/org/apache/hadoop/hbase/client/Append.html
    @@ -367,7 +367,7 @@ extends 
     
 Methods inherited from class org.apache.hadoop.hbase.client.Mutation
-cellScanner, checkRow, checkRow, checkRow, compareTo, createPutKeyValue, createPutKeyValue, createPutKeyValue, get, getACL, getCellList, getCellVisibility, getClusterIds, getDurability, getFamilyCellMap, getFingerprint, getRow, getTimeStamp, getTTL, has, has, has, has, has, heapSize, isEmpty, numFamilies, size, toCellVisibility, toMap
+cellScanner, checkRow, checkRow, checkRow, compareTo, createPutKeyValue, createPutKeyValue, createPutKeyValue, get, getACL, getCellList, getCellVisibility,

    [11/51] [partial] hbase-site git commit: Published site at 64061f896fe21512504e3886a400759e88b519da.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f1ebf5b6/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
    b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
    index 5f975db..3b7627b 100644
    --- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
    +++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
    @@ -151,119 +151,119 @@
     
     
 Filter.ReturnCode
-ColumnPrefixFilter.filterCell(Cell cell)
+FilterListWithAND.filterCell(Cell c)
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterCell(Cell c)
+ValueFilter.filterCell(Cell c)
 
 Filter.ReturnCode
-RowFilter.filterCell(Cell v)
+SkipFilter.filterCell(Cell c)
 
 Filter.ReturnCode
-FuzzyRowFilter.filterCell(Cell c)
+FamilyFilter.filterCell(Cell c)
 
 Filter.ReturnCode
-Filter.filterCell(Cell c)
-A way to filter based on the column family, column qualifier and/or the column value.
+ColumnPrefixFilter.filterCell(Cell cell)
 
 Filter.ReturnCode
-RandomRowFilter.filterCell(Cell c)
+PageFilter.filterCell(Cell ignored)
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterCell(Cell c)
+RowFilter.filterCell(Cell v)
 
 Filter.ReturnCode
-SkipFilter.filterCell(Cell c)
+ColumnRangeFilter.filterCell(Cell c)
 
 Filter.ReturnCode
-TimestampsFilter.filterCell(Cell c)
+ColumnCountGetFilter.filterCell(Cell c)
 
 Filter.ReturnCode
-ValueFilter.filterCell(Cell c)
+MultipleColumnPrefixFilter.filterCell(Cell c)
 
 Filter.ReturnCode
-KeyOnlyFilter.filterCell(Cell ignored)
+ColumnPaginationFilter.filterCell(Cell c)
 
 Filter.ReturnCode
-FamilyFilter.filterCell(Cell c)
+DependentColumnFilter.filterCell(Cell c)
 
 Filter.ReturnCode
-QualifierFilter.filterCell(Cell c)
+FilterListWithOR.filterCell(Cell c)
 
 Filter.ReturnCode
-FilterList.filterCell(Cell c)
+InclusiveStopFilter.filterCell(Cell c)
 
 Filter.ReturnCode
-ColumnRangeFilter.filterCell(Cell c)
+KeyOnlyFilter.filterCell(Cell ignored)
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterCell(Cell c)
+MultiRowRangeFilter.filterCell(Cell ignored)
 
 Filter.ReturnCode
-FilterListWithAND.filterCell(Cell c)
+Filter.filterCell(Cell c)
+A way to filter based on the column family, column qualifier and/or the column value.
 
 Filter.ReturnCode
-WhileMatchFilter.filterCell(Cell c)
+FirstKeyOnlyFilter.filterCell(Cell c)
 
 Filter.ReturnCode
-ColumnValueFilter.filterCell(Cell c)
+WhileMatchFilter.filterCell(Cell c)
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterCell(Cell ignored)
+FirstKeyValueMatchingQualifiersFilter.filterCell(Cell c)
+Deprecated.
 
 Filter.ReturnCode
-PrefixFilter.filterCell(Cell c)
+TimestampsFilter.filterCell(Cell c)
 
 Filter.ReturnCode
-DependentColumnFilter.filterCell(Cell c)
+FuzzyRowFilter.filterCell(Cell c)
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterCell(Cell c)
-Deprecated.
+FilterList.filterCell(Cell c)
 
 Filter.ReturnCode
-PageFilter.filterCell(Cell ignored)
+RandomRowFilter.filterCell(Cell c)
 
 Filter.ReturnCode
-FilterListWithOR.filterCell(Cell c)
+PrefixFilter.filterCell(Cell c)
 
 Filter.ReturnCode
-InclusiveStopFilter.filterCell(Cell c)
+SingleColumnValueFilter.filterCell(Cell c)
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterCell(Cell c)
+ColumnValueFilter.filterCell(Cell c)
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterCell(Cell c)
+QualifierFilter.filterCell(Cell c)
 
 Filter.ReturnCode
@@ -279,158 +279,158 @@
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cell c)
+ValueFilter.filterKeyValue(Cell c)
 Deprecated.
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cell c)
+SkipFilter.filterKeyValue(Cell c)
 Deprecated.
 
 Filter.ReturnCode
-RowFilter.filterKeyValue(Cell c)
-Deprecated.
+FilterListBase.filterKeyValue(Cell c)
 
 Filter.ReturnCode
-FuzzyRowFilter.filterKeyValue(Cell c)
+FamilyFilter.filterKeyValue(Cell c)
 Deprecated.
 
 Filter.ReturnCode
-Filter.filterKeyValue(Cell c)
-Deprecated. As of release 2.0.0, this will be removed in HBase 3.0.0. Instead use filterCell(Cell)
+ColumnPrefixFilter.filterKeyValue(Cell c)
+Deprecated.
 
 Filter.ReturnCode
-RandomRowFilter.filterKeyValue(Cell c)
+PageFilter.filterKeyValue(Cell c)
 Deprecated.
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterKeyValue(Cell c)
+RowFilter.filterKeyValue(Cell c)
 Deprecated.
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cell c)
+ColumnRangeFilter.filterKeyValue(Cell c)
 Deprecated.
 
 Filter.ReturnCode
-TimestampsFilter.filterKeyValue(Cell c)
+ColumnCountGetFilter.filterKeyValue(Cell c)
 Deprecated.
 
 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cell c)
+MultipleColumnPrefixFilter.filterKeyValue(Cell c)
 Deprecated.
 
 Filter.ReturnCode
-KeyOnlyFilter.filterKeyValue(Cell ignored)
+ColumnPaginationFilter.filterKeyValue(Cell c)
 Deprecated.
 
 Filter.ReturnCode
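Every class listed above implements the same Filter.ReturnCode contract: filterCell inspects one Cell and tells the scanner whether to include, skip, or stop. A minimal custom filter built on the public API; a sketch, not code from the diff:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.util.Bytes;

// Include a cell when its value equals `expected`, skip it otherwise.
public class ValueEqualsFilter extends FilterBase {
  private final byte[] expected;

  public ValueEqualsFilter(byte[] expected) {
    this.expected = expected;
  }

  @Override
  public ReturnCode filterCell(Cell c) {
    return Bytes.equals(CellUtil.cloneValue(c), expected)
        ? ReturnCode.INCLUDE
        : ReturnCode.SKIP;
  }
}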
    

    [11/51] [partial] hbase-site git commit: Published site at 4cb40e6d846ce1f28ffb40d388c9efb753197813.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4dc2a2e8/devapidocs/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.html
     
    b/devapidocs/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.html
    index ad762f2..73580f6 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.html
    @@ -118,7 +118,8 @@ var activeTableTab = "activeTableTab";
     
     
     
-public class MultiThreadedClientExample
+@InterfaceAudience.Private
+public class MultiThreadedClientExample
 extends org.apache.hadoop.conf.Configured
 implements org.apache.hadoop.util.Tool
 Example on how to use HBase's Connection and Table in a
@@ -321,7 +322,7 @@ implements org.apache.hadoop.util.Tool
 
 LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG
 
@@ -330,7 +331,7 @@ implements org.apache.hadoop.util.Tool
 
 DEFAULT_NUM_OPERATIONS
-private static final int DEFAULT_NUM_OPERATIONS
+private static final int DEFAULT_NUM_OPERATIONS
 
 See Also:
 Constant Field Values
@@ -343,7 +344,7 @@ implements org.apache.hadoop.util.Tool
 
 FAMILY
-private static final byte[] FAMILY
+private static final byte[] FAMILY
 The name of the column family.
 
  d for default.
@@ -355,7 +356,7 @@ implements org.apache.hadoop.util.Tool
 
 QUAL
-private static final byte[] QUAL
+private static final byte[] QUAL
 For the example we're just using one qualifier.
 
@@ -365,7 +366,7 @@ implements org.apache.hadoop.util.Tool
 
 internalPool
-private final java.util.concurrent.ExecutorService internalPool
+private final java.util.concurrent.ExecutorService internalPool
 
@@ -374,7 +375,7 @@ implements org.apache.hadoop.util.Tool
 
 threads
-private final int threads
+private final int threads
 
@@ -391,7 +392,7 @@ implements org.apache.hadoop.util.Tool
 
 MultiThreadedClientExample
-public MultiThreadedClientExample() throws java.io.IOException
+public MultiThreadedClientExample() throws java.io.IOException
 
 Throws:
@@ -413,7 +414,7 @@ implements org.apache.hadoop.util.Tool
 
 run
-public int run(String[] args) throws Exception
+public int run(String[] args) throws Exception
 
 Specified by:
 run in interface org.apache.hadoop.util.Tool
@@ -429,7 +430,7 @@ implements org.apache.hadoop.util.Tool
 
 warmUpConnectionCache
-private void warmUpConnectionCache(Connection connection, TableName tn) throws java.io.IOException
+private void warmUpConnectionCache(Connection connection, TableName tn) throws java.io.IOException
 
@@ -444,7 +445,7 @@ implements org.apache.hadoop.util.Tool
 
 main
-public static void main(String[] args) throws Exception
+public static void main(String[] args) throws Exception
 
 Throws:

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4dc2a2e8/devapidocs/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.html b/devapidocs/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.html
index f4b8bbd..ef59d1e 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.html
@@ -113,7 +113,8 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class RefreshHFilesClient
+@InterfaceAudience.Private
+public class RefreshHFilesClient
 extends java.lang.Object
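The class documented above demonstrates sharing one Connection across a thread pool while each task uses its own short-lived Table handle. A free-standing sketch of that pattern; the table name, pool size, and row keys are illustrative, and this is not the example's actual code:

import java.io.IOException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class MiniMultiThreadedClient {
  public static void main(String[] args) throws Exception {
    ExecutorService pool = Executors.newFixedThreadPool(4);
    // One heavyweight Connection is shared by all workers; Table handles are
    // lightweight and not thread-safe, so each task opens and closes its own.
    try (Connection connection = ConnectionFactory.createConnection()) {
      TableName tn = TableName.valueOf("TestTable"); // illustrative table name
      for (int i = 0; i < 100; i++) {
        final int row = i;
        pool.submit(() -> {
          try (Table table = connection.getTable(tn)) {
            table.get(new Get(Bytes.toBytes("row-" + row)));
          } catch (IOException e) {
            e.printStackTrace();
          }
        });
      }
      pool.shutdown();
      // Let in-flight tasks finish before the connection is closed.
      pool.awaitTermination(5, TimeUnit.MINUTES);
    }
  }
}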

    [11/51] [partial] hbase-site git commit: Published site at 8ab7b20f48951d77945181024f5e15842bc253c4.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6eb695c8/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.RingBufferExceptionHandler.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.RingBufferExceptionHandler.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.RingBufferExceptionHandler.html
    index 9971079..03c8b000 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.RingBufferExceptionHandler.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.RingBufferExceptionHandler.html
    @@ -49,1067 +49,1082 @@
 041import org.apache.hadoop.fs.Path;
 042import org.apache.hadoop.hbase.HConstants;
 043import org.apache.hadoop.hbase.client.RegionInfo;
-044import org.apache.hadoop.hbase.trace.TraceUtil;
-045import org.apache.hadoop.hbase.util.Bytes;
-046import org.apache.hadoop.hbase.util.ClassSize;
-047import org.apache.hadoop.hbase.util.FSUtils;
-048import org.apache.hadoop.hbase.util.HasThread;
-049import org.apache.hadoop.hbase.util.Threads;
-050import org.apache.hadoop.hbase.wal.FSHLogProvider;
-051import org.apache.hadoop.hbase.wal.WALEdit;
-052import org.apache.hadoop.hbase.wal.WALKeyImpl;
-053import org.apache.hadoop.hbase.wal.WALProvider.Writer;
-054import org.apache.hadoop.hdfs.DFSOutputStream;
-055import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
-056import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-057import org.apache.htrace.core.TraceScope;
-058import org.apache.yetus.audience.InterfaceAudience;
-059import org.slf4j.Logger;
-060import org.slf4j.LoggerFactory;
-061import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-062
-063/**
-064 * The default implementation of FSWAL.
-065 */
-066@InterfaceAudience.Private
-067public class FSHLog extends AbstractFSWAL<Writer> {
-068  // IMPLEMENTATION NOTES:
-069  //
-070  // At the core is a ring buffer. Our ring buffer is the LMAX Disruptor. It tries to
-071  // minimize synchronizations and volatile writes when multiple contending threads as is the case
-072  // here appending and syncing on a single WAL. The Disruptor is configured to handle multiple
-073  // producers but it has one consumer only (the producers in HBase are IPC Handlers calling append
-074  // and then sync). The single consumer/writer pulls the appends and syncs off the ring buffer.
-075  // When a handler calls sync, it is given back a future. The producer 'blocks' on the future so
-076  // it does not return until the sync completes. The future is passed over the ring buffer from
-077  // the producer/handler to the consumer thread where it does its best to batch up the producer
-078  // syncs so one WAL sync actually spans multiple producer sync invocations. How well the
-079  // batching works depends on the write rate; i.e. we tend to batch more in times of
-080  // high writes/syncs.
-081  //
-082  // Calls to append now also wait until the append has been done on the consumer side of the
-083  // disruptor. We used to not wait but it makes the implementation easier to grok if we have
-084  // the region edit/sequence id after the append returns.
-085  //
-086  // TODO: Handlers need to coordinate appending AND syncing. Can we have the threads contend
-087  // once only? Probably hard given syncs take way longer than an append.
-088  //
-089  // The consumer threads pass the syncs off to multiple syncing threads in a round robin fashion
-090  // to ensure we keep up back-to-back FS sync calls (FS sync calls are the long poll writing the
-091  // WAL). The consumer thread passes the futures to the sync threads for it to complete
-092  // the futures when done.
-093  //
-094  // The 'sequence' in the below is the sequence of the append/sync on the ringbuffer. It
-095  // acts as a sort-of transaction id. It is always incrementing.
-096  //
-097  // The RingBufferEventHandler class hosts the ring buffer consuming code. The threads that
-098  // do the actual FS sync are implementations of SyncRunner. SafePointZigZagLatch is a
-099  // synchronization class used to halt the consumer at a safe point -- just after all outstanding
-100  // syncs and appends have completed -- so the log roller can swap the WAL out under it.
-101  //
-102  // We use ring buffer sequence as txid of FSWALEntry and SyncFuture.
-103  private static final Logger LOG = LoggerFactory.getLogger(FSHLog.class);
-104
-105  /**
-106   * The nexus at which all incoming handlers meet. Does appends and sync with an ordering. Appends
-107   * and syncs are each put on the ring which means handlers need to smash up against the ring twice
-108   * (can we make it once only? ... maybe not since time to append is so different from time to sync
-109   * and sometimes we don't want to sync or we want to async the sync). The ring is where
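The producer/consumer handoff described in those notes (many handlers block on futures; one consumer batches the actual filesystem syncs) can be approximated with a plain queue and CompletableFuture. This is a conceptual stand-in for the LMAX Disruptor, not FSHLog's code:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.LinkedBlockingQueue;

// Many producer threads enqueue sync requests and block on a future; a single
// consumer drains a batch and completes every future with one "FS sync".
public class BatchedSyncer {
  private final BlockingQueue<CompletableFuture<Void>> queue = new LinkedBlockingQueue<>();

  public void sync() throws Exception {           // called by handler threads
    CompletableFuture<Void> f = new CompletableFuture<>();
    queue.put(f);
    f.get();                                      // block until the batched sync completes
  }

  public void runConsumer() throws InterruptedException {  // single consumer thread, loops forever
    while (true) {
      List<CompletableFuture<Void>> batch = new ArrayList<>();
      batch.add(queue.take());                    // wait for at least one request
      queue.drainTo(batch);                       // grab everything else that piled up
      // ... one real filesystem sync would happen here ...
      batch.forEach(f -> f.complete(null));       // one sync satisfies many callers
    }
  }
}

As in the notes, batching improves under load: the more producers pile up between consumer iterations, the more callers each physical sync satisfies.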

    [11/51] [partial] hbase-site git commit: Published site at 00095a2ef9442e3fd86c04876c9d91f2f8b23ad8.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bd675fa3/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.TableSkewCostFunction.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.TableSkewCostFunction.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.TableSkewCostFunction.html
    index 1bdfb9a..22bebe1 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.TableSkewCostFunction.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.TableSkewCostFunction.html
    @@ -300,7 +300,7 @@
 292          continue;
 293        }
 294        if (!c.isNeeded()) {
-295          LOG.debug(c.getClass().getName() + " indicated that its cost should not be considered");
+295          LOG.debug("{} not needed", c.getClass().getSimpleName());
 296          continue;
 297        }
 298        sumMultiplier += multiplier;
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bd675fa3/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.WriteRequestCostFunction.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.WriteRequestCostFunction.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.WriteRequestCostFunction.html
    index 1bdfb9a..22bebe1 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.WriteRequestCostFunction.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.WriteRequestCostFunction.html
    @@ -300,7 +300,7 @@
 292          continue;
 293        }
 294        if (!c.isNeeded()) {
-295          LOG.debug(c.getClass().getName() + " indicated that its cost should not be considered");
+295          LOG.debug("{} not needed", c.getClass().getSimpleName());
 296          continue;
 297        }
 298        sumMultiplier += multiplier;
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bd675fa3/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.html
    index 1bdfb9a..22bebe1 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.html
    @@ -300,7 +300,7 @@
 292          continue;
 293        }
 294        if (!c.isNeeded()) {
-295          LOG.debug(c.getClass().getName() + " indicated that its cost should not be considered");
+295          LOG.debug("{} not needed", c.getClass().getSimpleName());
 296          continue;
 297        }
 298        sumMultiplier += multiplier;
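The same change appears three times above (the same generated page in three contexts): string concatenation is replaced by SLF4J's parameterized form. The practical difference is that the message is only formatted when DEBUG is enabled, though argument expressions are still evaluated:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogStyle {
  private static final Logger LOG = LoggerFactory.getLogger(LogStyle.class);

  void report(Object costFunction) {
    // Old style: the concatenation builds the full string even when DEBUG is off.
    LOG.debug(costFunction.getClass().getName() + " indicated that its cost should not be considered");

    // New style: "{}" substitution only happens if DEBUG is enabled, and
    // getSimpleName() keeps the message short.
    LOG.debug("{} not needed", costFunction.getClass().getSimpleName());
  }
}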
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bd675fa3/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.html
    index aae0575..e85a77c 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.html
    @@ -34,150 +34,181 @@
 026import org.apache.hadoop.hbase.TableNotDisabledException;
 027import org.apache.hadoop.hbase.TableNotEnabledException;
 028import org.apache.hadoop.hbase.TableNotFoundException;
-029import org.apache.hadoop.hbase.client.RegionInfo;
-030import org.apache.hadoop.hbase.client.TableState;
-031import org.apache.hadoop.hbase.master.MasterFileSystem;
-032import org.apache.hadoop.hbase.master.MasterServices;
-033import org.apache.hadoop.hbase.master.TableStateManager;
-034import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
-035import org.apache.hadoop.hbase.security.User;
-036import org.apache.yetus.audience.InterfaceAudience;
-037
-038/**
-039 * Base class for all the Table procedures that want to use a StateMachineProcedure.
-040 * It provides helpers like basic locking, sync latch, and toStringClassDetails().
-041 */
-042@InterfaceAudience.Private
-043public abstract class AbstractStateMachineTableProcedure<TState>
-044    extends StateMachineProcedure<MasterProcedureEnv, TState>
-045    implements TableProcedureInterface {
-046
-047  // used for compatibility with old clients
-048  private

    [11/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
    index b99f924..2bb6cea 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
    @@ -37,1779 +37,1734 @@
     029import java.util.UUID;
     030import 
    java.util.concurrent.ConcurrentHashMap;
     031import 
    java.util.concurrent.ConcurrentMap;
    -032import java.util.regex.Matcher;
    -033
    -034import 
    org.apache.commons.collections4.map.AbstractReferenceMap;
    -035import 
    org.apache.commons.collections4.map.ReferenceMap;
    -036import 
    org.apache.hadoop.conf.Configuration;
    -037import org.apache.hadoop.fs.FileSystem;
    -038import org.apache.hadoop.fs.Path;
    -039import org.apache.hadoop.hbase.Cell;
    -040import 
    org.apache.hadoop.hbase.CompareOperator;
    -041import 
    org.apache.hadoop.hbase.Coprocessor;
    -042import 
    org.apache.hadoop.hbase.HBaseConfiguration;
    -043import 
    org.apache.hadoop.hbase.HConstants;
    -044import 
    org.apache.hadoop.hbase.RawCellBuilder;
    -045import 
    org.apache.hadoop.hbase.RawCellBuilderFactory;
    -046import 
    org.apache.hadoop.hbase.ServerName;
    -047import 
    org.apache.hadoop.hbase.SharedConnection;
    -048import 
    org.apache.hadoop.hbase.client.Append;
    -049import 
    org.apache.hadoop.hbase.client.Connection;
    -050import 
    org.apache.hadoop.hbase.client.Delete;
    -051import 
    org.apache.hadoop.hbase.client.Durability;
    -052import 
    org.apache.hadoop.hbase.client.Get;
    -053import 
    org.apache.hadoop.hbase.client.Increment;
    -054import 
    org.apache.hadoop.hbase.client.Mutation;
    -055import 
    org.apache.hadoop.hbase.client.Put;
    -056import 
    org.apache.hadoop.hbase.client.RegionInfo;
    -057import 
    org.apache.hadoop.hbase.client.Result;
    -058import 
    org.apache.hadoop.hbase.client.Scan;
    -059import 
    org.apache.hadoop.hbase.client.TableDescriptor;
    -060import 
    org.apache.hadoop.hbase.coprocessor.BaseEnvironment;
    -061import 
    org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
    -062import 
    org.apache.hadoop.hbase.coprocessor.CoprocessorException;
    -063import 
    org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
    -064import 
    org.apache.hadoop.hbase.coprocessor.CoprocessorService;
    -065import 
    org.apache.hadoop.hbase.coprocessor.CoprocessorServiceBackwardCompatiblity;
    -066import 
    org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
    -067import 
    org.apache.hadoop.hbase.coprocessor.EndpointObserver;
    -068import 
    org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
    -069import 
    org.apache.hadoop.hbase.coprocessor.MetricsCoprocessor;
    -070import 
    org.apache.hadoop.hbase.coprocessor.ObserverContext;
    -071import 
    org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
    -072import 
    org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
    -073import 
    org.apache.hadoop.hbase.coprocessor.RegionObserver;
    -074import 
    org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
    -075import 
    org.apache.hadoop.hbase.filter.ByteArrayComparable;
    -076import 
    org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
    -077import 
    org.apache.hadoop.hbase.io.Reference;
    -078import 
    org.apache.hadoop.hbase.io.hfile.CacheConfig;
    -079import 
    org.apache.hadoop.hbase.metrics.MetricRegistry;
    -080import 
    org.apache.hadoop.hbase.regionserver.Region.Operation;
    -081import 
    org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
    -082import 
    org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
    -083import 
    org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
    -084import 
    org.apache.hadoop.hbase.security.User;
    -085import 
    org.apache.hbase.thirdparty.com.google.common.collect.Lists;
    -086import 
    org.apache.hadoop.hbase.util.Bytes;
    -087import 
    org.apache.hadoop.hbase.util.CoprocessorClassLoader;
    -088import 
    org.apache.hadoop.hbase.util.Pair;
    -089import 
    org.apache.hadoop.hbase.wal.WALEdit;
    -090import 
    org.apache.hadoop.hbase.wal.WALKey;
    -091import 
    org.apache.yetus.audience.InterfaceAudience;
    -092import org.slf4j.Logger;
    -093import org.slf4j.LoggerFactory;
    -094
-095/**
-096 * Implements the coprocessor environment and runtime support for coprocessors
-097 * loaded within a {@link Region}.
-098 */
-099@InterfaceAudience.Private
-100public class RegionCoprocessorHost
-101extends CoprocessorHost<RegionCoprocessor, RegionCoprocessorEnvironment> {
    -102
-103  private static final Logger LOG = LoggerFactory.getLogger(RegionCoprocessorHost.class);
-104  // The shared data map
-105  private static final ReferenceMap<String, ConcurrentMap<String, Object>> SHARED_DATA_MAP =
-106  new ReferenceMap<>(AbstractReferenceMap.ReferenceStrength.HARD,
-107  AbstractReferenceMap.ReferenceStrength.WEAK);
-108
-109  // 

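The class javadoc above is the host side; the other half of the contract is a user-supplied observer that the host loads and invokes around region operations. A minimal sketch of that observer side follows, assuming the HBase 2.x coprocessor API visible in the import list (the class name AuditingObserver and its empty hook body are invented for illustration):

    import java.io.IOException;
    import java.util.List;
    import java.util.Optional;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.coprocessor.ObserverContext;
    import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
    import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
    import org.apache.hadoop.hbase.coprocessor.RegionObserver;

    public class AuditingObserver implements RegionCoprocessor, RegionObserver {
      @Override
      public Optional<RegionObserver> getRegionObserver() {
        return Optional.of(this); // hands the host this observer's hooks
      }

      @Override
      public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> ctx,
          Get get, List<Cell> results) throws IOException {
        // Invoked by RegionCoprocessorHost inside the region, before each Get.
      }
    }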
    [11/51] [partial] hbase-site git commit: Published site at 31da4d0bce69b3a47066a5df675756087ce4dc60.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a754d895/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html 
    b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
    index 5b76e92..adccbc4 100644
    --- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
    +++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
    @@ -1216,7 +1216,7 @@ implements 
    -private boolean
    +boolean
     isClusterUp()
     
     
    @@ -2419,7 +2419,7 @@ protected static finalhttps://docs.oracle.com/javase/8/docs/api/j
     
     
     movedRegions
-protected Map<String,HRegionServer.MovedRegionInfo> movedRegions
+protected Map<String,HRegionServer.MovedRegionInfo> movedRegions
     
     
     
    @@ -2428,7 +2428,7 @@ protected static finalhttps://docs.oracle.com/javase/8/docs/api/j
     
     
TIMEOUT_REGION_MOVED
-private static final int TIMEOUT_REGION_MOVED
+private static final int TIMEOUT_REGION_MOVED
     
     See Also:
     Constant
     Field Values
    @@ -2756,8 +2756,10 @@ protected static finalhttps://docs.oracle.com/javase/8/docs/api/j
     
     
isClusterUp
-private boolean isClusterUp()
+public boolean isClusterUp()

+Specified by:
+isClusterUp in interface RegionServerServices
Returns:
True if the cluster is up.
     
    @@ -2769,7 +2771,7 @@ protected static finalhttps://docs.oracle.com/javase/8/docs/api/j
     
     
run
-public void run()
+public void run()
     The HRegionServer sticks in this loop until closed.
     
     Specified by:
    @@ -2785,7 +2787,7 @@ protected static finalhttps://docs.oracle.com/javase/8/docs/api/j
     
     
containsMetaTableRegions
-private boolean containsMetaTableRegions()
+private boolean containsMetaTableRegions()
     
     
     
    @@ -2794,7 +2796,7 @@ protected static finalhttps://docs.oracle.com/javase/8/docs/api/j
     
     
areAllUserRegionsOffline
-private boolean areAllUserRegionsOffline()
+private boolean areAllUserRegionsOffline()
     
     
     
    @@ -2803,7 +2805,7 @@ protected static finalhttps://docs.oracle.com/javase/8/docs/api/j
     
     
getWriteRequestCount
-private long getWriteRequestCount()
+private long getWriteRequestCount()
     
     Returns:
     Current write count for all online regions.
    @@ -2816,7 +2818,7 @@ protected static finalhttps://docs.oracle.com/javase/8/docs/api/j
     
     
tryRegionServerReport
-protected void tryRegionServerReport(long reportStartTime,
+protected void tryRegionServerReport(long reportStartTime,
  long reportEndTime)
   throws IOException
     
    @@ -2831,7 +2833,7 @@ protected static finalhttps://docs.oracle.com/javase/8/docs/api/j
     
     
reportRegionSizesForQuotas
-public boolean reportRegionSizesForQuotas(RegionSizeStore regionSizeStore)
+public boolean reportRegionSizesForQuotas(RegionSizeStore regionSizeStore)
Reports the given map of Regions and their size on the filesystem to the active Master.
     
     Specified by:
    @@ -2849,7 +2851,7 @@ protected static finalhttps://docs.oracle.com/javase/8/docs/api/j
     
     
buildReportAndSend
-void buildReportAndSend(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService.BlockingInterface rss,
+void buildReportAndSend(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService.BlockingInterface rss,
 RegionSizeStore regionSizeStore)
  throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException
Builds the region size report and sends it to the master. Upon successful sending of the
    @@ -2869,7 +2871,7 @@ protected static finalhttps://docs.oracle.com/javase/8/docs/api/j
     
     
buildRegionSpaceUseReportRequest
-org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportRequest buildRegionSpaceUseReportRequest(RegionSizeStore regionSizes)
+org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportRequest buildRegionSpaceUseReportRequest(RegionSizeStore regionSizes)
Builds a RegionServerStatusProtos.RegionSpaceUseReportRequest protobuf message from the region size map.
     
     Parameters:
    @@ -2885,7 +2887,7 @@ protected static finalhttps://docs.oracle.com/javase/8/docs/api/j
     
     
convertRegionSize
-org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUse convertRegionSize(RegionInfo regionInfo,

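One substantive change in this hunk: isClusterUp() moved from a private HRegionServer method to a public one specified by the RegionServerServices interface, so server-side components can now consult cluster state through the interface alone. A hedged sketch of what that enables (ClusterAwareChore and runOnce are invented names, not HBase code):

    import org.apache.hadoop.hbase.regionserver.RegionServerServices;

    final class ClusterAwareChore {
      private final RegionServerServices services;

      ClusterAwareChore(RegionServerServices services) {
        this.services = services;
      }

      void runOnce() {
        if (!services.isClusterUp()) {
          return; // cluster is going down; skip this round of periodic work
        }
        // ... periodic work against the region server would go here ...
      }
    }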
    [11/51] [partial] hbase-site git commit: Published site at 6b77786dfc46d25ac5bb5f1c8a4a9eb47b52a604.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/81cde4ce/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
     
    b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
    index b58c054..d30ee5e 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
    @@ -166,27 +166,27 @@
     
     
DataBlockEncoder.EncodedSeeker
-RowIndexCodecV1.createSeeker(CellComparator comparator,
+CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx)


DataBlockEncoder.EncodedSeeker
-CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
+PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx)


DataBlockEncoder.EncodedSeeker
-DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
+FastDiffDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx)


DataBlockEncoder.EncodedSeeker
-FastDiffDeltaEncoder.createSeeker(CellComparator comparator,
+DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx)


DataBlockEncoder.EncodedSeeker
-PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
+RowIndexCodecV1.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx)
     
     
    @@ -198,13 +198,13 @@
     
     
ByteBuffer
-RowIndexCodecV1.decodeKeyValues(DataInputStream source,
-   HFileBlockDecodingContext decodingCtx)
+BufferedDataBlockEncoder.decodeKeyValues(DataInputStream source,
+   HFileBlockDecodingContext blkDecodingCtx)


ByteBuffer
-BufferedDataBlockEncoder.decodeKeyValues(DataInputStream source,
-   HFileBlockDecodingContext blkDecodingCtx)
+RowIndexCodecV1.decodeKeyValues(DataInputStream source,
+   HFileBlockDecodingContext decodingCtx)
     
     
     
    @@ -279,18 +279,18 @@
     
     
HFileBlockDecodingContext
-HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContext fileContext)
-
-
-HFileBlockDecodingContext
 NoOpDataBlockEncoder.newDataBlockDecodingContext(HFileContext meta)

-
+
 HFileBlockDecodingContext
 HFileDataBlockEncoder.newDataBlockDecodingContext(HFileContext fileContext)
 create a encoder specific decoding context for reading.


+
+HFileBlockDecodingContext
+HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContext fileContext)
+
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/81cde4ce/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
     
    b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
    index 468913a..cbdb3c8 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
    @@ -116,36 +116,36 @@
 HFileBlockDefaultDecodingContext decodingCtx)


-protected ByteBuffer
-CopyKeyDataBlockEncoder.internalDecodeKeyValues(DataInputStream source,
+protected abstract ByteBuffer
+BufferedDataBlockEncoder.internalDecodeKeyValues(DataInputStream source,
       

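The encoder classes being reordered above (FastDiffDeltaEncoder, PrefixKeyDeltaEncoder, RowIndexCodecV1, and friends) sit behind a single user-facing switch: the data block encoding chosen per column family. A brief, hedged example of flipping that switch with the public client API (table and family names are placeholders):

    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
    import org.apache.hadoop.hbase.util.Bytes;

    // Selecting FAST_DIFF routes reads of this family's blocks through
    // FastDiffDeltaEncoder.createSeeker(...) shown in the diff above.
    ColumnFamilyDescriptor cf = ColumnFamilyDescriptorBuilder
        .newBuilder(Bytes.toBytes("cf"))
        .setDataBlockEncoding(DataBlockEncoding.FAST_DIFF)
        .build();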
    [11/51] [partial] hbase-site git commit: Published site at 1384da71375427b522b09f06862bb5d629cef52f.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d347bde8/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
    b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
    index 3b7627b..5f975db 100644
    --- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
    +++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
    @@ -151,119 +151,119 @@
     
     
Filter.ReturnCode
-FilterListWithAND.filterCell(Cell c)
+ColumnPrefixFilter.filterCell(Cell cell)


Filter.ReturnCode
-ValueFilter.filterCell(Cell c)
+ColumnCountGetFilter.filterCell(Cell c)


Filter.ReturnCode
-SkipFilter.filterCell(Cell c)
+RowFilter.filterCell(Cell v)


Filter.ReturnCode
-FamilyFilter.filterCell(Cell c)
+FuzzyRowFilter.filterCell(Cell c)


Filter.ReturnCode
-ColumnPrefixFilter.filterCell(Cell cell)
+Filter.filterCell(Cell c)
+A way to filter based on the column family, column qualifier and/or the column value.
+


Filter.ReturnCode
-PageFilter.filterCell(Cell ignored)
+RandomRowFilter.filterCell(Cell c)


Filter.ReturnCode
-RowFilter.filterCell(Cell v)
+FirstKeyOnlyFilter.filterCell(Cell c)


Filter.ReturnCode
-ColumnRangeFilter.filterCell(Cell c)
+SkipFilter.filterCell(Cell c)


Filter.ReturnCode
-ColumnCountGetFilter.filterCell(Cell c)
+TimestampsFilter.filterCell(Cell c)


Filter.ReturnCode
-MultipleColumnPrefixFilter.filterCell(Cell c)
+ValueFilter.filterCell(Cell c)


Filter.ReturnCode
-ColumnPaginationFilter.filterCell(Cell c)
+KeyOnlyFilter.filterCell(Cell ignored)


Filter.ReturnCode
-DependentColumnFilter.filterCell(Cell c)
+FamilyFilter.filterCell(Cell c)


Filter.ReturnCode
-FilterListWithOR.filterCell(Cell c)
+QualifierFilter.filterCell(Cell c)


Filter.ReturnCode
-InclusiveStopFilter.filterCell(Cell c)
+FilterList.filterCell(Cell c)


Filter.ReturnCode
-KeyOnlyFilter.filterCell(Cell ignored)
+ColumnRangeFilter.filterCell(Cell c)


Filter.ReturnCode
-MultiRowRangeFilter.filterCell(Cell ignored)
+ColumnPaginationFilter.filterCell(Cell c)


Filter.ReturnCode
-Filter.filterCell(Cell c)
-A way to filter based on the column family, column qualifier and/or the column value.
-
+FilterListWithAND.filterCell(Cell c)


Filter.ReturnCode
-FirstKeyOnlyFilter.filterCell(Cell c)
+WhileMatchFilter.filterCell(Cell c)


Filter.ReturnCode
-WhileMatchFilter.filterCell(Cell c)
+ColumnValueFilter.filterCell(Cell c)


Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterCell(Cell c)
-Deprecated.
-
+MultiRowRangeFilter.filterCell(Cell ignored)


Filter.ReturnCode
-TimestampsFilter.filterCell(Cell c)
+PrefixFilter.filterCell(Cell c)


Filter.ReturnCode
-FuzzyRowFilter.filterCell(Cell c)
+DependentColumnFilter.filterCell(Cell c)


Filter.ReturnCode
-FilterList.filterCell(Cell c)
+FirstKeyValueMatchingQualifiersFilter.filterCell(Cell c)
+Deprecated.
+


Filter.ReturnCode
-RandomRowFilter.filterCell(Cell c)
+PageFilter.filterCell(Cell ignored)


Filter.ReturnCode
-PrefixFilter.filterCell(Cell c)
+FilterListWithOR.filterCell(Cell c)


Filter.ReturnCode
-SingleColumnValueFilter.filterCell(Cell c)
+InclusiveStopFilter.filterCell(Cell c)


Filter.ReturnCode
-ColumnValueFilter.filterCell(Cell c)
+MultipleColumnPrefixFilter.filterCell(Cell c)


Filter.ReturnCode
-QualifierFilter.filterCell(Cell c)
+SingleColumnValueFilter.filterCell(Cell c)


Filter.ReturnCode
    @@ -279,158 +279,158 @@
     
     
Filter.ReturnCode
-ValueFilter.filterKeyValue(Cell c)
+ColumnPrefixFilter.filterKeyValue(Cell c)
 Deprecated.


Filter.ReturnCode
-SkipFilter.filterKeyValue(Cell c)
+ColumnCountGetFilter.filterKeyValue(Cell c)
 Deprecated.


Filter.ReturnCode
-FilterListBase.filterKeyValue(Cell c)
+RowFilter.filterKeyValue(Cell c)
+Deprecated.
+


Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cell c)
+FuzzyRowFilter.filterKeyValue(Cell c)
 Deprecated.


Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cell c)
-Deprecated.
+Filter.filterKeyValue(Cell c)
+Deprecated.
+As of release 2.0.0, this will be removed in HBase 3.0.0.
+ Instead use filterCell(Cell)
+


Filter.ReturnCode
-PageFilter.filterKeyValue(Cell c)
+RandomRowFilter.filterKeyValue(Cell c)
 Deprecated.


Filter.ReturnCode
-RowFilter.filterKeyValue(Cell c)
+FirstKeyOnlyFilter.filterKeyValue(Cell c)
 Deprecated.


Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cell c)
+SkipFilter.filterKeyValue(Cell c)
 Deprecated.


Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cell c)
+TimestampsFilter.filterKeyValue(Cell c)
 Deprecated.


Filter.ReturnCode
-MultipleColumnPrefixFilter.filterKeyValue(Cell c)
+ValueFilter.filterKeyValue(Cell c)
 Deprecated.


Filter.ReturnCode
-ColumnPaginationFilter.filterKeyValue(Cell c)
+KeyOnlyFilter.filterKeyValue(Cell ignored)
 Deprecated.


Filter.ReturnCode
    

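The recurring change in this table is an API migration: filterKeyValue(Cell) is deprecated as of 2.0.0 in favor of filterCell(Cell), which every shipped filter now overrides. A hedged sketch of a custom filter written against the new hook; the class QualifierPrefixFilter and its prefix rule are invented for illustration:

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.filter.FilterBase;
    import org.apache.hadoop.hbase.util.Bytes;

    public class QualifierPrefixFilter extends FilterBase {
      private final byte[] prefix;

      public QualifierPrefixFilter(byte[] prefix) {
        this.prefix = prefix;
      }

      @Override
      public ReturnCode filterCell(Cell c) {
        // Keep cells whose qualifier starts with the prefix; skip the rest.
        byte[] qualifier = CellUtil.cloneQualifier(c);
        return Bytes.startsWith(qualifier, prefix) ? ReturnCode.INCLUDE : ReturnCode.SKIP;
      }
    }

A production filter would also carry serialization (toByteArray() and a static parseFrom) so it can ship to the server; that plumbing is elided here.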
    [11/51] [partial] hbase-site git commit: Published site at b7b86839250bf9b295ebc1948826f43a88736d6c.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6b94a2f2/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.Visitor.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.Visitor.html 
    b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.Visitor.html
    index df5fa53..8fffb89 100644
    --- a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.Visitor.html
    +++ b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.Visitor.html
    @@ -42,1927 +42,2060 @@
     034import java.util.TreeMap;
     035import java.util.regex.Matcher;
     036import java.util.regex.Pattern;
    -037import 
    org.apache.hadoop.conf.Configuration;
    -038import 
    org.apache.hadoop.hbase.Cell.Type;
    -039import 
    org.apache.hadoop.hbase.client.Connection;
    -040import 
    org.apache.hadoop.hbase.client.ConnectionFactory;
    -041import 
    org.apache.hadoop.hbase.client.Consistency;
    -042import 
    org.apache.hadoop.hbase.client.Delete;
    -043import 
    org.apache.hadoop.hbase.client.Get;
    -044import 
    org.apache.hadoop.hbase.client.Mutation;
    -045import 
    org.apache.hadoop.hbase.client.Put;
    -046import 
    org.apache.hadoop.hbase.client.RegionInfo;
    -047import 
    org.apache.hadoop.hbase.client.RegionInfoBuilder;
    -048import 
    org.apache.hadoop.hbase.client.RegionLocator;
    -049import 
    org.apache.hadoop.hbase.client.RegionReplicaUtil;
    -050import 
    org.apache.hadoop.hbase.client.RegionServerCallable;
    -051import 
    org.apache.hadoop.hbase.client.Result;
    -052import 
    org.apache.hadoop.hbase.client.ResultScanner;
    -053import 
    org.apache.hadoop.hbase.client.Scan;
    -054import 
    org.apache.hadoop.hbase.client.Table;
    -055import 
    org.apache.hadoop.hbase.client.TableState;
    -056import 
    org.apache.hadoop.hbase.exceptions.DeserializationException;
    -057import 
    org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
    -058import 
    org.apache.hadoop.hbase.master.RegionState;
    -059import 
    org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    -060import 
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
    -061import 
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
    -062import 
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
    -063import 
    org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
    -064import 
    org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;
    -065import 
    org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse;
    -066import 
    org.apache.hadoop.hbase.util.Bytes;
    -067import 
    org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
    -068import 
    org.apache.hadoop.hbase.util.ExceptionUtil;
    -069import 
    org.apache.hadoop.hbase.util.Pair;
    -070import 
    org.apache.hadoop.hbase.util.PairOfSameType;
    -071import 
    org.apache.yetus.audience.InterfaceAudience;
    -072import org.slf4j.Logger;
    -073import org.slf4j.LoggerFactory;
    -074
    -075import 
    org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
    -076
-077/**
-078 * <p>
-079 * Read/write operations on region and assignment information store in <code>hbase:meta</code>.
-080 * </p>
    +037import java.util.stream.Collectors;
    +038import java.util.stream.Stream;
    +039import 
    org.apache.hadoop.conf.Configuration;
    +040import 
    org.apache.hadoop.hbase.Cell.Type;
    +041import 
    org.apache.hadoop.hbase.client.Connection;
    +042import 
    org.apache.hadoop.hbase.client.ConnectionFactory;
    +043import 
    org.apache.hadoop.hbase.client.Consistency;
    +044import 
    org.apache.hadoop.hbase.client.Delete;
    +045import 
    org.apache.hadoop.hbase.client.Get;
    +046import 
    org.apache.hadoop.hbase.client.Mutation;
    +047import 
    org.apache.hadoop.hbase.client.Put;
    +048import 
    org.apache.hadoop.hbase.client.RegionInfo;
    +049import 
    org.apache.hadoop.hbase.client.RegionInfoBuilder;
    +050import 
    org.apache.hadoop.hbase.client.RegionLocator;
    +051import 
    org.apache.hadoop.hbase.client.RegionReplicaUtil;
    +052import 
    org.apache.hadoop.hbase.client.RegionServerCallable;
    +053import 
    org.apache.hadoop.hbase.client.Result;
    +054import 
    org.apache.hadoop.hbase.client.ResultScanner;
    +055import 
    org.apache.hadoop.hbase.client.Scan;
    +056import 
    org.apache.hadoop.hbase.client.Table;
    +057import 
    org.apache.hadoop.hbase.client.TableState;
    +058import 
    org.apache.hadoop.hbase.exceptions.DeserializationException;
    +059import 
    org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
    +060import 
    org.apache.hadoop.hbase.master.RegionState;
    +061import 
    org.apache.hadoop.hbase.master.RegionState.State;
    +062import 
    org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    +063import 
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
    +064import 
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
    +065import 
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
    +066import 
    org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
    +067import 
    org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;
    +068import 
    

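The class being rewritten here, MetaTableAccessor, is the Private-audience gateway for reading and writing region rows in hbase:meta. Client code outside HBase would not call it directly; a hedged equivalent with the public API (connection setup elided, class and method names invented) looks like this:

    import java.io.IOException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.client.Table;

    public final class MetaScanExample {
      static void dumpMetaRows(Connection conn) throws IOException {
        try (Table meta = conn.getTable(TableName.META_TABLE_NAME);
             ResultScanner scanner = meta.getScanner(new Scan())) {
          for (Result r : scanner) {
            // Row key is the region name; info:regioninfo carries the serialized RegionInfo.
            System.out.println(r);
          }
        }
      }
    }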
    [11/51] [partial] hbase-site git commit: Published site at 1d25b60831b8cc8f7ad5fd366f1867de5c20d2f3.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb05e3e3/apidocs/org/apache/hadoop/hbase/client/RequestController.ReturnCode.html
    --
    diff --git 
    a/apidocs/org/apache/hadoop/hbase/client/RequestController.ReturnCode.html 
    b/apidocs/org/apache/hadoop/hbase/client/RequestController.ReturnCode.html
    index 7b546c6..5b91331 100644
    --- a/apidocs/org/apache/hadoop/hbase/client/RequestController.ReturnCode.html
    +++ b/apidocs/org/apache/hadoop/hbase/client/RequestController.ReturnCode.html
    @@ -97,10 +97,10 @@ var activeTableTab = "activeTableTab";
     
     
     
-java.lang.Object (javadoc linked via http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html)
+java.lang.Object (javadoc linked via https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html)


-java.lang.Enum<RequestController.ReturnCode> (http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html)
+java.lang.Enum<RequestController.ReturnCode> (https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html)
     
     
     org.apache.hadoop.hbase.client.RequestController.ReturnCode
    @@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
     
     
     All Implemented Interfaces:
-Serializable (http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html), Comparable<RequestController.ReturnCode> (http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html)
+Serializable (https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html), Comparable<RequestController.ReturnCode> (https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html)
     
     
     Enclosing interface:
    @@ -124,7 +124,7 @@ var activeTableTab = "activeTableTab";
     
     @InterfaceAudience.Public
     public static enum RequestController.ReturnCode
-extends Enum<RequestController.ReturnCode> (Enum javadoc: http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html)
+extends Enum<RequestController.ReturnCode> (Enum javadoc: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html)
     
     
     
    @@ -174,7 +174,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is
     
     
     static RequestController.ReturnCode
-valueOf(String name)  (String javadoc: http://docs.oracle.com/javase/8/docs/api/java/lang/String.html)
+valueOf(String name)  (String javadoc: https://docs.oracle.com/javase/8/docs/api/java/lang/String.html)
     Returns the enum constant of this type with the specified 
    name.
     
     
    @@ -190,15 +190,15 @@ the order they are declared.
     
     
     
-Methods inherited from class java.lang.Enum (http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html):
-clone, compareTo, equals, finalize, getDeclaringClass, hashCode, name, ordinal, toString, valueOf
+Methods inherited from class java.lang.Enum (https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html):
+clone, 

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.CheckAndMutateBuilderImpl.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.CheckAndMutateBuilderImpl.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.CheckAndMutateBuilderImpl.html
    index 4d03740..2f29cd8 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.CheckAndMutateBuilderImpl.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.CheckAndMutateBuilderImpl.html
    @@ -170,1134 +170,1131 @@
     162  final RpcRetryingCallerFactory 
    rpcCallerFactory,
     163  final RpcControllerFactory 
    rpcControllerFactory,
     164  final ExecutorService pool) {
-165if (connection == null || connection.isClosed()) {
-166  throw new IllegalArgumentException("Connection is null or closed.");
-167}
    -168this.connection = connection;
    -169this.configuration = 
    connection.getConfiguration();
    -170this.connConfiguration = 
    connection.getConnectionConfiguration();
    -171if (pool == null) {
    -172  this.pool = 
    getDefaultExecutor(this.configuration);
    -173  this.cleanupPoolOnClose = true;
    -174} else {
    -175  this.pool = pool;
    -176  this.cleanupPoolOnClose = false;
    -177}
    -178if (rpcCallerFactory == null) {
    -179  this.rpcCallerFactory = 
    connection.getNewRpcRetryingCallerFactory(configuration);
    -180} else {
    -181  this.rpcCallerFactory = 
    rpcCallerFactory;
    -182}
    -183
    -184if (rpcControllerFactory == null) {
    -185  this.rpcControllerFactory = 
    RpcControllerFactory.instantiate(configuration);
    -186} else {
    -187  this.rpcControllerFactory = 
    rpcControllerFactory;
    -188}
    -189
    -190this.tableName = builder.tableName;
    -191this.operationTimeoutMs = 
    builder.operationTimeout;
    -192this.rpcTimeoutMs = 
    builder.rpcTimeout;
    -193this.readRpcTimeoutMs = 
    builder.readRpcTimeout;
    -194this.writeRpcTimeoutMs = 
    builder.writeRpcTimeout;
    -195this.scannerCaching = 
    connConfiguration.getScannerCaching();
    -196this.scannerMaxResultSize = 
    connConfiguration.getScannerMaxResultSize();
    -197
    -198// puts need to track errors globally 
    due to how the APIs currently work.
    -199multiAp = 
    this.connection.getAsyncProcess();
    -200this.locator = new 
    HRegionLocator(tableName, connection);
    -201  }
    -202
    -203  /**
    -204   * @return maxKeyValueSize from 
    configuration.
    -205   */
    -206  public static int 
    getMaxKeyValueSize(Configuration conf) {
    -207return 
    conf.getInt(ConnectionConfiguration.MAX_KEYVALUE_SIZE_KEY, -1);
    -208  }
    -209
    -210  @Override
    -211  public Configuration getConfiguration() 
    {
    -212return configuration;
    -213  }
    -214
    -215  @Override
    -216  public TableName getName() {
    -217return tableName;
    -218  }
    -219
-220  /**
-221   * <em>INTERNAL</em> Used by unit tests and tools to do low-level
-222   * manipulations.
-223   * @return A Connection instance.
-224   */
    -225  @VisibleForTesting
    -226  protected Connection getConnection() 
    {
    -227return this.connection;
    -228  }
    -229
    -230  @Override
    -231  @Deprecated
    -232  public HTableDescriptor 
    getTableDescriptor() throws IOException {
    -233HTableDescriptor htd = 
    HBaseAdmin.getHTableDescriptor(tableName, connection, rpcCallerFactory,
    -234  rpcControllerFactory, 
    operationTimeoutMs, readRpcTimeoutMs);
    -235if (htd != null) {
    -236  return new 
    ImmutableHTableDescriptor(htd);
    -237}
    -238return null;
    -239  }
    -240
    -241  @Override
    -242  public TableDescriptor getDescriptor() 
    throws IOException {
    -243return 
    HBaseAdmin.getTableDescriptor(tableName, connection, rpcCallerFactory,
    -244  rpcControllerFactory, 
    operationTimeoutMs, readRpcTimeoutMs);
    -245  }
    -246
-247  /**
-248   * Get the corresponding start keys and regions for an arbitrary range of
-249   * keys.
-250   * <p>
-251   * @param startKey Starting row in range, inclusive
-252   * @param endKey Ending row in range
-253   * @param includeEndKey true if endRow is inclusive, false if exclusive
-254   * @return A pair of list of start keys and list of HRegionLocations that
-255   * contain the specified range
-256   * @throws IOException if a remote or network exception occurs
-257   */
-258  private Pair<List<byte[]>, List<HRegionLocation>> getKeysAndRegionsInRange(
-259  final byte[] startKey, final byte[] endKey, final boolean includeEndKey)
-260  throws IOException {
-261return getKeysAndRegionsInRange(startKey, endKey, includeEndKey, false);
-262  }
    -263
-264  /**
-265   * Get the corresponding start keys and regions for an arbitrary range of
-266   * keys.
-267   * <p>
-268   * @param startKey Starting row in range, inclusive
-269   * @param endKey Ending row in range
-270   * @param includeEndKey true if endRow is inclusive, false if exclusive
-271   * @param reload true to reload information or false to use cached 

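Since getKeysAndRegionsInRange above is a private helper, application code wanting the same region layout information would go through RegionLocator instead. A hedged sketch (RegionRangeLookup and printRegionsOfTable are invented names):

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.hbase.HRegionLocation;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.RegionLocator;

    final class RegionRangeLookup {
      static void printRegionsOfTable(Connection conn, TableName table) throws IOException {
        try (RegionLocator locator = conn.getRegionLocator(table)) {
          List<HRegionLocation> all = locator.getAllRegionLocations();
          for (HRegionLocation loc : all) {
            // One entry per region; each carries its start/end keys and server.
            System.out.println(loc.getRegion().getRegionNameAsString());
          }
        }
      }
    }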
    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/991224b9/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
    index 802b925..a3e80ab 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
    @@ -73,229 +73,229 @@
     065import 
    java.util.concurrent.TimeoutException;
     066import 
    java.util.concurrent.atomic.AtomicBoolean;
     067import 
    java.util.concurrent.atomic.AtomicInteger;
    -068import 
    java.util.concurrent.atomic.AtomicLong;
    -069import 
    java.util.concurrent.atomic.LongAdder;
    -070import java.util.concurrent.locks.Lock;
    -071import 
    java.util.concurrent.locks.ReadWriteLock;
    -072import 
    java.util.concurrent.locks.ReentrantReadWriteLock;
    -073import java.util.function.Function;
    -074import 
    org.apache.hadoop.conf.Configuration;
    -075import org.apache.hadoop.fs.FileStatus;
    -076import org.apache.hadoop.fs.FileSystem;
    -077import 
    org.apache.hadoop.fs.LocatedFileStatus;
    -078import org.apache.hadoop.fs.Path;
    -079import org.apache.hadoop.hbase.Cell;
    -080import 
    org.apache.hadoop.hbase.CellBuilderType;
    -081import 
    org.apache.hadoop.hbase.CellComparator;
    -082import 
    org.apache.hadoop.hbase.CellComparatorImpl;
    -083import 
    org.apache.hadoop.hbase.CellScanner;
    -084import 
    org.apache.hadoop.hbase.CellUtil;
    -085import 
    org.apache.hadoop.hbase.CompareOperator;
    -086import 
    org.apache.hadoop.hbase.CompoundConfiguration;
    -087import 
    org.apache.hadoop.hbase.DoNotRetryIOException;
    -088import 
    org.apache.hadoop.hbase.DroppedSnapshotException;
    -089import 
    org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
    -090import 
    org.apache.hadoop.hbase.HConstants;
    -091import 
    org.apache.hadoop.hbase.HConstants.OperationStatusCode;
    -092import 
    org.apache.hadoop.hbase.HDFSBlocksDistribution;
    -093import 
    org.apache.hadoop.hbase.HRegionInfo;
    -094import 
    org.apache.hadoop.hbase.KeyValue;
    -095import 
    org.apache.hadoop.hbase.KeyValueUtil;
    -096import 
    org.apache.hadoop.hbase.NamespaceDescriptor;
    -097import 
    org.apache.hadoop.hbase.NotServingRegionException;
    -098import 
    org.apache.hadoop.hbase.PrivateCellUtil;
    -099import 
    org.apache.hadoop.hbase.RegionTooBusyException;
    -100import 
    org.apache.hadoop.hbase.TableName;
    -101import org.apache.hadoop.hbase.Tag;
    -102import org.apache.hadoop.hbase.TagUtil;
    -103import 
    org.apache.hadoop.hbase.UnknownScannerException;
    -104import 
    org.apache.hadoop.hbase.client.Append;
    -105import 
    org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
    -106import 
    org.apache.hadoop.hbase.client.CompactionState;
    -107import 
    org.apache.hadoop.hbase.client.Delete;
    -108import 
    org.apache.hadoop.hbase.client.Durability;
    -109import 
    org.apache.hadoop.hbase.client.Get;
    -110import 
    org.apache.hadoop.hbase.client.Increment;
    -111import 
    org.apache.hadoop.hbase.client.IsolationLevel;
    -112import 
    org.apache.hadoop.hbase.client.Mutation;
    -113import 
    org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;
    -114import 
    org.apache.hadoop.hbase.client.Put;
    -115import 
    org.apache.hadoop.hbase.client.RegionInfo;
    -116import 
    org.apache.hadoop.hbase.client.RegionReplicaUtil;
    -117import 
    org.apache.hadoop.hbase.client.Result;
    -118import 
    org.apache.hadoop.hbase.client.RowMutations;
    -119import 
    org.apache.hadoop.hbase.client.Scan;
    -120import 
    org.apache.hadoop.hbase.client.TableDescriptor;
    -121import 
    org.apache.hadoop.hbase.client.TableDescriptorBuilder;
    -122import 
    org.apache.hadoop.hbase.conf.ConfigurationManager;
    -123import 
    org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
    -124import 
    org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
    -125import 
    org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
    -126import 
    org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
    -127import 
    org.apache.hadoop.hbase.exceptions.TimeoutIOException;
    -128import 
    org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
    -129import 
    org.apache.hadoop.hbase.filter.ByteArrayComparable;
    -130import 
    org.apache.hadoop.hbase.filter.FilterWrapper;
    -131import 
    org.apache.hadoop.hbase.filter.IncompatibleFilterException;
    -132import 
    org.apache.hadoop.hbase.io.HFileLink;
    -133import 
    org.apache.hadoop.hbase.io.HeapSize;
    -134import 
    org.apache.hadoop.hbase.io.TimeRange;
    -135import 
    org.apache.hadoop.hbase.io.hfile.HFile;
    -136import 
    org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
    -137import 
    org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
    -138import 
    org.apache.hadoop.hbase.ipc.RpcCall;
    -139import 
    org.apache.hadoop.hbase.ipc.RpcServer;
    -140import 
    org.apache.hadoop.hbase.monitoring.MonitoredTask;
    -141import 
    org.apache.hadoop.hbase.monitoring.TaskMonitor;
    -142import 
    

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/193b4259/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
    index bd13b53..802b925 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
    @@ -900,7600 +900,7598 @@
     892if 
    (this.getRegionInfo().getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {
     893  status.setStatus("Writing region 
    info on filesystem");
     894  fs.checkRegionInfoOnFilesystem();
    -895} else {
    -896  if (LOG.isDebugEnabled()) {
    -897LOG.debug("Skipping creation of 
    .regioninfo file for " + this.getRegionInfo());
    -898  }
    -899}
    -900
    -901// Initialize all the HStores
    -902status.setStatus("Initializing all 
    the Stores");
    -903long maxSeqId = 
    initializeStores(reporter, status);
    -904this.mvcc.advanceTo(maxSeqId);
    -905if 
    (ServerRegionReplicaUtil.shouldReplayRecoveredEdits(this)) {
-906  Collection<HStore> stores = this.stores.values();
    -907  try {
    -908// update the stores that we are 
    replaying
    -909
    stores.forEach(HStore::startReplayingFromWAL);
    -910// Recover any edits if 
    available.
    -911maxSeqId = Math.max(maxSeqId,
    -912  
    replayRecoveredEditsIfAny(this.fs.getRegionDir(), maxSeqIdInStores, reporter, 
    status));
    -913// Make sure mvcc is up to max.
    -914this.mvcc.advanceTo(maxSeqId);
    -915  } finally {
    -916// update the stores that we are 
    done replaying
    -917
    stores.forEach(HStore::stopReplayingFromWAL);
    -918  }
    -919}
    -920this.lastReplayedOpenRegionSeqId = 
    maxSeqId;
    +895}
    +896
    +897// Initialize all the HStores
    +898status.setStatus("Initializing all 
    the Stores");
    +899long maxSeqId = 
    initializeStores(reporter, status);
    +900this.mvcc.advanceTo(maxSeqId);
    +901if 
    (ServerRegionReplicaUtil.shouldReplayRecoveredEdits(this)) {
+902  Collection<HStore> stores = this.stores.values();
    +903  try {
    +904// update the stores that we are 
    replaying
    +905
    stores.forEach(HStore::startReplayingFromWAL);
    +906// Recover any edits if 
    available.
    +907maxSeqId = Math.max(maxSeqId,
    +908  
    replayRecoveredEditsIfAny(this.fs.getRegionDir(), maxSeqIdInStores, reporter, 
    status));
    +909// Make sure mvcc is up to max.
    +910this.mvcc.advanceTo(maxSeqId);
    +911  } finally {
    +912// update the stores that we are 
    done replaying
    +913
    stores.forEach(HStore::stopReplayingFromWAL);
    +914  }
    +915}
    +916this.lastReplayedOpenRegionSeqId = 
    maxSeqId;
    +917
    +918
    this.writestate.setReadOnly(ServerRegionReplicaUtil.isReadOnly(this));
    +919this.writestate.flushRequested = 
    false;
    +920this.writestate.compacting.set(0);
     921
    -922
    this.writestate.setReadOnly(ServerRegionReplicaUtil.isReadOnly(this));
    -923this.writestate.flushRequested = 
    false;
    -924this.writestate.compacting.set(0);
    -925
    -926if (this.writestate.writesEnabled) 
    {
    -927  // Remove temporary data left over 
    from old regions
    -928  status.setStatus("Cleaning up 
    temporary data from old regions");
    -929  fs.cleanupTempDir();
    -930}
    -931
    -932if (this.writestate.writesEnabled) 
    {
    -933  status.setStatus("Cleaning up 
    detritus from prior splits");
    -934  // Get rid of any splits or merges 
    that were lost in-progress.  Clean out
    -935  // these directories here on open.  
    We may be opening a region that was
    -936  // being split but we crashed in 
    the middle of it all.
    -937  fs.cleanupAnySplitDetritus();
    -938  fs.cleanupMergesDir();
    -939}
    -940
    -941// Initialize split policy
    -942this.splitPolicy = 
    RegionSplitPolicy.create(this, conf);
    -943
    -944// Initialize flush policy
    -945this.flushPolicy = 
    FlushPolicyFactory.create(this, conf);
    -946
    -947long lastFlushTime = 
    EnvironmentEdgeManager.currentTime();
    -948for (HStore store: stores.values()) 
    {
    -949  
    this.lastStoreFlushTimeMap.put(store, lastFlushTime);
    -950}
    -951
    -952// Use maximum of log sequenceid or 
    that which was found in stores
    -953// (particularly if no recovered 
    edits, seqid will be -1).
    -954long nextSeqid = maxSeqId;
    -955if (this.writestate.writesEnabled) 
    {
    -956  nextSeqid = 
    WALSplitter.writeRegionSequenceIdFile(this.fs.getFileSystem(),
    -957  this.fs.getRegionDir(), 
    nextSeqid, 1);
    -958} else {
    -959  nextSeqid++;
    -960}
    -961
    -962LOG.info("Onlined " + 
    this.getRegionInfo().getShortNameToLog() +
    -963  "; next sequenceid=" + 
    nextSeqid);
    +922if 

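A note on the sequence-id bookkeeping visible above: the region resumes from the larger of the stores' max sequence id and whatever replayRecoveredEditsIfAny recovered from the WAL, then advances past it so new writes sort after everything already persisted. A hedged toy computation (values invented, not HBase code):

    long maxSeqIdFromStores = 41L;  // highest id flushed into HFiles
    long maxSeqIdFromWal    = 57L;  // highest id replayed from recovered edits
    long maxSeqId  = Math.max(maxSeqIdFromStores, maxSeqIdFromWal);
    long nextSeqid = maxSeqId + 1;  // 58: first sequence id handed to new writes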
    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/94208cfe/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterCoprocessorHost.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterCoprocessorHost.html
     
    b/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterCoprocessorHost.html
    index 2f099b5..1687516 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterCoprocessorHost.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterCoprocessorHost.html
    @@ -121,11 +121,11 @@
     
     
     MasterCoprocessorHost
    -MasterServices.getMasterCoprocessorHost()
    +HMaster.getMasterCoprocessorHost()
     
     
     MasterCoprocessorHost
    -HMaster.getMasterCoprocessorHost()
    +MasterServices.getMasterCoprocessorHost()
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/94208cfe/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterFileSystem.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterFileSystem.html 
    b/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterFileSystem.html
    index dc3868a..8769577 100644
    --- a/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterFileSystem.html
    +++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterFileSystem.html
    @@ -121,11 +121,11 @@
     
     
     MasterFileSystem
    -MasterServices.getMasterFileSystem()
    +HMaster.getMasterFileSystem()
     
     
     MasterFileSystem
    -HMaster.getMasterFileSystem()
    +MasterServices.getMasterFileSystem()
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/94208cfe/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterServices.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterServices.html 
    b/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterServices.html
    index 317f689..48453a5 100644
    --- a/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterServices.html
    +++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterServices.html
    @@ -309,13 +309,13 @@
     
     
     
    -private MasterServices
    -ServerManager.master
    -
    -
     (package private) MasterServices
     TableStateManager.master
     
    +
    +private MasterServices
    +ServerManager.master
    +
     
     private MasterServices
     MasterCoprocessorHost.masterServices
    @@ -334,15 +334,15 @@
     
     
     private MasterServices
    -SplitLogManager.server
    +RegionServerTracker.server
     
     
     private MasterServices
    -RegionServerTracker.server
    +SplitLogManager.server
     
     
     private MasterServices
    -MasterWalManager.services
    +MasterCoprocessorHost.MasterEnvironment.services
     
     
     private MasterServices
    @@ -350,7 +350,7 @@
     
     
     private MasterServices
    -MasterCoprocessorHost.MasterEnvironment.services
    +MasterWalManager.services
     
     
     
    @@ -532,13 +532,13 @@
     StochasticLoadBalancer.LocalityBasedCostFunction.services
     
     
    -private MasterServices
    -RegionLocationFinder.services
    -
    -
     protected MasterServices
     BaseLoadBalancer.services
     
    +
    +private MasterServices
    +RegionLocationFinder.services
    +
     
     
     
    @@ -550,15 +550,15 @@
     
     
void
-FavoredStochasticBalancer.setMasterServices(MasterServices masterServices)
+StochasticLoadBalancer.setMasterServices(MasterServices masterServices)


void
-StochasticLoadBalancer.setMasterServices(MasterServices masterServices)
+BaseLoadBalancer.setMasterServices(MasterServices masterServices)


void
-BaseLoadBalancer.setMasterServices(MasterServices masterServices)
+FavoredStochasticBalancer.setMasterServices(MasterServices masterServices)
     
     
     (package private) void
    @@ -696,16 +696,16 @@
     
     
     private MasterServices
    -RecoverMetaProcedure.master
    -
    -
    -private MasterServices
     MasterProcedureUtil.NonceProcedureRunnable.master
     
    -
    +
     protected MasterServices
     RSProcedureDispatcher.master
     
    +
    +private MasterServices
    +RecoverMetaProcedure.master
    +
     
     
     
    @@ -883,17 +883,17 @@
     
     
     
-void
-MasterProcedureManagerHost.initialize(MasterServices master,
-  MetricsMaster metricsMaster)
-
-
 abstract void
 MasterProcedureManager.initialize(MasterServices master,
   MetricsMaster metricsMaster)
 Initialize a globally barriered procedure for master.


+
+void
+MasterProcedureManagerHost.initialize(MasterServices master,
+  MetricsMaster metricsMaster)
+
     
     
     
    @@ -971,19 +971,19 @@
     
     
     private MasterServices
    -RSGroupAdminServer.master
    +RSGroupAdminEndpoint.master
     
     
     private MasterServices
    -RSGroupAdminEndpoint.master
    +RSGroupAdminServer.master
     
     
     private MasterServices
    -RSGroupBasedLoadBalancer.masterServices
    +RSGroupInfoManagerImpl.masterServices
     
     
     private MasterServices
    -RSGroupInfoManagerImpl.masterServices
    +RSGroupBasedLoadBalancer.masterServices
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/94208cfe/devapidocs/org/apache/hadoop/hbase/master/class-use/MasterWalManager.html
    --
    diff --git 
    

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cd17dc5/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
     
    b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
    index 7161108..fe5ef34 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
    @@ -166,27 +166,27 @@
     
     
DataBlockEncoder.EncodedSeeker
-CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
+RowIndexCodecV1.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx)


DataBlockEncoder.EncodedSeeker
-PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
+CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx)


DataBlockEncoder.EncodedSeeker
-FastDiffDeltaEncoder.createSeeker(CellComparator comparator,
+DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx)


DataBlockEncoder.EncodedSeeker
-DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
+FastDiffDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx)


DataBlockEncoder.EncodedSeeker
-RowIndexCodecV1.createSeeker(CellComparator comparator,
+PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx)
     
     
    @@ -198,13 +198,13 @@
     
     
ByteBuffer
-BufferedDataBlockEncoder.decodeKeyValues(DataInputStream source,
-   HFileBlockDecodingContext blkDecodingCtx)
+RowIndexCodecV1.decodeKeyValues(DataInputStream source,
+   HFileBlockDecodingContext decodingCtx)


ByteBuffer
-RowIndexCodecV1.decodeKeyValues(DataInputStream source,
-   HFileBlockDecodingContext decodingCtx)
+BufferedDataBlockEncoder.decodeKeyValues(DataInputStream source,
+   HFileBlockDecodingContext blkDecodingCtx)
     
     
     
    @@ -279,17 +279,17 @@
     
     
     HFileBlockDecodingContext
-NoOpDataBlockEncoder.newDataBlockDecodingContext(HFileContext meta)
+HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContext fileContext)
 
 
 HFileBlockDecodingContext
-HFileDataBlockEncoder.newDataBlockDecodingContext(HFileContext fileContext)
-Create an encoder-specific decoding context for 
reading.
-
+NoOpDataBlockEncoder.newDataBlockDecodingContext(HFileContext meta)
 
 
 HFileBlockDecodingContext
-HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContext fileContext)
+HFileDataBlockEncoder.newDataBlockDecodingContext(HFileContext fileContext)
+Create an encoder-specific decoding context for 
reading.
+
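
A minimal sketch of how the createSeeker / newDataBlockDecodingContext pair
above is typically used to read an encoded block; the encoder choice, the
HFileContext setup, and CellComparatorImpl.COMPARATOR are assumptions for
illustration, not taken from this diff:

import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;

public class EncodedSeekerSketch {
  static DataBlockEncoder.EncodedSeeker openSeeker() {
    // Pick one of the encoder implementations listed above, e.g. FAST_DIFF.
    DataBlockEncoder encoder = DataBlockEncoding.FAST_DIFF.getEncoder();
    HFileContext fileContext = new HFileContextBuilder().build();
    // Encoder-specific decoding context, then a seeker bound to it.
    HFileBlockDecodingContext decodingCtx =
        encoder.newDataBlockDecodingContext(fileContext);
    return encoder.createSeeker(CellComparatorImpl.COMPARATOR, decodingCtx);
  }
}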
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cd17dc5/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
     
    b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
    index 79b047f..66443b9 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
    @@ -116,36 +116,36 @@
  HFileBlockDefaultDecodingContext decodingCtx)
 
 
-protected abstract ByteBuffer
-BufferedDataBlockEncoder.internalDecodeKeyValues(DataInputStream source,
+protected ByteBuffer
    

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html 
    b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
    index 5f7ce59..7244ce2 100644
    --- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
    +++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
    @@ -114,15 +114,15 @@
     
     
     private PriorityFunction
    -RpcExecutor.priority
    +SimpleRpcScheduler.priority
     
     
     private PriorityFunction
    -RpcExecutor.CallPriorityComparator.priority
    +RpcExecutor.priority
     
     
     private PriorityFunction
    -SimpleRpcScheduler.priority
    +RpcExecutor.CallPriorityComparator.priority
     
     
     
    @@ -319,7 +319,7 @@
     
     
     RpcScheduler
-FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
   PriorityFunction priority)
     Deprecated.
     
    @@ -333,16 +333,18 @@
     
     
     RpcScheduler
-RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
   PriorityFunction priority)
 Deprecated.
 
 
 
 RpcScheduler
-FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
   PriorityFunction priority,
-  Abortable server)
+  Abortable server)
+Constructs an RpcScheduler.
+
 
     
     RpcScheduler
    @@ -352,11 +354,9 @@
     
     
     RpcScheduler
    -RpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
    +FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
       PriorityFunctionpriority,
    -  Abortableserver)
    -Constructs a RpcScheduler.
    -
    +  Abortableserver)
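
A hedged sketch of the RpcSchedulerFactory contract shown above: a factory
that ignores priorities and returns a FIFO scheduler. The handler-count
configuration key and the FifoRpcScheduler constructor are assumptions based
on the surrounding API, not taken from this diff:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ipc.FifoRpcScheduler;
import org.apache.hadoop.hbase.ipc.PriorityFunction;
import org.apache.hadoop.hbase.ipc.RpcScheduler;
import org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory;

public class FifoOnlySchedulerFactory implements RpcSchedulerFactory {
  @Override
  public RpcScheduler create(Configuration conf, PriorityFunction priority,
      Abortable server) {
    // Serve all calls first-in, first-out; the PriorityFunction is unused.
    int handlerCount = conf.getInt(HConstants.REGION_SERVER_HANDLER_COUNT,
        HConstants.DEFAULT_REGION_SERVER_HANDLER_COUNT);
    return new FifoRpcScheduler(conf, handlerCount);
  }

  @Override
  public RpcScheduler create(Configuration conf, PriorityFunction priority) {
    // The deprecated overload shown above delegates to the Abortable-aware one.
    return create(conf, priority, null);
  }
}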
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html 
    b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html
    index 6d59fb7..4a25f5c 100644
    --- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html
    +++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html
    @@ -123,13 +123,13 @@
     
     
     void
-RpcCallContext.setCallBack(RpcCallback callback)
-Sets a callback which has to be executed at the end of this 
RPC call.
-
+ServerCall.setCallBack(RpcCallback callback)
 
 
 void
-ServerCall.setCallBack(RpcCallback callback)
+RpcCallContext.setCallBack(RpcCallback callback)
    +Sets a callback which has to be executed at the end of this 
    RPC call.
    +
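
A hedged sketch of the setCallBack contract documented above: attach an
end-of-call cleanup to the current server-side RPC. That
RpcServer.getCurrentCall() returns an Optional is an assumption for
illustration:

import java.io.IOException;
import org.apache.hadoop.hbase.ipc.RpcCallback;
import org.apache.hadoop.hbase.ipc.RpcServer;

public class EndOfCallCleanup {
  static void releaseWhenCallEnds(final AutoCloseable resource) {
    RpcServer.getCurrentCall().ifPresent(call -> call.setCallBack(new RpcCallback() {
      @Override
      public void run() throws IOException {
        try {
          resource.close(); // executed once the RPC call completes
        } catch (Exception e) {
          throw new IOException(e);
        }
      }
    }));
  }
}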
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html 
    b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html
    index baa4e5e..fab4d7a 100644
    --- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html
    +++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html
    @@ -131,24 +131,32 @@
     
     
     
    -protected RpcControllerFactory
    -RegionAdminServiceCallable.rpcControllerFactory
    -
    -
     private RpcControllerFactory
     ConnectionImplementation.rpcControllerFactory
     
    +
    +protected RpcControllerFactory
    +ClientScanner.rpcControllerFactory
    +
     
    +protected RpcControllerFactory
    +RegionAdminServiceCallable.rpcControllerFactory
    +
    +
     (package private) RpcControllerFactory
     AsyncConnectionImpl.rpcControllerFactory
     
    -
    +
     private RpcControllerFactory
     HTable.rpcControllerFactory
     
    +
    +private RpcControllerFactory
    +HBaseAdmin.rpcControllerFactory
    +
     
     private RpcControllerFactory
    -RpcRetryingCallerWithReadReplicas.rpcControllerFactory
    +SecureBulkLoadClient.rpcControllerFactory
     
     
     protected RpcControllerFactory
    @@ -156,15 +164,7 @@
     
     
     private RpcControllerFactory
    -HBaseAdmin.rpcControllerFactory
    -
    -
    -private RpcControllerFactory
    -SecureBulkLoadClient.rpcControllerFactory
    -
    -
    -protected RpcControllerFactory
    -ClientScanner.rpcControllerFactory
    +RpcRetryingCallerWithReadReplicas.rpcControllerFactory
     
     
     (package private) RpcControllerFactory
    @@ -181,11 +181,11 @@
     
     
     RpcControllerFactory
    -ClusterConnection.getRpcControllerFactory()
    +ConnectionImplementation.getRpcControllerFactory()
     
     
     RpcControllerFactory
    -ConnectionImplementation.getRpcControllerFactory()
    +ClusterConnection.getRpcControllerFactory()
     
     
     private RpcControllerFactory
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcExecutor.Handler.html
    --
    diff --git 
    

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f272b0e8/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
    b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
    index 463f4fa..65795ae 100644
    --- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
    +++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
    @@ -488,15 +488,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
    utility methods.
     
     
     static Filter
-SingleColumnValueExcludeFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+ColumnPrefixFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-ValueFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+ColumnCountGetFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-FamilyFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+RowFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
     
     
     static Filter
    @@ -506,63 +506,63 @@ Input/OutputFormats, a table indexing MapReduce job, and 
    utility methods.
     
     
     static Filter
-ColumnPrefixFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+FirstKeyOnlyFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-PageFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+TimestampsFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-RowFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+ValueFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-ColumnRangeFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+KeyOnlyFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-ColumnCountGetFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+FamilyFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-MultipleColumnPrefixFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+QualifierFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-ColumnPaginationFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+ColumnRangeFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
     
     
     static Filter
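
All of the createFilterFromArguments overloads above share one contract:
parse the filter-language arguments out of an ArrayList<byte[]> and return a
configured Filter. A minimal sketch of that contract, modeled on PageFilter
and assuming ParseFilter's byte[]-to-long helper:

import java.util.ArrayList;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.filter.ParseFilter;

public class FilterFromArgumentsSketch {
  public static Filter createFilterFromArguments(ArrayList<byte[]> filterArguments) {
    if (filterArguments.size() != 1) {
      throw new IllegalArgumentException("expected exactly one argument: pageSize");
    }
    // ParseFilter provides the byte[]-to-primitive conversions these
    // static factories rely on.
    long pageSize = ParseFilter.convertByteArrayToLong(filterArguments.get(0));
    return new PageFilter(pageSize);
  }
}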
    


    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.html
     
    b/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.html
    index 9bd0287..cfc5ad9 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.html
    @@ -157,6 +157,14 @@
     
     
     
+protected BaseLoadBalancer.Cluster.Action
+FavoredStochasticBalancer.FavoredNodeLocalityPicker.generate(BaseLoadBalancer.Cluster cluster)
+
+
+(package private) BaseLoadBalancer.Cluster.Action
+FavoredStochasticBalancer.FavoredNodeLoadPicker.generate(BaseLoadBalancer.Cluster cluster)
+
+
 (package private) abstract BaseLoadBalancer.Cluster.Action
 StochasticLoadBalancer.CandidateGenerator.generate(BaseLoadBalancer.Cluster cluster)
 
@@ -181,14 +189,6 @@
 StochasticLoadBalancer.RegionReplicaRackCandidateGenerator.generate(BaseLoadBalancer.Cluster cluster)
 
 
-protected BaseLoadBalancer.Cluster.Action
-FavoredStochasticBalancer.FavoredNodeLocalityPicker.generate(BaseLoadBalancer.Cluster cluster)
-
-
-(package private) BaseLoadBalancer.Cluster.Action
-FavoredStochasticBalancer.FavoredNodeLoadPicker.generate(BaseLoadBalancer.Cluster cluster)
-
-
 private int
 FavoredStochasticBalancer.FavoredNodeLocalityPicker.getDifferentFavoredNode(BaseLoadBalancer.Cluster cluster,
 List<ServerName> favoredNodes,
    @@ -247,12 +247,12 @@
     
     
     private int
-StochasticLoadBalancer.LoadCandidateGenerator.pickLeastLoadedServer(BaseLoadBalancer.Cluster cluster,
+FavoredStochasticBalancer.FavoredNodeLoadPicker.pickLeastLoadedServer(BaseLoadBalancer.Cluster cluster,
  int thisServer)
 
 
 private int
-FavoredStochasticBalancer.FavoredNodeLoadPicker.pickLeastLoadedServer(BaseLoadBalancer.Cluster cluster,
+StochasticLoadBalancer.LoadCandidateGenerator.pickLeastLoadedServer(BaseLoadBalancer.Cluster cluster,
  int thisServer)
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html 
    b/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
    index b99a6b3..f82f773 100644
    --- a/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
    +++ b/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
    @@ -197,8 +197,8 @@
     
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
     
    -org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.LocalityType
     org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type
    +org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.LocalityType
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/master/class-use/CatalogJanitor.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/master/class-use/CatalogJanitor.html 
    b/devapidocs/org/apache/hadoop/hbase/master/class-use/CatalogJanitor.html
    index ed61e0f..d29d09b 100644
    --- a/devapidocs/org/apache/hadoop/hbase/master/class-use/CatalogJanitor.html
    +++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/CatalogJanitor.html
    @@ -117,11 +117,11 @@
     
     
     CatalogJanitor
    -HMaster.getCatalogJanitor()
    +MasterServices.getCatalogJanitor()
     
     
     CatalogJanitor
    -MasterServices.getCatalogJanitor()
    +HMaster.getCatalogJanitor()
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/master/class-use/ClusterSchema.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/master/class-use/ClusterSchema.html 
    b/devapidocs/org/apache/hadoop/hbase/master/class-use/ClusterSchema.html
    index fcc4f20..fb802a0 100644
    --- a/devapidocs/org/apache/hadoop/hbase/master/class-use/ClusterSchema.html
    +++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/ClusterSchema.html
    @@ -132,11 +132,11 @@
     
     
     ClusterSchema
    -HMaster.getClusterSchema()
    

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6674e3ab/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcConnection.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcConnection.html 
    b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcConnection.html
    index 32c21a4..92a291e 100644
    --- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcConnection.html
    +++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcConnection.html
    @@ -259,107 +259,105 @@
     251  }
     252
     253  private void connect() {
    -254if (LOG.isDebugEnabled()) {
    -255  LOG.debug("Connecting to " + 
    remoteId.address);
    -256}
    -257
    -258this.channel = new 
    Bootstrap().group(rpcClient.group).channel(rpcClient.channelClass)
    -259
    .option(ChannelOption.TCP_NODELAY, rpcClient.isTcpNoDelay())
    -260
    .option(ChannelOption.SO_KEEPALIVE, rpcClient.tcpKeepAlive)
    -261
    .option(ChannelOption.CONNECT_TIMEOUT_MILLIS, rpcClient.connectTO)
    -262.handler(new 
    BufferCallBeforeInitHandler()).localAddress(rpcClient.localAddr)
    -263
    .remoteAddress(remoteId.address).connect().addListener(new 
    ChannelFutureListener() {
    -264
    -265  @Override
    -266  public void 
    operationComplete(ChannelFuture future) throws Exception {
    -267Channel ch = 
    future.channel();
    -268if (!future.isSuccess()) {
    -269  failInit(ch, 
    toIOE(future.cause()));
    -270  
    rpcClient.failedServers.addToFailedServers(remoteId.address, future.cause());
    -271  return;
    -272}
    -273
    ch.writeAndFlush(connectionHeaderPreamble.retainedDuplicate());
    -274if (useSasl) {
    -275  saslNegotiate(ch);
    -276} else {
    -277  // send the connection 
    header to server
    -278  
    ch.write(connectionHeaderWithLength.retainedDuplicate());
    -279  established(ch);
    -280}
    -281  }
    -282}).channel();
    -283  }
    -284
    -285  private void write(Channel ch, final 
    Call call) {
    -286
    ch.writeAndFlush(call).addListener(new ChannelFutureListener() {
    -287
    -288  @Override
    -289  public void 
    operationComplete(ChannelFuture future) throws Exception {
    -290// Fail the call if we failed to 
    write it out. This usually because the channel is
    -291// closed. This is needed because 
    we may shutdown the channel inside event loop and
    -292// there may still be some 
    pending calls in the event loop queue after us.
    -293if (!future.isSuccess()) {
    -294  
    call.setException(toIOE(future.cause()));
    -295}
    -296  }
    -297});
    -298  }
    -299
    -300  @Override
    -301  public synchronized void 
    sendRequest(final Call call, HBaseRpcController hrc) throws IOException {
    -302if (reloginInProgress) {
    -303  throw new IOException("Can not send 
    request because relogin is in progress.");
    -304}
    -305hrc.notifyOnCancel(new 
RpcCallback<Object>() {
    -306
    -307  @Override
    -308  public void run(Object parameter) 
    {
    -309setCancelled(call);
    -310synchronized (this) {
    -311  if (channel != null) {
    -312
    channel.pipeline().fireUserEventTriggered(new CallEvent(CANCELLED, call));
    -313  }
    -314}
    -315  }
    -316}, new CancellationCallback() {
    -317
    -318  @Override
    -319  public void run(boolean cancelled) 
    throws IOException {
    -320if (cancelled) {
    -321  setCancelled(call);
    -322} else {
    -323  if (channel == null) {
    -324connect();
    -325  }
    -326  scheduleTimeoutTask(call);
    -327  final Channel ch = channel;
    -328  // We must move the whole 
    writeAndFlush call inside event loop otherwise there will be a
    -329  // race condition.
    -330  // In netty's 
    DefaultChannelPipeline, it will find the first outbound handler in the
    -331  // current thread and then 
    schedule a task to event loop which will start the process from
    -332  // that outbound handler. It is 
    possible that the first handler is
    -333  // BufferCallBeforeInitHandler 
    when we call writeAndFlush here, but the connection is set
    -334  // up at the same time so in 
    the event loop thread we remove the
    -335  // BufferCallBeforeInitHandler, 
    and then our writeAndFlush task comes, still calls the
    -336  // write method of 
    BufferCallBeforeInitHandler.
    -337  // This may be considered as a 
    bug of netty, but anyway there is a work around so let's
    -338  // fix it by ourselves first.
    -339  if 
    (ch.eventLoop().inEventLoop()) {
    -340write(ch, call);
    -341  } else {
    -342ch.eventLoop().execute(new 
    Runnable() {
    -343
    -344  @Override
    -345  public void run() {
    -346write(ch, call);
    -347  }
    -348});
    -349  }
    -350}
    -351   
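
The long comment in the listing above reduces to one rule: perform the
writeAndFlush from inside the channel's event loop, so a concurrent pipeline
mutation (removing BufferCallBeforeInitHandler during connection setup)
cannot race with the write. A minimal restatement of that pattern in plain
Netty 4, outside any HBase class:

import io.netty.channel.Channel;

final class EventLoopWrite {
  private EventLoopWrite() {}

  // Run writeAndFlush on the channel's event loop; if the caller is
  // already on it, write directly, otherwise schedule the write there.
  static void writeInEventLoop(Channel ch, Object msg) {
    if (ch.eventLoop().inEventLoop()) {
      ch.writeAndFlush(msg);
    } else {
      ch.eventLoop().execute(() -> ch.writeAndFlush(msg));
    }
  }
}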

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1f2eeb22/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
    index 4a87b9d..7515d7b 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
    @@ -25,3542 +25,3570 @@
     017 */
     018package org.apache.hadoop.hbase.master;
     019
    -020import com.google.protobuf.Descriptors;
    -021import com.google.protobuf.Service;
    -022import java.io.IOException;
    -023import java.io.InterruptedIOException;
    -024import java.lang.reflect.Constructor;
    -025import 
    java.lang.reflect.InvocationTargetException;
    -026import java.net.InetAddress;
    -027import java.net.InetSocketAddress;
    -028import java.net.UnknownHostException;
    -029import java.util.ArrayList;
    -030import java.util.Arrays;
    -031import java.util.Collection;
    -032import java.util.Collections;
    -033import java.util.Comparator;
    -034import java.util.EnumSet;
    -035import java.util.HashMap;
    -036import java.util.Iterator;
    -037import java.util.List;
    -038import java.util.Map;
    -039import java.util.Map.Entry;
    -040import java.util.Objects;
    -041import java.util.Set;
    -042import 
    java.util.concurrent.ExecutionException;
    -043import java.util.concurrent.Future;
    -044import java.util.concurrent.TimeUnit;
    -045import 
    java.util.concurrent.TimeoutException;
    -046import 
    java.util.concurrent.atomic.AtomicInteger;
    -047import 
    java.util.concurrent.atomic.AtomicReference;
    -048import java.util.function.Function;
    -049import java.util.regex.Pattern;
    -050import java.util.stream.Collectors;
    -051import javax.servlet.ServletException;
    -052import javax.servlet.http.HttpServlet;
    -053import 
    javax.servlet.http.HttpServletRequest;
    -054import 
    javax.servlet.http.HttpServletResponse;
    -055import 
    org.apache.commons.lang3.StringUtils;
    -056import 
    org.apache.hadoop.conf.Configuration;
    -057import org.apache.hadoop.fs.Path;
    -058import 
    org.apache.hadoop.hbase.ClusterId;
    -059import 
    org.apache.hadoop.hbase.ClusterMetrics;
    -060import 
    org.apache.hadoop.hbase.ClusterMetrics.Option;
    -061import 
    org.apache.hadoop.hbase.ClusterMetricsBuilder;
    -062import 
    org.apache.hadoop.hbase.CoordinatedStateException;
    -063import 
    org.apache.hadoop.hbase.DoNotRetryIOException;
    -064import 
    org.apache.hadoop.hbase.HBaseIOException;
    -065import 
    org.apache.hadoop.hbase.HBaseInterfaceAudience;
    -066import 
    org.apache.hadoop.hbase.HConstants;
    -067import 
    org.apache.hadoop.hbase.InvalidFamilyOperationException;
    -068import 
    org.apache.hadoop.hbase.MasterNotRunningException;
    -069import 
    org.apache.hadoop.hbase.MetaTableAccessor;
    -070import 
    org.apache.hadoop.hbase.NamespaceDescriptor;
    -071import 
    org.apache.hadoop.hbase.PleaseHoldException;
    -072import 
    org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
    -073import 
    org.apache.hadoop.hbase.ServerLoad;
    -074import 
    org.apache.hadoop.hbase.ServerMetricsBuilder;
    -075import 
    org.apache.hadoop.hbase.ServerName;
    -076import 
    org.apache.hadoop.hbase.TableDescriptors;
    -077import 
    org.apache.hadoop.hbase.TableName;
    -078import 
    org.apache.hadoop.hbase.TableNotDisabledException;
    -079import 
    org.apache.hadoop.hbase.TableNotFoundException;
    -080import 
    org.apache.hadoop.hbase.UnknownRegionException;
    -081import 
    org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
    -082import 
    org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    -083import 
    org.apache.hadoop.hbase.client.MasterSwitchType;
    -084import 
    org.apache.hadoop.hbase.client.RegionInfo;
    -085import 
    org.apache.hadoop.hbase.client.Result;
    -086import 
    org.apache.hadoop.hbase.client.TableDescriptor;
    -087import 
    org.apache.hadoop.hbase.client.TableDescriptorBuilder;
    -088import 
    org.apache.hadoop.hbase.client.TableState;
    -089import 
    org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
    -090import 
    org.apache.hadoop.hbase.exceptions.DeserializationException;
    -091import 
    org.apache.hadoop.hbase.exceptions.MergeRegionException;
    -092import 
    org.apache.hadoop.hbase.executor.ExecutorType;
    -093import 
    org.apache.hadoop.hbase.favored.FavoredNodesManager;
    -094import 
    org.apache.hadoop.hbase.favored.FavoredNodesPromoter;
    -095import 
    org.apache.hadoop.hbase.http.InfoServer;
    -096import 
    org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
    -097import 
    org.apache.hadoop.hbase.ipc.RpcServer;
    -098import 
    org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
    -099import 
    org.apache.hadoop.hbase.log.HBaseMarkers;
    -100import 
    org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode;
    -101import 
    org.apache.hadoop.hbase.master.assignment.AssignmentManager;
    -102import 
    org.apache.hadoop.hbase.master.assignment.MergeTableRegionsProcedure;
    -103import 
    

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/cc6597ec/testdevapidocs/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.ScannerThread.html
    --
    diff --git 
    a/testdevapidocs/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.ScannerThread.html
     
    b/testdevapidocs/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.ScannerThread.html
    index 4d300c2..b1e5eeb 100644
    --- 
    a/testdevapidocs/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.ScannerThread.html
    +++ 
    b/testdevapidocs/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.ScannerThread.html
    @@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
     
     
     
    -private static class TestAvoidCellReferencesIntoShippedBlocks.ScannerThread
    +private static class TestAvoidCellReferencesIntoShippedBlocks.ScannerThread
extends Thread
     
     
@@ -244,7 +244,7 @@ extends Thread
     
     
     table
-private final org.apache.hadoop.hbase.client.Table table
+private final org.apache.hadoop.hbase.client.Table table
     
     
     
@@ -253,7 +253,7 @@ extends Thread
     
     
     cache
-private final org.apache.hadoop.hbase.io.hfile.BlockCache cache
+private final org.apache.hadoop.hbase.io.hfile.BlockCache cache
     
     
     
@@ -270,7 +270,7 @@ extends Thread
     
     
     ScannerThread
-public ScannerThread(org.apache.hadoop.hbase.client.Table table,
+public ScannerThread(org.apache.hadoop.hbase.client.Table table,
 
org.apache.hadoop.hbase.io.hfile.BlockCache cache)
     
     
@@ -288,7 +288,7 @@ extends Thread
     
     
     run
-public void run()
+public void run()
     
     Specified by:
 run in interface Runnable
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/cc6597ec/testdevapidocs/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.html
    --
    diff --git 
    a/testdevapidocs/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.html
     
    b/testdevapidocs/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.html
    index 41d7d00..e560cb0 100644
    --- 
    a/testdevapidocs/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.html
    +++ 
    b/testdevapidocs/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.html
    @@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
     
     
     
    -public class TestAvoidCellReferencesIntoShippedBlocks
    +public class TestAvoidCellReferencesIntoShippedBlocks
extends Object
     
     
@@ -157,74 +157,78 @@ extends Object
     Field and Description
     
     
    +static HBaseClassTestRule
    +CLASS_RULE
    +
    +
private static CountDownLatch
     compactReadLatch
     
    -
    +
     private static byte[]
     data
     
    -
    +
private static AtomicBoolean
     doScan
     
    -
    +
     private static byte[][]
     FAMILIES_1
     
    -
    +
     private static byte[]
     FAMILY
     
    -
    +
private CountDownLatch
     latch
     
    -
    +
     org.junit.rules.TestName
     name
     
    -
    +
     private static byte[]
     QUALIFIER
     
    -
    +
     private static byte[]
     QUALIFIER1
     
    -
    +
     private static byte[]
     ROW
     
    -
    +
     private static byte[]
     ROW1
     
    -
    +
     private static byte[]
     ROW2
     
    -
    +
     private static byte[]
     ROW3
     
    -
    +
     private static byte[]
     ROW4
     
    -
    +
     private static byte[]
     ROW5
     
    -
    +
     (package private) static byte[][]
     ROWS
     
    -
    +
     protected static int
     SLAVES
     
    -
    +
     protected static HBaseTestingUtility
     TEST_UTIL
     
@@ -298,13 +302,22 @@ extends Object
     
     
     Field Detail
    +
    +
    +
    +
    +
    +CLASS_RULE
+public static final HBaseClassTestRule CLASS_RULE
    +
    +
     
     
     
     
     
     TEST_UTIL
-protected static final HBaseTestingUtility TEST_UTIL
+protected static final HBaseTestingUtility TEST_UTIL
     
     
     
    @@ -313,7 +326,7 @@ extends 
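
The CLASS_RULE field added above follows the standard HBase test convention;
a hedged sketch of how such a rule is declared (the @ClassRule annotation and
forClass factory are that convention, the test class name is hypothetical):

import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.junit.ClassRule;

public class TestSomethingClient {
  // Enforces the per-category class timeout for this test.
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestSomethingClient.class);
}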

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.html
    index 75e3c2b..6c25207 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.html
    @@ -80,7 +80,7 @@
     072  }
     073
074  protected List<String> 
filterMissingFiles(List<String> incrBackupFileList) throws IOException 
{
-075List<String> list = new 
ArrayList<String>();
+075List<String> list = new 
ArrayList<>();
     076for (String file : 
    incrBackupFileList) {
     077  Path p = new Path(file);
     078  if (fs.exists(p) || 
    isActiveWalPath(p)) {
    @@ -102,201 +102,201 @@
     094  }
     095
096  protected static int getIndex(TableName 
tbl, List<TableName> sTableList) {
-097if (sTableList == null) return 0;
-098for (int i = 0; i < 
sTableList.size(); i++) {
    -099  if (tbl.equals(sTableList.get(i))) 
    {
    -100return i;
    -101  }
    -102}
    -103return -1;
    -104  }
    -105
    -106  /*
    -107   * Reads bulk load records from backup 
    table, iterates through the records and forms the paths
    -108   * for bulk loaded hfiles. Copies the 
    bulk loaded hfiles to backup destination
    -109   * @param sTableList list of tables to 
    be backed up
    -110   * @return map of table to List of 
    files
    -111   */
    -112  @SuppressWarnings("unchecked")
-113  protected Map<byte[], 
List<Path>>[] handleBulkLoad(List<TableName> sTableList) throws 
IOException {
-114Map<byte[], List<Path>>[] 
mapForSrc = new Map[sTableList.size()];
-115List<String> activeFiles = new 
ArrayList<String>();
-116List<String> archiveFiles = new 
ArrayList<String>();
-117Pair<Map<TableName, 
Map<String, Map<String, List<Pair<String, 
Boolean>>>>>, List<byte[]>> pair =
-118
backupManager.readBulkloadRows(sTableList);
-119Map<TableName, Map<String, 
Map<String, List<Pair<String, Boolean>>>>> map = 
pair.getFirst();
    -120FileSystem tgtFs;
    -121try {
    -122  tgtFs = FileSystem.get(new 
    URI(backupInfo.getBackupRootDir()), conf);
    -123} catch (URISyntaxException use) {
    -124  throw new IOException("Unable to 
    get FileSystem", use);
    -125}
    -126Path rootdir = 
    FSUtils.getRootDir(conf);
    -127Path tgtRoot = new Path(new 
    Path(backupInfo.getBackupRootDir()), backupId);
    -128
    -129for (Map.EntryTableName, 
    MapString, MapString, ListPairString, 
    Boolean tblEntry :
    -130  map.entrySet()) {
    -131  TableName srcTable = 
    tblEntry.getKey();
    +097if (sTableList == null) {
    +098  return 0;
    +099}
    +100
+101for (int i = 0; i < 
    sTableList.size(); i++) {
    +102  if (tbl.equals(sTableList.get(i))) 
    {
    +103return i;
    +104  }
    +105}
    +106return -1;
    +107  }
    +108
    +109  /*
    +110   * Reads bulk load records from backup 
    table, iterates through the records and forms the paths
    +111   * for bulk loaded hfiles. Copies the 
    bulk loaded hfiles to backup destination
    +112   * @param sTableList list of tables to 
    be backed up
    +113   * @return map of table to List of 
    files
    +114   */
    +115  @SuppressWarnings("unchecked")
+116  protected Map<byte[], 
List<Path>>[] handleBulkLoad(List<TableName> sTableList)
+117  throws IOException {
+118Map<byte[], List<Path>>[] 
mapForSrc = new Map[sTableList.size()];
+119List<String> activeFiles = new 
ArrayList<>();
+120List<String> archiveFiles = new 
ArrayList<>();
+121Pair<Map<TableName, 
Map<String, Map<String, List<Pair<String, 
Boolean>>>>>, List<byte[]>> pair =
+122
backupManager.readBulkloadRows(sTableList);
+123Map<TableName, Map<String, 
Map<String, List<Pair<String, Boolean>>>>> map = 
pair.getFirst();
    +124FileSystem tgtFs;
    +125try {
    +126  tgtFs = FileSystem.get(new 
    URI(backupInfo.getBackupRootDir()), conf);
    +127} catch (URISyntaxException use) {
    +128  throw new IOException("Unable to 
    get FileSystem", use);
    +129}
    +130Path rootdir = 
    FSUtils.getRootDir(conf);
    +131Path tgtRoot = new Path(new 
    Path(backupInfo.getBackupRootDir()), backupId);
     132
    -133  int srcIdx = getIndex(srcTable, 
    sTableList);
-134  if (srcIdx < 0) {
    -135LOG.warn("Couldn't find " + 
    srcTable + " in source table List");
    -136continue;
    -137  }
    -138  if (mapForSrc[srcIdx] == null) {
-139mapForSrc[srcIdx] = new 
TreeMap<byte[], List<Path>>(Bytes.BYTES_COMPARATOR);
    -140  }
    -141  Path tblDir = 
    FSUtils.getTableDir(rootdir, srcTable);
    -142  Path tgtTable = new Path(new 
    Path(tgtRoot, srcTable.getNamespaceAsString()),
    -143  
    srcTable.getQualifierAsString());
-144  for 
(Map.Entry<String, Map<String, List<Pair<String, 
Boolean>>>> regionEntry :
    -145
    tblEntry.getValue().entrySet()){
    -146String regionName = 
    

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/96e5e102/testdevapidocs/org/apache/hadoop/hbase/rest/model/TestCellModel.html
    --
    diff --git 
    a/testdevapidocs/org/apache/hadoop/hbase/rest/model/TestCellModel.html 
    b/testdevapidocs/org/apache/hadoop/hbase/rest/model/TestCellModel.html
    index ac36663..5ea5930 100644
    --- a/testdevapidocs/org/apache/hadoop/hbase/rest/model/TestCellModel.html
    +++ b/testdevapidocs/org/apache/hadoop/hbase/rest/model/TestCellModel.html
    @@ -100,12 +100,6 @@ var activeTableTab = "activeTableTab";
java.lang.Object
     
     
    -junit.framework.Assert
    -
    -
    -junit.framework.TestCase
    -
    -
org.apache.hadoop.hbase.rest.model.TestModelBase<org.apache.hadoop.hbase.rest.model.CellModel>
     
     
    @@ -115,20 +109,12 @@ var activeTableTab = "activeTableTab";
     
     
     
    -
    -
    -
    -
     
     
     
    -
    -All Implemented Interfaces:
    -junit.framework.Test
    -
     
     
    -public class TestCellModel
    +public class TestCellModel
extends TestModelBase<org.apache.hadoop.hbase.rest.model.CellModel>
     
     
    @@ -236,18 +222,11 @@ extends fromJSON,
     fromPB,
     fromXML,
     testFromJSON,
     testToJSON,
     testToXML,
     toJSON,
     toPB,
     toXML
     
     
    -
    -
    -
    -Methods inherited from classjunit.framework.TestCase
    -assertEquals, assertEquals, assertEquals, assertEquals, assertEquals, 
    assertEquals, assertEquals, assertEquals, assertEquals, assertEquals, 
    assertEquals, assertEquals, assertEquals, assertEquals, assertEquals, 
    assertEquals, assertEquals, assertEquals, assertEquals, assertEquals, 
    assertFalse, assertFalse, assertNotNull, assertNotNull, assertNotSame, 
    assertNotSame, assertNull, assertNull, assertSame, assertSame, assertTrue, 
    assertTrue, countTestCases, createResult, fail, fail, failNotEquals, 
    failNotSame, failSame, format, getName, run, run, runBare, runTest, setName, 
    setUp, tearDown, toString
    -
    -
     
     
     
Methods inherited from class java.lang.Object
-clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait
+clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8118541f/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html 
    b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
    index 64c3ba9..211ff82 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
    @@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
     
     
     
    -public static interface HRegion.BulkLoadListener
    +public static interface HRegion.BulkLoadListener
     Listener class to enable callers of
      bulkLoadHFile() to perform any necessary
      pre/post processing of a given bulkload call
    @@ -174,7 +174,7 @@ var activeTableTab = "activeTableTab";
     
     
     prepareBulkLoad
-String prepareBulkLoad(byte[] family,
+String prepareBulkLoad(byte[] family,
String srcPath,
boolean copyFile)
 throws IOException
    @@ -196,7 +196,7 @@ var activeTableTab = "activeTableTab";
     
     
     doneBulkLoad
-void doneBulkLoad(byte[] family,
+void doneBulkLoad(byte[] family,
  String srcPath)
throws IOException
     Called after a successful HFile load
    @@ -215,7 +215,7 @@ var activeTableTab = "activeTableTab";
     
     
     failedBulkLoad
-void failedBulkLoad(byte[] family,
+void failedBulkLoad(byte[] family,
 String srcPath)
  throws IOException
     Called after a failed HFile load
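
A minimal sketch implementing the three BulkLoadListener hooks documented
above; the logging bodies are illustrative only, not part of this diff:

import java.io.IOException;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingBulkLoadListener implements HRegion.BulkLoadListener {
  private static final Logger LOG =
      LoggerFactory.getLogger(LoggingBulkLoadListener.class);

  @Override
  public String prepareBulkLoad(byte[] family, String srcPath, boolean copyFile)
      throws IOException {
    LOG.info("preparing bulk load of {} into {}", srcPath, Bytes.toString(family));
    return srcPath; // the path the region will actually load from
  }

  @Override
  public void doneBulkLoad(byte[] family, String srcPath) throws IOException {
    LOG.info("loaded {} into {}", srcPath, Bytes.toString(family));
  }

  @Override
  public void failedBulkLoad(byte[] family, String srcPath) throws IOException {
    LOG.warn("failed to load {} into {}", srcPath, Bytes.toString(family));
  }
}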
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8118541f/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
     
    b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
    index ea28270..b12ee45 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
    @@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
     
     
     
    -public static enum HRegion.FlushResult.Result
    +public static enum HRegion.FlushResult.Result
extends Enum<HRegion.FlushResult.Result>
     
     
    @@ -216,7 +216,7 @@ the order they are declared.
     
     
     FLUSHED_NO_COMPACTION_NEEDED
-public static final HRegion.FlushResult.Result FLUSHED_NO_COMPACTION_NEEDED
+public static final HRegion.FlushResult.Result FLUSHED_NO_COMPACTION_NEEDED
     
     
     
    @@ -225,7 +225,7 @@ the order they are declared.
     
     
     FLUSHED_COMPACTION_NEEDED
-public static final HRegion.FlushResult.Result FLUSHED_COMPACTION_NEEDED
+public static final HRegion.FlushResult.Result FLUSHED_COMPACTION_NEEDED
     
     
     
    @@ -234,7 +234,7 @@ the order they are declared.
     
     
     CANNOT_FLUSH_MEMSTORE_EMPTY
-public static final HRegion.FlushResult.Result CANNOT_FLUSH_MEMSTORE_EMPTY
+public static final HRegion.FlushResult.Result CANNOT_FLUSH_MEMSTORE_EMPTY
     
     
     
    @@ -243,7 +243,7 @@ the order they are declared.
     
     
     CANNOT_FLUSH
-public static final HRegion.FlushResult.Result CANNOT_FLUSH
+public static final HRegion.FlushResult.Result CANNOT_FLUSH
     
     
     
    @@ -260,7 +260,7 @@ the order they are declared.
     
     
     values
-public static HRegion.FlushResult.Result[] values()
+public static HRegion.FlushResult.Result[] values()
     Returns an array containing the constants of this enum 
    type, in
     the order they are declared.  This method may be used to iterate
     over the constants as follows:
    @@ -280,7 +280,7 @@ for (HRegion.FlushResult.Result c : 
    HRegion.FlushResult.Result.values())
     
     
     valueOf
    -public 

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/14db89d7/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.GoodRsExecutor.html
    --
    diff --git 
    a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.GoodRsExecutor.html
     
    b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.GoodRsExecutor.html
    index f1db5ca..d8515d7 100644
    --- 
    a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.GoodRsExecutor.html
    +++ 
    b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.GoodRsExecutor.html
    @@ -32,813 +32,820 @@
     024import static org.junit.Assert.fail;
     025
     026import java.io.IOException;
    -027import java.net.SocketTimeoutException;
    -028import java.util.NavigableMap;
    -029import java.util.Random;
    -030import java.util.Set;
    -031import java.util.SortedSet;
    -032import 
    java.util.concurrent.ConcurrentSkipListMap;
    -033import 
    java.util.concurrent.ConcurrentSkipListSet;
    -034import 
    java.util.concurrent.ExecutionException;
    -035import java.util.concurrent.Executors;
    -036import java.util.concurrent.Future;
    -037import 
    java.util.concurrent.ScheduledExecutorService;
    -038import java.util.concurrent.TimeUnit;
    -039
    -040import 
    org.apache.hadoop.conf.Configuration;
    -041import 
    org.apache.hadoop.hbase.CategoryBasedTimeout;
    -042import 
    org.apache.hadoop.hbase.DoNotRetryIOException;
    -043import 
    org.apache.hadoop.hbase.HBaseTestingUtility;
    -044import 
    org.apache.hadoop.hbase.NotServingRegionException;
    -045import 
    org.apache.hadoop.hbase.ServerName;
    -046import 
    org.apache.hadoop.hbase.TableName;
    -047import 
    org.apache.hadoop.hbase.client.RegionInfo;
    -048import 
    org.apache.hadoop.hbase.client.RegionInfoBuilder;
    -049import 
    org.apache.hadoop.hbase.client.RetriesExhaustedException;
    -050import 
    org.apache.hadoop.hbase.exceptions.UnexpectedStateException;
    -051import 
    org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
    -052import 
    org.apache.hadoop.hbase.master.MasterServices;
    -053import 
    org.apache.hadoop.hbase.master.RegionState.State;
    -054import 
    org.apache.hadoop.hbase.master.procedure.MasterProcedureConstants;
    -055import 
    org.apache.hadoop.hbase.master.procedure.ProcedureSyncWait;
    -056import 
    org.apache.hadoop.hbase.master.procedure.RSProcedureDispatcher;
    -057import 
    org.apache.hadoop.hbase.procedure2.Procedure;
    -058import 
    org.apache.hadoop.hbase.procedure2.ProcedureMetrics;
    -059import 
    org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
    -060import 
    org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
    -061import 
    org.apache.hadoop.hbase.procedure2.util.StringUtils;
    -062import 
    org.apache.hadoop.hbase.regionserver.RegionServerAbortedException;
    -063import 
    org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
    -064import 
    org.apache.hadoop.hbase.testclassification.MasterTests;
    -065import 
    org.apache.hadoop.hbase.testclassification.MediumTests;
    -066import 
    org.apache.hadoop.hbase.util.Bytes;
    -067import 
    org.apache.hadoop.hbase.util.FSUtils;
    -068import 
    org.apache.hadoop.ipc.RemoteException;
    -069import org.junit.After;
    -070import org.junit.Before;
    -071import org.junit.Ignore;
    -072import org.junit.Rule;
    -073import org.junit.Test;
    -074import 
    org.junit.experimental.categories.Category;
    -075import 
    org.junit.rules.ExpectedException;
    -076import org.junit.rules.TestName;
    -077import org.junit.rules.TestRule;
    -078import org.slf4j.Logger;
    -079import org.slf4j.LoggerFactory;
    -080import 
    org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
    -081import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
    -082import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
    -083import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresRequest;
    -084import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresResponse;
    -085import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest;
    -086import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo;
    -087import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse;
    -088import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState;
    -089import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition;
    -090import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
    -091import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest;
    -092
    -093@Category({MasterTests.class, 
    MediumTests.class})
    -094public class TestAssignmentManager {
    -095  private static final Logger LOG = 
    LoggerFactory.getLogger(TestAssignmentManager.class);
    

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0b638133/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.html
    --
    diff --git 
    a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.html
     
    b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.html
    index aef93fa..771d9be 100644
    --- 
    a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.html
    +++ 
    b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.html
    @@ -453,7 +453,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     testSelectiveFlushWhenNotEnabled
-public void testSelectiveFlushWhenNotEnabled()
+public void testSelectiveFlushWhenNotEnabled()
  throws IOException
     
     Throws:
    @@ -467,7 +467,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     getRegionWithName
-private static org.apache.hadoop.hbase.util.Pair<org.apache.hadoop.hbase.regionserver.HRegion,org.apache.hadoop.hbase.regionserver.HRegionServer> getRegionWithName(org.apache.hadoop.hbase.TableName tableName)
+private static org.apache.hadoop.hbase.util.Pair<org.apache.hadoop.hbase.regionserver.HRegion,org.apache.hadoop.hbase.regionserver.HRegionServer> getRegionWithName(org.apache.hadoop.hbase.TableName tableName)
     
     
     
    @@ -476,7 +476,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     doTestLogReplay
-private void doTestLogReplay()
+private void doTestLogReplay()
  throws Exception
     
     Throws:
    @@ -490,7 +490,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     testLogReplayWithDistributedLogSplit
-public void testLogReplayWithDistributedLogSplit()
+public void testLogReplayWithDistributedLogSplit()
  throws Exception
     
     Throws:
    @@ -504,7 +504,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     getWAL
-private org.apache.hadoop.hbase.wal.WAL getWAL(org.apache.hadoop.hbase.regionserver.Region region)
+private org.apache.hadoop.hbase.wal.WAL getWAL(org.apache.hadoop.hbase.regionserver.Region region)
     
     
     
    @@ -513,7 +513,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     getNumRolledLogFiles
-private int getNumRolledLogFiles(org.apache.hadoop.hbase.regionserver.Region region)
+private int getNumRolledLogFiles(org.apache.hadoop.hbase.regionserver.Region region)
     
     
     
    @@ -522,7 +522,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     testFlushingWhenLogRolling
-public void testFlushingWhenLogRolling()
+public void testFlushingWhenLogRolling()
 throws Exception
 When a log roll is about to happen, we flush the regions that will be affected by the
  log roll. These flushes cannot be selective flushes, otherwise we cannot roll the logs. This
    @@ -540,7 +540,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     doPut
-private void doPut(org.apache.hadoop.hbase.client.Table table,
+private void doPut(org.apache.hadoop.hbase.client.Table table,
    long memstoreFlushSize)
 throws IOException, InterruptedException
    @@ -557,7 +557,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     testCompareStoreFileCount
-public void testCompareStoreFileCount()
+public void testCompareStoreFileCount()
    throws Exception
     
     Throws:
    @@ -571,7 +571,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     main
-public static void main(String[] args)
+public static void main(String[] args)
      throws 

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f183e80f/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegion.html
    --
    diff --git 
    a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegion.html 
    b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegion.html
    index 25be178..19470c7 100644
    --- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegion.html
    +++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegion.html
    @@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
     
     
     
    -public class TestHRegion
    +public class TestHRegion
extends Object
     Basic stand-alone testing of HRegion.  No clusters!
     
    @@ -1010,7 +1010,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG
     
     
     
    @@ -1019,7 +1019,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     name
-public org.junit.rules.TestName name
+public org.junit.rules.TestName name
     
     
     
    @@ -1028,7 +1028,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     timeout
-public static final org.junit.rules.TestRule timeout
+public static final org.junit.rules.TestRule timeout
     
     
     
    @@ -1037,7 +1037,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     thrown
-public final org.junit.rules.ExpectedException thrown
+public final org.junit.rules.ExpectedException thrown
     
     
     
    @@ -1046,7 +1046,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     COLUMN_FAMILY
-private static final String COLUMN_FAMILY
+private static final String COLUMN_FAMILY
     
     See Also:
     Constant
     Field Values
    @@ -1059,7 +1059,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     COLUMN_FAMILY_BYTES
-private static final byte[] COLUMN_FAMILY_BYTES
+private static final byte[] COLUMN_FAMILY_BYTES
     
     
     
    @@ -1068,7 +1068,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     region
    -org.apache.hadoop.hbase.regionserver.HRegion region
    +org.apache.hadoop.hbase.regionserver.HRegion region
     
     
     
    @@ -1077,7 +1077,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     TEST_UTIL
-protected static HBaseTestingUtility TEST_UTIL
+protected static HBaseTestingUtility TEST_UTIL
     
     
     
    @@ -1086,7 +1086,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     CONF
-public static org.apache.hadoop.conf.Configuration CONF
+public static org.apache.hadoop.conf.Configuration CONF
     
     
     
    @@ -1095,7 +1095,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     dir
-private String dir
+private String dir
     
     
     
    @@ -1104,7 +1104,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     FILESYSTEM
-private static org.apache.hadoop.fs.FileSystem FILESYSTEM
+private static org.apache.hadoop.fs.FileSystem FILESYSTEM
     
     
     
    @@ -1113,7 +1113,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     MAX_VERSIONS
-private final int MAX_VERSIONS
+private final int MAX_VERSIONS
     
     See Also:
     Constant
     Field Values
    @@ -1126,7 +1126,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     tableName
-protected org.apache.hadoop.hbase.TableName tableName
+protected org.apache.hadoop.hbase.TableName tableName
     
     
     
    @@ -1135,7 +1135,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     method
-protected String method
+protected String method
     
     
     
    @@ -1144,7 +1144,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     qual
-protected final byte[] qual
+protected final byte[] qual
     
     
     
    @@ -1153,7 +1153,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     qual1
-protected final byte[] qual1
+protected final byte[] qual1
     
     
     
    @@ -1162,7 +1162,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     qual2
    -protected finalbyte[] qual2
    +protected finalbyte[] qual2
     
     
     
    @@ -1171,7 +1171,7 

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/49431b18/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html 
    b/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
    index e941c10..c040cf5 100644
    --- a/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
    +++ b/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
    @@ -18,7 +18,7 @@
     catch(err) {
     }
     //-->
    -var methods = 
    {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6};
    +var methods = 
    {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6};
     var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
    Methods"],4:["t3","Abstract Methods"]};
     var altColor = "altColor";
     var rowColor = "rowColor";
    @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
     
     
     @InterfaceAudience.Private
    -public interface MasterServices
    +public interface MasterServices
     extends Server
     A curated subset of services provided by HMaster.
      For use internally only. Passed to Managers, Services and Chores so can pass 
    less-than-a
    @@ -151,7 +151,7 @@ extends 
    -void
    +long
addReplicationPeer(String peerId,
  ReplicationPeerConfig peerConfig,
  boolean enabled)
    @@ -201,7 +201,7 @@ extends 
    -void
    +long
disableReplicationPeer(String peerId)
     Stop the replication stream to the specified peer
     
    @@ -215,7 +215,7 @@ extends 
    -void
    +long
enableReplicationPeer(String peerId)
     Restart the replication stream to the specified peer
     
    @@ -253,7 +253,7 @@ extends getFavoredNodesManager()
     
     
    -ProcedureEvent
+ProcedureEvent<?>
     getInitializedEvent()
     
     
    @@ -327,60 +327,66 @@ extends 
    +ReplicationPeerManager
    +getReplicationPeerManager()
    +Returns the ReplicationPeerManager.
    +
    +
    +
     ServerManager
     getServerManager()
     
    -
    +
     SnapshotManager
     getSnapshotManager()
     
    -
    +
     TableDescriptors
     getTableDescriptors()
     
    -
    +
     TableStateManager
     getTableStateManager()
     
    -
    +
     boolean
     isActiveMaster()
     
    -
    +
     boolean
     isInitialized()
     
    -
    +
     boolean
     isInMaintenanceMode()
     
    -
    +
     boolean
     isServerCrashProcessingEnabled()
     
    -
    +
     boolean
isSplitOrMergeEnabled(MasterSwitchType switchType)
     
    -
    +
List<ReplicationPeerDescription>
listReplicationPeers(String regex)
     Return a list of replication peers.
     
     
    -
    +
List<TableDescriptor>
listTableDescriptorsByNamespace(String name)
     Get list of table descriptors by namespace
     
     
    -
    +
List<TableName>
listTableNamesByNamespace(String name)
     Get list of table names by namespace
     
     
    -
    +
     long
mergeRegions(RegionInfo[] regionsToMerge,
 boolean forcible,
    @@ -389,7 +395,7 @@ extends Merge regions in a table.
     
     
    -
    +
     long
modifyColumn(TableName tableName,
 ColumnFamilyDescriptor descriptor,
    @@ -398,7 +404,7 @@ extends Modify the column descriptor of an existing column in an 
    existing table
     
     
    -
    +
     long
modifyTable(TableName tableName,
TableDescriptor descriptor,
    @@ -407,25 +413,25 @@ extends Modify the descriptor of an existing table
     
     
    -
    +
     boolean
     recoverMeta()
     Recover meta table.
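Read together, the hunks above change the replication-peer operations from void to long and add a getReplicationPeerManager() accessor; reading the returned long as a procedure id is an interpretation, not something this page states. A sketch of the implied signatures (the interface name here is made up, and throws clauses are omitted):

    // Hypothetical fragment mirroring the diff; ReplicationPeerConfig and
    // ReplicationPeerManager are the HBase types referenced above.
    public interface ReplicationPeerOps {
      long addReplicationPeer(String peerId, ReplicationPeerConfig peerConfig, boolean enabled);
      long enableReplicationPeer(String peerId);
      long disableReplicationPeer(String peerId);
      ReplicationPeerManager getReplicationPeerManager();
    }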
     
     

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c7c40c62/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.html
     
    b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.html
    index 6077486..dc5f1b8 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.html
    @@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";
     
     
     @InterfaceAudience.LimitedPrivate(value="Configuration")
    -public class StochasticLoadBalancer
    +public class StochasticLoadBalancer
     extends BaseLoadBalancer
This is a best-effort load balancer. Given a cost function F(C) = x, it will
 randomly try to mutate the cluster to Cprime. If F(Cprime) < F(C) then the
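The accept-if-cheaper loop that sentence describes can be made concrete with a toy model; this sketch uses a made-up cost (variance of regions per server) and a made-up mutate step, not the balancer's actual Cluster/Action machinery:

    import java.util.Arrays;
    import java.util.Random;

    /** Toy rendering of: mutate C into Cprime, keep it iff F(Cprime) < F(C). */
    public class StochasticBalanceSketch {
      static final Random RANDOM = new Random(42);

      // Made-up cost function: variance of regions per server; lower is better.
      static double cost(int[] regionsPerServer) {
        double mean = 0;
        for (int r : regionsPerServer) mean += r;
        mean /= regionsPerServer.length;
        double var = 0;
        for (int r : regionsPerServer) var += (r - mean) * (r - mean);
        return var;
      }

      public static void main(String[] args) {
        int[] cluster = {12, 3, 9, 0};                 // C: regions per server
        double current = cost(cluster);
        for (int step = 0; step < 10_000; step++) {
          int from = RANDOM.nextInt(cluster.length);
          int to = RANDOM.nextInt(cluster.length);
          if (from == to || cluster[from] == 0) continue;
          cluster[from]--; cluster[to]++;              // mutate C into Cprime
          double next = cost(cluster);
          if (next < current) {
            current = next;                            // F(Cprime) < F(C): keep it
          } else {
            cluster[from]++; cluster[to]--;            // otherwise revert
          }
        }
        System.out.println(Arrays.toString(cluster));  // converges near [6, 6, 6, 6]
      }
    }

The real balancer also bounds the search, which is what the STEPS_PER_REGION_KEY, MAX_STEPS_KEY and MAX_RUNNING_TIME_KEY constants below appear to configure.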
    @@ -579,7 +579,7 @@ extends 
     void
-setClusterStatus(ClusterStatus st)
+setClusterMetrics(ClusterMetrics st)
     Set the current cluster status.
     
     
    @@ -664,7 +664,7 @@ extends 
     
     STEPS_PER_REGION_KEY
-protected static final String STEPS_PER_REGION_KEY
+protected static final String STEPS_PER_REGION_KEY
     
     See Also:
     Constant
     Field Values
    @@ -677,7 +677,7 @@ extends 
     
     MAX_STEPS_KEY
-protected static final String MAX_STEPS_KEY
+protected static final String MAX_STEPS_KEY
     
     See Also:
     Constant
     Field Values
    @@ -690,7 +690,7 @@ extends 
     
     RUN_MAX_STEPS_KEY
-protected static final String RUN_MAX_STEPS_KEY
+protected static final String RUN_MAX_STEPS_KEY
     
     See Also:
     Constant
     Field Values
    @@ -703,7 +703,7 @@ extends 
     
     MAX_RUNNING_TIME_KEY
-protected static final String MAX_RUNNING_TIME_KEY
+protected static final String MAX_RUNNING_TIME_KEY
     
     See Also:
     Constant
     Field Values
    @@ -716,7 +716,7 @@ extends 
     
     KEEP_REGION_LOADS
-protected static final String KEEP_REGION_LOADS
+protected static final String KEEP_REGION_LOADS
     
     See Also:
     Constant
     Field Values
    @@ -729,7 +729,7 @@ extends 
     
     TABLE_FUNCTION_SEP
-private static final String TABLE_FUNCTION_SEP
+private static final String TABLE_FUNCTION_SEP
     
     See Also:
     Constant
     Field Values
    @@ -742,7 +742,7 @@ extends 
     
     MIN_COST_NEED_BALANCE_KEY
-protected static final String MIN_COST_NEED_BALANCE_KEY
+protected static final String MIN_COST_NEED_BALANCE_KEY
     
     See Also:
     Constant
     Field Values
    @@ -755,7 +755,7 @@ extends 
     
     RANDOM
-protected static final Random RANDOM
+protected static final Random RANDOM
     
     
     
    @@ -764,7 +764,7 @@ extends 
     
     LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG
     
     
     
    @@ -773,7 +773,7 @@ extends 
     
     loads
    -http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
     title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
     title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/util/Deque.html?is-external=true;
     

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bb398572/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperation.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperation.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperation.html
    index b8e321a..439a50d 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperation.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperation.html
    @@ -468,274 +468,216 @@
     460  }
     461
     462  /**
    -463   * Used to gracefully handle fallback 
    to deprecated methods when we
    -464   * evolve coprocessor APIs.
    -465   *
    -466   * When a particular Coprocessor API is 
    updated to change methods, hosts can support fallback
    -467   * to the deprecated API by using this 
    method to determine if an instance implements the new API.
    -468   * In the event that said support is 
    partial, then in the face of a runtime issue that prevents
    -469   * proper operation {@link 
    #legacyWarning(Class, String)} should be used to let operators know.
    -470   *
    -471   * For examples of this in action, see 
    the implementation of
-472   * <ul>
-473   *   <li>{@link org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost}
-474   *   <li>{@link org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost}
-475   * </ul>
    -476   *
    -477   * @param clazz Coprocessor you wish to 
    evaluate
    -478   * @param methodName the name of the 
    non-deprecated method version
    -479   * @param parameterTypes the Class of 
    the non-deprecated method's arguments in the order they are
    -480   * declared.
    -481   */
    -482  @InterfaceAudience.Private
    -483  protected static boolean 
    useLegacyMethod(final Class? extends Coprocessor clazz,
    -484  final String methodName, final 
    Class?... parameterTypes) {
    -485boolean useLegacy;
    -486// Use reflection to see if they 
    implement the non-deprecated version
    -487try {
    -488  clazz.getDeclaredMethod(methodName, 
    parameterTypes);
    -489  LOG.debug("Found an implementation 
    of '" + methodName + "' that uses updated method " +
    -490  "signature. Skipping legacy 
    support for invocations in '" + clazz +"'.");
    -491  useLegacy = false;
    -492} catch (NoSuchMethodException 
    exception) {
    -493  useLegacy = true;
    -494} catch (SecurityException exception) 
    {
    -495  LOG.warn("The Security Manager 
    denied our attempt to detect if the coprocessor '" + clazz +
    -496  "' requires legacy support; 
    assuming it does. If you get later errors about legacy " +
    -497  "coprocessor use, consider 
    updating your security policy to allow access to the package" +
    -498  " and declared members of your 
    implementation.");
    -499  LOG.debug("Details of Security 
    Manager rejection.", exception);
    -500  useLegacy = true;
    +463   * Used to limit legacy handling to 
    once per Coprocessor class per classloader.
    +464   */
+465  private static final Set<Class<? extends Coprocessor>> legacyWarning =
+466  new ConcurrentSkipListSet<>(
+467  new Comparator<Class<? extends Coprocessor>>() {
+468@Override
+469public int compare(Class<? extends Coprocessor> c1, Class<? extends Coprocessor> c2) {
+470  if (c1.equals(c2)) {
+471return 0;
+472  }
+473  return c1.getName().compareTo(c2.getName());
+474}
+475  });
    +476
    +477  /**
    +478   * Implementations defined function to 
    get an observer of type {@code O} from a coprocessor of
    +479   * type {@code C}. Concrete 
    implementations of CoprocessorHost define one getter for each
    +480   * observer they can handle. For e.g. 
    RegionCoprocessorHost will use 3 getters, one for
    +481   * each of RegionObserver, 
    EndpointObserver and BulkLoadObserver.
    +482   * These getters are used by {@code 
    ObserverOperation} to get appropriate observer from the
    +483   * coprocessor.
    +484   */
+485  @FunctionalInterface
+486  public interface ObserverGetter<C, O> extends Function<C, Optional<O>> {}
+487
+488  private abstract class ObserverOperation<O> extends ObserverContextImpl<E> {
+489ObserverGetter<C, O> observerGetter;
+490
+491ObserverOperation(ObserverGetter<C, O> observerGetter) {
+492  this(observerGetter, null);
+493}
+494
+495ObserverOperation(ObserverGetter<C, O> observerGetter, User user) {
+496  this(observerGetter, user, false);
+497}
+498
+499ObserverOperation(ObserverGetter<C, O> observerGetter, boolean bypassable) {
+500  this(observerGetter, null, bypassable);
 501}
    -502return useLegacy;
    -503  }
    -504
    -505  /**
    -506   * Used to limit legacy handling to 
    once per Coprocessor class per classloader.
    -507   */
-508  private static final Set<Class<? extends Coprocessor>> legacyWarning =
-509  new ConcurrentSkipListSet<>(
-510  
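The useLegacyMethod body removed above is, at heart, a reflection probe: a declared method means the new API is implemented, NoSuchMethodException means only the deprecated one is, and SecurityException means assume legacy. A standalone rendering of that logic (logging and the Coprocessor type bound elided):

    /** Sketch of the reflection probe from the removed code above. */
    public class LegacyProbe {
      static boolean useLegacyMethod(Class<?> clazz, String methodName,
          Class<?>... parameterTypes) {
        try {
          clazz.getDeclaredMethod(methodName, parameterTypes);
          return false;        // updated signature is declared: no fallback needed
        } catch (NoSuchMethodException e) {
          return true;         // only the deprecated signature exists
        } catch (SecurityException e) {
          return true;         // cannot tell: assume legacy, as the host does
        }
      }

      public static void main(String[] args) {
        System.out.println(useLegacyMethod(String.class, "isEmpty"));      // false
        System.out.println(useLegacyMethod(String.class, "noSuchMethod")); // true
      }
    }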

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheableDeserializerIdManager.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheableDeserializerIdManager.html
     
    b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheableDeserializerIdManager.html
    index 726ed31..e8a93c1 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheableDeserializerIdManager.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheableDeserializerIdManager.html
    @@ -120,6 +120,6 @@
     
     
     
-Copyright © 2007-2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007-2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CachedBlock.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CachedBlock.html 
    b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CachedBlock.html
    index 59ee24f..21dc8c1 100644
    --- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CachedBlock.html
    +++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CachedBlock.html
    @@ -273,6 +273,6 @@
     
     
     
-Copyright © 2007-2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007-2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/ChecksumUtil.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/ChecksumUtil.html 
    b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/ChecksumUtil.html
    index eb695dc..af30dd2 100644
    --- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/ChecksumUtil.html
    +++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/ChecksumUtil.html
    @@ -120,6 +120,6 @@
     
     
     
-Copyright © 2007-2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007-2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CombinedBlockCache.CombinedCacheStats.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CombinedBlockCache.CombinedCacheStats.html
     
    b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CombinedBlockCache.CombinedCacheStats.html
    index eb8d443..c777ee8 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CombinedBlockCache.CombinedCacheStats.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CombinedBlockCache.CombinedCacheStats.html
    @@ -163,6 +163,6 @@
     
     
     
-Copyright © 2007-2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007-2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CombinedBlockCache.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CombinedBlockCache.html 
    b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CombinedBlockCache.html
    index 0ee4fea..4fcf7ce 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CombinedBlockCache.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CombinedBlockCache.html
    @@ -163,6 +163,6 @@
     
     
     
-Copyright © 2007-2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007-2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CompoundBloomFilter.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CompoundBloomFilter.html
     
    b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CompoundBloomFilter.html
    index c8a5c2b..4671194 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CompoundBloomFilter.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CompoundBloomFilter.html
    @@ -120,6 +120,6 @@
     
     
     
-Copyright © 2007-2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007-2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
     
     
    
    

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/83bf6175/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.CallerWithFailure.html
    --
    diff --git 
    a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.CallerWithFailure.html
     
    b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.CallerWithFailure.html
    index bbd91b8..4f76302 100644
    --- 
    a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.CallerWithFailure.html
    +++ 
    b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.CallerWithFailure.html
    @@ -56,1641 +56,1753 @@
     048import 
    java.util.concurrent.atomic.AtomicBoolean;
     049import 
    java.util.concurrent.atomic.AtomicInteger;
     050import 
    java.util.concurrent.atomic.AtomicLong;
    -051
    -052import 
    org.apache.hadoop.conf.Configuration;
    -053import 
    org.apache.hadoop.hbase.CallQueueTooBigException;
    -054import 
    org.apache.hadoop.hbase.CategoryBasedTimeout;
    -055import org.apache.hadoop.hbase.Cell;
    -056import 
    org.apache.hadoop.hbase.HConstants;
    -057import 
    org.apache.hadoop.hbase.HRegionInfo;
    -058import 
    org.apache.hadoop.hbase.HRegionLocation;
    -059import 
    org.apache.hadoop.hbase.RegionLocations;
    -060import 
    org.apache.hadoop.hbase.ServerName;
    -061import 
    org.apache.hadoop.hbase.TableName;
    -062import 
    org.apache.hadoop.hbase.client.AsyncProcessTask.ListRowAccess;
    -063import 
    org.apache.hadoop.hbase.client.AsyncProcessTask.SubmittedRows;
    -064import 
    org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicy;
    -065import 
    org.apache.hadoop.hbase.client.backoff.ServerStatistics;
    -066import 
    org.apache.hadoop.hbase.client.coprocessor.Batch;
    -067import 
    org.apache.hadoop.hbase.ipc.RpcControllerFactory;
    -068import 
    org.apache.hadoop.hbase.testclassification.ClientTests;
    -069import 
    org.apache.hadoop.hbase.testclassification.MediumTests;
    -070import 
    org.apache.hadoop.hbase.util.Bytes;
    -071import 
    org.apache.hadoop.hbase.util.Threads;
    -072import org.junit.Assert;
    -073import org.junit.BeforeClass;
    -074import org.junit.Ignore;
    -075import org.junit.Rule;
    -076import org.junit.Test;
    -077import 
    org.junit.experimental.categories.Category;
    -078import org.junit.rules.TestRule;
    -079import org.mockito.Mockito;
    -080import org.slf4j.Logger;
    -081import org.slf4j.LoggerFactory;
    -082
    -083@Category({ClientTests.class, 
    MediumTests.class})
    -084public class TestAsyncProcess {
    -085  @Rule public final TestRule timeout = 
    CategoryBasedTimeout.builder().withTimeout(this.getClass()).
    -086  
    withLookingForStuckThread(true).build();
    -087  private static final Logger LOG = 
    LoggerFactory.getLogger(TestAsyncProcess.class);
    -088  private static final TableName 
    DUMMY_TABLE =
    -089  TableName.valueOf("DUMMY_TABLE");
    -090  private static final byte[] 
    DUMMY_BYTES_1 = Bytes.toBytes("DUMMY_BYTES_1");
    -091  private static final byte[] 
    DUMMY_BYTES_2 = Bytes.toBytes("DUMMY_BYTES_2");
    -092  private static final byte[] 
    DUMMY_BYTES_3 = Bytes.toBytes("DUMMY_BYTES_3");
    -093  private static final byte[] FAILS = 
    Bytes.toBytes("FAILS");
    -094  private static final Configuration CONF 
    = new Configuration();
    -095  private static final 
    ConnectionConfiguration CONNECTION_CONFIG =
    -096  new 
    ConnectionConfiguration(CONF);
    -097  private static final ServerName sn = 
    ServerName.valueOf("s1,1,1");
    -098  private static final ServerName sn2 = 
    ServerName.valueOf("s2,2,2");
    -099  private static final ServerName sn3 = 
    ServerName.valueOf("s3,3,3");
    -100  private static final HRegionInfo hri1 
    =
    -101  new HRegionInfo(DUMMY_TABLE, 
    DUMMY_BYTES_1, DUMMY_BYTES_2, false, 1);
    -102  private static final HRegionInfo hri2 
    =
    -103  new HRegionInfo(DUMMY_TABLE, 
    DUMMY_BYTES_2, HConstants.EMPTY_END_ROW, false, 2);
    -104  private static final HRegionInfo hri3 
    =
    -105  new HRegionInfo(DUMMY_TABLE, 
    DUMMY_BYTES_3, HConstants.EMPTY_END_ROW, false, 3);
    -106  private static final HRegionLocation 
    loc1 = new HRegionLocation(hri1, sn);
    -107  private static final HRegionLocation 
    loc2 = new HRegionLocation(hri2, sn);
    -108  private static final HRegionLocation 
    loc3 = new HRegionLocation(hri3, sn2);
    -109
    -110  // Replica stuff
    -111  private static final RegionInfo hri1r1 
    = RegionReplicaUtil.getRegionInfoForReplica(hri1, 1);
    -112  private static final RegionInfo hri1r2 
    = RegionReplicaUtil.getRegionInfoForReplica(hri1, 2);
    -113  private static final RegionInfo hri2r1 
    = RegionReplicaUtil.getRegionInfoForReplica(hri2, 1);
    -114  private static final RegionLocations 
    hrls1 = new RegionLocations(new HRegionLocation(hri1, sn),
    -115  new HRegionLocation(hri1r1, sn2), 
    new HRegionLocation(hri1r2, sn3));
    -116  private static final RegionLocations 
    hrls2 = new RegionLocations(new HRegionLocation(hri2, sn2),
    -117  new HRegionLocation(hri2r1, 
    sn3));
    -118  private static final RegionLocations 
    hrls3 =
    -119  new RegionLocations(new 
    HRegionLocation(hri3, sn3), null);
    -120
    -121  private static final 

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/63d6f712/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html 
    b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
    index 896f633..0dfdab4 100644
    --- a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
    +++ b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
    @@ -209,12 +209,12 @@ implements 
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
     
     
     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse
-abortProcedure(org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController rpcController,
+abortProcedure(org.apache.hbase.thirdparty.com.google.protobuf.RpcController rpcController,
   org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest request)
     
     
     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse
-addColumn(org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
+addColumn(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
  org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest req)
     
     
    @@ -224,17 +224,17 @@ implements 
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
     
     
     org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse
-addReplicationPeer(org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
+addReplicationPeer(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
   org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest request)
     
     
     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse
-assignRegion(org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
+assignRegion(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest req)
     
     
     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse
-balance(org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
+balance(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest request)
     
     
    @@ -253,7 +253,7 @@ implements 
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
     
     
     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersResponse
-clearDeadServers(org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
+clearDeadServers(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersRequest request)
     
     
    @@ -265,7 +265,7 @@ implements 
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
     
     
     org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse
-compactRegion(org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
+compactRegion(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
  org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest request)
     Compact a region on the master.
     
    @@ -280,7 +280,7 @@ implements 
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
     
     
     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse
-createNamespace(org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
+createNamespace(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest request)
     
     
    @@ -297,76 +297,76 @@ implements 
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
     
     
     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse
-createTable(org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
+createTable(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest req)
     
     
     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersResponse
-decommissionRegionServers(org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
+decommissionRegionServers(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
  org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersRequest request)
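The change repeated through this hunk is a package relocation for the protobuf RpcController parameter; in source form it is essentially an import swap:

    // Before: protobuf classes relocated inside hbase's own shaded namespace
    // import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
    // After: protobuf classes relocated into the separate hbase-thirdparty artifact
    import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;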
    

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d449e87f/devapidocs/org/apache/hadoop/hbase/io/HeapSize.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/io/HeapSize.html 
    b/devapidocs/org/apache/hadoop/hbase/io/HeapSize.html
    index 398945e..abef71d 100644
    --- a/devapidocs/org/apache/hadoop/hbase/io/HeapSize.html
    +++ b/devapidocs/org/apache/hadoop/hbase/io/HeapSize.html
    @@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab";
     
     
     All Known Implementing Classes:
-Append, BlockCacheKey, BucketCache, BufferedDataBlockEncoder.OffheapDecodedCell, BufferedDataBlockEncoder.OnheapDecodedCell, ByteBufferChunkCell, ByteBufferKeyValue, CombinedBlockCache, Delete, HFileBlock, HFileBlockIndex.BlockIndexReader, HFileBlockIndex.ByteArrayKeyBlockIndexReader, HFileBlockIndex.CellBasedKeyBlockIndexReader, HFileContext, HMobStore, HRegion, HStore, InclusiveCombinedBlockCache, Increment, IndividualBytesFieldCell, KeyValue, KeyValue.KeyOnlyKeyValue, LruBlockCache, LruCachedBlock, LruCachedBlockQueue, MapReduceCell, Mutation, Mutation.CellWrapper, NoTagByteBufferChunkCell, NoTagsByteBufferKeyValue, NoTagsKeyValue, PrivateCellUtil.EmptyByteBufferCell, PrivateCellUtil.EmptyCell, PrivateCellUtil.FirstOnRowByteBufferCell, PrivateCellUtil.FirstOnRowCell, PrivateCellUtil.FirstOnRowColByteBufferCell, PrivateCellUtil.FirstOnRowColCell, PrivateCellUtil.FirstOnRowColTSByteBufferCell, PrivateCellUtil.FirstOnRowColTSCell, PrivateCellUtil.FirstOnRowDeleteFamilyCell, PrivateCellUtil.LastOnRowByteBufferCell, PrivateCellUtil.LastOnRowCell, PrivateCellUtil.LastOnRowColByteBufferCell, PrivateCellUtil.LastOnRowColCell, PrivateCellUtil.TagRewriteByteBufferCell, PrivateCellUtil.TagRewriteCell, PrivateCellUtil.ValueAndTagRewriteByteBufferCell, PrivateCellUtil.ValueAndTagRewriteCell, Put, SizeCachedKeyValue, SizeCachedNoTagsKeyValue, WALEdit, WALSplitter.RegionEntryBuffer
+Append, BlockCacheKey, BucketCache, BufferedDataBlockEncoder.OffheapDecodedExtendedCell, BufferedDataBlockEncoder.OnheapDecodedCell, ByteBufferChunkKeyValue, ByteBufferExtendedCell, ByteBufferKeyOnlyKeyValue, ByteBufferKeyValue, CombinedBlockCache, Delete, HFileBlock, HFileBlockIndex.BlockIndexReader, HFileBlockIndex.ByteArrayKeyBlockIndexReader, HFileBlockIndex.CellBasedKeyBlockIndexReader, HFileContext, HMobStore, HRegion, HStore, InclusiveCombinedBlockCache, Increment, IndividualBytesFieldCell, KeyOnlyFilter.KeyOnlyByteBufferExtendedCell, KeyValue, KeyValue.KeyOnlyKeyValue, LruBlockCache, LruCachedBlock, LruCachedBlockQueue, MapReduceExtendedCell, Mutation, Mutation.CellWrapper, NoTagByteBufferChunkKeyValue, NoTagsByteBufferKeyValue, NoTagsKeyValue, PrivateCellUtil.EmptyByteBufferExtendedCell, PrivateCellUtil.EmptyCell, PrivateCellUtil.FirstOnRowByteBufferExtendedCell, PrivateCellUtil.FirstOnRowCell, PrivateCellUtil.FirstOnRowColByteBufferExtendedCell, PrivateCellUtil.FirstOnRowColCell, PrivateCellUtil.FirstOnRowColTSByteBufferExtendedCell, PrivateCellUtil.FirstOnRowColTSCell, PrivateCellUtil.FirstOnRowDeleteFamilyCell, PrivateCellUtil.LastOnRowByteBufferExtendedCell, PrivateCellUtil.LastOnRowCell, PrivateCellUtil.LastOnRowColByteBufferExtendedCell, PrivateCellUtil.LastOnRowColCell, PrivateCellUtil.TagRewriteByteBufferExtendedCell, PrivateCellUtil.TagRewriteCell, PrivateCellUtil.ValueAndTagRewriteByteBufferExtendedCell, PrivateCellUtil.ValueAndTagRewriteCell, Put, SizeCachedKeyValue, SizeCachedNoTagsKeyValue, WALEdit, WALSplitter.RegionEntryBuffer
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d449e87f/devapidocs/org/apache/hadoop/hbase/io/class-use/HeapSize.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/io/class-use/HeapSize.html 
    b/devapidocs/org/apache/hadoop/hbase/io/class-use/HeapSize.html
    index 4817a35..033cb85 100644
    --- a/devapidocs/org/apache/hadoop/hbase/io/class-use/HeapSize.html
    +++ b/devapidocs/org/apache/hadoop/hbase/io/class-use/HeapSize.html
    @@ -93,36 +93,43 @@
     
     
     
    +org.apache.hadoop.hbase.filter
    +
    +Provides row-level filters applied to HRegion scan results 
    during calls to
    

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyCell.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyCell.html 
    b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyCell.html
    index 3400507..2baa140 100644
    --- a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyCell.html
    +++ b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyCell.html
    @@ -28,3034 +28,2926 @@
     020import static 
    org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
     021import static 
    org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
     022
    -023import 
    com.google.common.annotations.VisibleForTesting;
    -024
    -025import java.io.DataOutput;
    -026import java.io.DataOutputStream;
    -027import java.io.IOException;
    -028import java.io.OutputStream;
    -029import java.math.BigDecimal;
    -030import java.nio.ByteBuffer;
    -031import java.util.ArrayList;
    -032import java.util.Iterator;
    -033import java.util.List;
    -034import java.util.Optional;
    -035
    -036import 
    org.apache.hadoop.hbase.KeyValue.Type;
    -037import 
    org.apache.hadoop.hbase.filter.ByteArrayComparable;
    -038import 
    org.apache.hadoop.hbase.io.HeapSize;
    -039import 
    org.apache.hadoop.hbase.io.TagCompressionContext;
    -040import 
    org.apache.hadoop.hbase.io.util.Dictionary;
    -041import 
    org.apache.hadoop.hbase.io.util.StreamUtils;
    -042import 
    org.apache.hadoop.hbase.util.ByteBufferUtils;
    -043import 
    org.apache.hadoop.hbase.util.ByteRange;
    -044import 
    org.apache.hadoop.hbase.util.Bytes;
    -045import 
    org.apache.hadoop.hbase.util.ClassSize;
    -046import 
    org.apache.yetus.audience.InterfaceAudience;
    -047
    -048
    -049/**
-050 * Utility methods helpful for slinging {@link Cell} instances. It has a more powerful and
-051 * richer set of APIs than those in {@link CellUtil}, for internal usage.
    -052 */
    -053@InterfaceAudience.Private
    -054public final class PrivateCellUtil {
    -055
    -056  /**
    -057   * Private constructor to keep this 
    class from being instantiated.
    -058   */
    -059  private PrivateCellUtil() {
    -060  }
    +023import java.io.DataOutput;
    +024import java.io.DataOutputStream;
    +025import java.io.IOException;
    +026import java.io.OutputStream;
    +027import java.math.BigDecimal;
    +028import java.nio.ByteBuffer;
    +029import java.util.ArrayList;
    +030import java.util.Iterator;
    +031import java.util.List;
    +032import java.util.Optional;
    +033import 
    org.apache.hadoop.hbase.KeyValue.Type;
    +034import 
    org.apache.hadoop.hbase.filter.ByteArrayComparable;
    +035import 
    org.apache.hadoop.hbase.io.HeapSize;
    +036import 
    org.apache.hadoop.hbase.io.TagCompressionContext;
    +037import 
    org.apache.hadoop.hbase.io.util.Dictionary;
    +038import 
    org.apache.hadoop.hbase.io.util.StreamUtils;
    +039import 
    org.apache.hadoop.hbase.util.ByteBufferUtils;
    +040import 
    org.apache.hadoop.hbase.util.ByteRange;
    +041import 
    org.apache.hadoop.hbase.util.Bytes;
    +042import 
    org.apache.hadoop.hbase.util.ClassSize;
    +043import 
    org.apache.yetus.audience.InterfaceAudience;
    +044
    +045import 
    org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
    +046
    +047/**
+048 * Utility methods helpful for slinging {@link Cell} instances. It has a more powerful and
+049 * richer set of APIs than those in {@link CellUtil}, for internal usage.
    +050 */
    +051@InterfaceAudience.Private
    +052public final class PrivateCellUtil {
    +053
    +054  /**
    +055   * Private constructor to keep this 
    class from being instantiated.
    +056   */
    +057  private PrivateCellUtil() {
    +058  }
    +059
+060  /*** ByteRange ***/
 061
-062  /*** ByteRange ***/
-063
-064  public static ByteRange fillRowRange(Cell cell, ByteRange range) {
-065    return range.set(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
-066  }
-067
-068  public static ByteRange fillFamilyRange(Cell cell, ByteRange range) {
-069    return range.set(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
-070  }
-071
-072  public static ByteRange fillQualifierRange(Cell cell, ByteRange range) {
-073    return range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-074      cell.getQualifierLength());
-075  }
-076
-077  public static ByteRange fillValueRange(Cell cell, ByteRange range) {
-078    return range.set(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
-079  }
-080
-081  public static ByteRange fillTagRange(Cell cell, ByteRange range) {
-082    return range.set(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
-083  }
+062  public static ByteRange fillRowRange(Cell cell, ByteRange range) {
+063    return range.set(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
+064  }
+065
+066  public static ByteRange fillFamilyRange(Cell cell, ByteRange range) {
+067    return range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b618ac40/devapidocs/src-html/org/apache/hadoop/hbase/client/Put.html
    --
    diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/Put.html 
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/Put.html
    index d152a92..8c56813 100644
    --- a/devapidocs/src-html/org/apache/hadoop/hbase/client/Put.html
    +++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/Put.html
    @@ -34,339 +34,320 @@
     026import java.util.NavigableMap;
     027import java.util.UUID;
     028import org.apache.hadoop.hbase.Cell;
-029import org.apache.hadoop.hbase.CellUtil;
-030import org.apache.hadoop.hbase.HConstants;
-031import org.apache.hadoop.hbase.IndividualBytesFieldCell;
-032import org.apache.hadoop.hbase.KeyValue;
-033import org.apache.hadoop.hbase.io.HeapSize;
-034import org.apache.hadoop.hbase.security.access.Permission;
-035import org.apache.hadoop.hbase.security.visibility.CellVisibility;
-036import org.apache.hadoop.hbase.util.Bytes;
-037import org.apache.yetus.audience.InterfaceAudience;
    -038
-039/**
-040 * Used to perform Put operations for a single row.
-041 * <p>
-042 * To perform a Put, instantiate a Put object with the row to insert to, and
-043 * for each column to be inserted, execute {@link #addColumn(byte[], byte[],
-044 * byte[]) add} or {@link #addColumn(byte[], byte[], long, byte[]) add} if
-045 * setting the timestamp.
-046 */
-047@InterfaceAudience.Public
-048public class Put extends Mutation implements HeapSize, Comparable<Row> {
-049  /**
-050   * Create a Put operation for the specified row.
-051   * @param row row key
-052   */
-053  public Put(byte [] row) {
-054    this(row, HConstants.LATEST_TIMESTAMP);
-055  }
-056
-057  /**
-058   * Create a Put operation for the specified row, using a given timestamp.
-059   *
-060   * @param row row key; we make a copy of what we are passed to keep local.
-061   * @param ts timestamp
-062   */
-063  public Put(byte[] row, long ts) {
-064    this(row, 0, row.length, ts);
-065  }
-066
-067  /**
-068   * We make a copy of the passed in row key to keep local.
-069   * @param rowArray
-070   * @param rowOffset
-071   * @param rowLength
-072   */
-073  public Put(byte [] rowArray, int rowOffset, int rowLength) {
-074    this(rowArray, rowOffset, rowLength, HConstants.LATEST_TIMESTAMP);
-075  }
-076
-077  /**
-078   * @param row row key; we make a copy of what we are passed to keep local.
-079   * @param ts  timestamp
-080   */
-081  public Put(ByteBuffer row, long ts) {
-082    if (ts < 0) {
-083      throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + ts);
-084    }
-085    checkRow(row);
-086    this.row = new byte[row.remaining()];
-087    row.get(this.row);
-088    this.ts = ts;
-089  }
-090
-091  /**
-092   * @param row row key; we make a copy of what we are passed to keep local.
-093   */
-094  public Put(ByteBuffer row) {
-095    this(row, HConstants.LATEST_TIMESTAMP);
-096  }
-097
-098  /**
-099   * We make a copy of the passed in row key to keep local.
-100   * @param rowArray
-101   * @param rowOffset
-102   * @param rowLength
-103   * @param ts
-104   */
-105  public Put(byte [] rowArray, int rowOffset, int rowLength, long ts) {
-106    checkRow(rowArray, rowOffset, rowLength);
-107    this.row = Bytes.copy(rowArray, rowOffset, rowLength);
-108    this.ts = ts;
-109    if (ts < 0) {
-110      throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + ts);
-111    }
-112  }
-113
-114  /**
-115   * Create a Put operation for an immutable row key.
-116   *
-117   * @param row row key
-118   * @param rowIsImmutable whether the input row is immutable.
-119   *                       Set to true if the caller can guarantee that
-120   *                       the row will not be changed for the Put duration.
-121   */
-122  public Put(byte [] row, boolean rowIsImmutable) {
-123    this(row, HConstants.LATEST_TIMESTAMP, rowIsImmutable);
-124  }
-125
-126  /**
-127   * Create a Put operation for an immutable row key, using a given timestamp.
-128   *
-129   * @param row row key
-130   * @param ts timestamp
-131   * @param rowIsImmutable whether the input row is immutable.
-132   *                       Set to true if the caller can guarantee that
-133   *                       the row will not be changed for the Put duration.
-134   */
-135  public Put(byte[] row, long ts, boolean rowIsImmutable) {
-136    // Check and set timestamp
-137    if (ts < 0) {
-138      throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + ts);
-139    }
-140    this.ts = ts;
-141
-142    // Deal with row according to rowIsImmutable
-143    checkRow(row);
-144    if (rowIsImmutable) {  // Row is immutable
-145      this.row = row;  // Do not make a local copy, but point to the provided byte array directly
-146    } else {  // Row is not immutable
-147      this.row = Bytes.copy(row, 0, 
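Following the class javadoc above, a minimal usage sketch of the public Put API; the table, family, and qualifier names here are made up for illustration:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class PutExample {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection();
         Table table = conn.getTable(TableName.valueOf("t1"))) {
      Put put = new Put(Bytes.toBytes("row1"));  // defaults to LATEST_TIMESTAMP
      put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
      // Overload with an explicit timestamp, per the javadoc above.
      put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q2"), 42L, Bytes.toBytes("v2"));
      table.put(put);
    }
  }
}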
    

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7c0589c0/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.SplitTableRegionFuture.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.SplitTableRegionFuture.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.SplitTableRegionFuture.html
    index 6fecbc9..2accda0 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.SplitTableRegionFuture.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.SplitTableRegionFuture.html
    @@ -34,4140 +34,4141 @@
 026import java.nio.charset.StandardCharsets;
 027import java.util.ArrayList;
 028import java.util.Arrays;
-029import java.util.Collection;
-030import java.util.EnumSet;
-031import java.util.HashMap;
-032import java.util.Iterator;
-033import java.util.LinkedList;
-034import java.util.List;
-035import java.util.Map;
-036import java.util.Set;
-037import java.util.concurrent.Callable;
-038import java.util.concurrent.ExecutionException;
-039import java.util.concurrent.Future;
-040import java.util.concurrent.TimeUnit;
-041import java.util.concurrent.TimeoutException;
-042import java.util.concurrent.atomic.AtomicInteger;
-043import java.util.concurrent.atomic.AtomicReference;
-044import java.util.regex.Pattern;
-045import java.util.stream.Collectors;
-046import java.util.stream.Stream;
-047import org.apache.hadoop.conf.Configuration;
-048import org.apache.hadoop.hbase.Abortable;
-049import org.apache.hadoop.hbase.CacheEvictionStats;
-050import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
-051import org.apache.hadoop.hbase.ClusterMetrics.Option;
-052import org.apache.hadoop.hbase.ClusterStatus;
-053import org.apache.hadoop.hbase.DoNotRetryIOException;
-054import org.apache.hadoop.hbase.HBaseConfiguration;
-055import org.apache.hadoop.hbase.HConstants;
-056import org.apache.hadoop.hbase.HRegionInfo;
-057import org.apache.hadoop.hbase.HRegionLocation;
-058import org.apache.hadoop.hbase.HTableDescriptor;
-059import org.apache.hadoop.hbase.MasterNotRunningException;
-060import org.apache.hadoop.hbase.MetaTableAccessor;
-061import org.apache.hadoop.hbase.NamespaceDescriptor;
-062import org.apache.hadoop.hbase.NamespaceNotFoundException;
-063import org.apache.hadoop.hbase.NotServingRegionException;
-064import org.apache.hadoop.hbase.RegionLoad;
-065import org.apache.hadoop.hbase.RegionLocations;
-066import org.apache.hadoop.hbase.ServerName;
-067import org.apache.hadoop.hbase.TableExistsException;
-068import org.apache.hadoop.hbase.TableName;
-069import org.apache.hadoop.hbase.TableNotDisabledException;
-070import org.apache.hadoop.hbase.TableNotFoundException;
-071import org.apache.hadoop.hbase.UnknownRegionException;
-072import org.apache.hadoop.hbase.ZooKeeperConnectionException;
-073import org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-074import org.apache.hadoop.hbase.client.replication.TableCFs;
-075import org.apache.hadoop.hbase.client.security.SecurityCapability;
-076import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-077import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-078import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-079import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-080import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-081import org.apache.hadoop.hbase.quotas.QuotaFilter;
-082import org.apache.hadoop.hbase.quotas.QuotaRetriever;
-083import org.apache.hadoop.hbase.quotas.QuotaSettings;
-084import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
-085import org.apache.hadoop.hbase.replication.ReplicationException;
-086import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-087import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-088import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-089import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
-090import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-091import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-092import org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
-093import org.apache.hadoop.hbase.util.Addressing;
-094import org.apache.hadoop.hbase.util.Bytes;
-095import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-096import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-097import org.apache.hadoop.hbase.util.Pair;
-098import org.apache.hadoop.ipc.RemoteException;
-099import org.apache.hadoop.util.StringUtils;
-100import org.apache.yetus.audience.InterfaceAudience;
-101import org.apache.yetus.audience.InterfaceStability;
-102import org.slf4j.Logger;
-103import org.slf4j.LoggerFactory;
-104
-105import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-106import 
    

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4b2cc17/devapidocs/org/apache/hadoop/hbase/ClusterId.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/ClusterId.html 
    b/devapidocs/org/apache/hadoop/hbase/ClusterId.html
    index 85e4cb0..7174a4f 100644
    --- a/devapidocs/org/apache/hadoop/hbase/ClusterId.html
    +++ b/devapidocs/org/apache/hadoop/hbase/ClusterId.html
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4b2cc17/devapidocs/org/apache/hadoop/hbase/ClusterMetrics.Option.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/ClusterMetrics.Option.html 
    b/devapidocs/org/apache/hadoop/hbase/ClusterMetrics.Option.html
    new file mode 100644
    index 000..7577a21
    --- /dev/null
    +++ b/devapidocs/org/apache/hadoop/hbase/ClusterMetrics.Option.html
    @@ -0,0 +1,473 @@
+ClusterMetrics.Option (Apache HBase 3.0.0-SNAPSHOT API)
+org.apache.hadoop.hbase
+Enum ClusterMetrics.Option
+
+java.lang.Object
+  java.lang.Enum<ClusterMetrics.Option>
+    org.apache.hadoop.hbase.ClusterMetrics.Option
+
+All Implemented Interfaces:
+Serializable, Comparable<ClusterMetrics.Option>
+
+Enclosing interface:
+ClusterMetrics
+
+public static enum ClusterMetrics.Option
+extends Enum<ClusterMetrics.Option>
+Kinds of ClusterMetrics
+
+Enum Constant Summary
+
+Enum Constant and Description
+
+BACKUP_MASTERS: metrics about backup masters name
+BALANCER_ON: metrics about balancer is on or not
+CLUSTER_ID: metrics about cluster id
+DEAD_SERVERS: metrics about dead region servers
+HBASE_VERSION: metrics about hbase version
+LIVE_SERVERS: metrics about live region servers
+MASTER: metrics about master name
+MASTER_COPROCESSORS: metrics about master coprocessors
+MASTER_INFO_PORT: metrics info port
+REGIONS_IN_TRANSITION: metrics about regions in transition
+
+Method Summary
+
+static ClusterMetrics.Option valueOf(String name)
+  Returns the enum constant of this type with the specified name.
+
+static ClusterMetrics.Option[] values()
+  Returns an array containing the constants of this enum type, in
+  the order they are declared.
+
+Methods inherited from class java.lang.Enum:
+clone, compareTo, 
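A small sketch of how this enum is typically consumed through the public Admin API (the getClusterMetrics overload taking an EnumSet exists in HBase 2+; this is an illustration, not quoted from the page above):

import java.util.EnumSet;
import org.apache.hadoop.hbase.ClusterMetrics;
import org.apache.hadoop.hbase.ClusterMetrics.Option;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class ClusterMetricsExample {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection();
         Admin admin = conn.getAdmin()) {
      // Ask the master for only the metric kinds we care about.
      ClusterMetrics metrics =
          admin.getClusterMetrics(EnumSet.of(Option.HBASE_VERSION, Option.LIVE_SERVERS));
      System.out.println(metrics.getHBaseVersion());
      System.out.println(metrics.getLiveServerMetrics().size());
    }
  }
}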

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.html 
    b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.html
    index 422ec86..350b409 100644
    --- a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.html
    +++ b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.html
    @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
     
     
 @InterfaceAudience.Private
-public class MetaTableAccessor
+public class MetaTableAccessor
 extends java.lang.Object
 Read/write operations on region and assignment information stored in
 hbase:meta.
@@ -197,7 +197,7 @@
 daughterNameCq
 
-private static org.apache.commons.logging.Log
+private static org.slf4j.Logger
 LOG
 
@@ -211,7 +211,7 @@
 
-private static org.apache.commons.logging.Log
+private static org.slf4j.Logger
 METALOG
 
@@ -1053,7 +1053,7 @@
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.slf4j.Logger LOG
 
@@ -1062,7 +1062,7 @@
 
 METALOG
-private static final org.apache.commons.logging.Log METALOG
+private static final org.slf4j.Logger METALOG
 
@@ -1071,7 +1071,7 @@
 
 daughterNameCq
-private static final byte[] daughterNameCq
+private static final byte[] daughterNameCq
 
@@ -1080,7 +1080,7 @@
 
 parentNameCq
-private static final byte[] parentNameCq
+private static final byte[] parentNameCq
 
@@ -1089,7 +1089,7 @@
 
 tableNameCq
-private static final byte[] tableNameCq
+private static final byte[] tableNameCq
 
@@ -1098,7 +1098,7 @@
 
 META_REGION_PREFIX
-static final byte[] META_REGION_PREFIX
+static final byte[] META_REGION_PREFIX
 
@@ -1107,7 +1107,7 @@
 
 META_REPLICA_ID_DELIMITER
-protected static final char META_REPLICA_ID_DELIMITER
+protected static final char META_REPLICA_ID_DELIMITER
 The delimiter for meta columns for replicaIds > 0
 
 See Also:
@@ -1121,7 +1121,7 @@
 
 SERVER_COLUMN_PATTERN
-private static final java.util.regex.Pattern SERVER_COLUMN_PATTERN
+private static final java.util.regex.Pattern SERVER_COLUMN_PATTERN
 A regex for parsing server columns from meta. See above javadoc for meta layout
 
@@ -1139,7 +1139,7 @@
 
 MetaTableAccessor
-public MetaTableAccessor()
+public MetaTableAccessor()
 
@@ -1157,7 +1157,7 @@
 
 allTableRegions
 @Deprecated
-public static NavigableMap<RegionInfo,ServerName> allTableRegions(Connection connection,
+public static NavigableMap<RegionInfo,ServerName> allTableRegions(Connection connection,
   TableName tableName)
 throws IOException
 Deprecated. use getTableRegionsAndLocations(org.apache.hadoop.hbase.client.Connection,
 org.apache.hadoop.hbase.TableName), region can have multiple locations
@@ -1180,7 +1180,7 @@
 
 fullScanRegions
-public static void fullScanRegions(Connection connection,
+public static void fullScanRegions(Connection connection,
 MetaTableAccessor.Visitor visitor)
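A sketch of the fullScanRegions entry point listed above. MetaTableAccessor is @InterfaceAudience.Private, so this is shown for understanding only, not as a supported client API:

import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;

public class MetaScanSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection()) {
      // Visit every region row in hbase:meta; returning true keeps scanning.
      MetaTableAccessor.fullScanRegions(conn, new MetaTableAccessor.Visitor() {
        @Override
        public boolean visit(Result r) throws java.io.IOException {
          System.out.println(r);
          return true;
        }
      });
    }
  }
}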
      

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4c0cfa5/apidocs/src-html/org/apache/hadoop/hbase/types/OrderedInt8.html
    --
    diff --git a/apidocs/src-html/org/apache/hadoop/hbase/types/OrderedInt8.html 
    b/apidocs/src-html/org/apache/hadoop/hbase/types/OrderedInt8.html
    index fbcf9a9..8a04689 100644
    --- a/apidocs/src-html/org/apache/hadoop/hbase/types/OrderedInt8.html
    +++ b/apidocs/src-html/org/apache/hadoop/hbase/types/OrderedInt8.html
    @@ -25,58 +25,67 @@
     017 */
     018package org.apache.hadoop.hbase.types;
     019
-020import org.apache.yetus.audience.InterfaceAudience;
-021import org.apache.hadoop.hbase.util.Order;
-022import org.apache.hadoop.hbase.util.OrderedBytes;
-023import org.apache.hadoop.hbase.util.PositionedByteRange;
+020import org.apache.hadoop.hbase.util.Order;
+021import org.apache.hadoop.hbase.util.OrderedBytes;
+022import org.apache.hadoop.hbase.util.PositionedByteRange;
+023import org.apache.yetus.audience.InterfaceAudience;
     024
-025
-026/**
-027 * A {@code byte} of 8-bits using a fixed-length encoding. Built on
-028 * {@link OrderedBytes#encodeInt8(PositionedByteRange, byte, Order)}.
-029 */
-030@InterfaceAudience.Public
-031public class OrderedInt8 extends OrderedBytesBase<Byte> {
-032
-033  public static final OrderedInt8 ASCENDING = new OrderedInt8(Order.ASCENDING);
-034  public static final OrderedInt8 DESCENDING = new OrderedInt8(Order.DESCENDING);
-035
-036  protected OrderedInt8(Order order) { super(order); }
-037
-038  @Override
-039  public boolean isNullable() { return false; }
-040
-041  @Override
-042  public int encodedLength(Byte val) { return 2; }
+025/**
+026 * A {@code byte} of 8-bits using a fixed-length encoding. Built on
+027 * {@link OrderedBytes#encodeInt8(PositionedByteRange, byte, Order)}.
+028 */
+029@InterfaceAudience.Public
+030public class OrderedInt8 extends OrderedBytesBase<Byte> {
+031
+032  public static final OrderedInt8 ASCENDING = new OrderedInt8(Order.ASCENDING);
+033  public static final OrderedInt8 DESCENDING = new OrderedInt8(Order.DESCENDING);
+034
+035  protected OrderedInt8(Order order) {
+036    super(order);
+037  }
+038
+039  @Override
+040  public boolean isNullable() {
+041    return false;
+042  }
     043
     044  @Override
-045  public Class<Byte> encodedClass() { return Byte.class; }
-046
-047  @Override
-048  public Byte decode(PositionedByteRange src) {
-049    return OrderedBytes.decodeInt8(src);
-050  }
-051
-052  @Override
-053  public int encode(PositionedByteRange dst, Byte val) {
-054    if (null == val) throw new IllegalArgumentException("Null values not supported.");
-055    return OrderedBytes.encodeInt8(dst, val, order);
-056  }
-057
-058  /**
-059   * Read a {@code byte} value from the buffer {@code src}.
-060   */
-061  public byte decodeByte(PositionedByteRange src) {
-062    return OrderedBytes.decodeInt8(src);
-063  }
-064
-065  /**
-066   * Write instance {@code val} into buffer {@code dst}.
-067   */
-068  public int encodeByte(PositionedByteRange dst, byte val) {
-069    return OrderedBytes.encodeInt8(dst, val, order);
-070  }
-071}
+045  public int encodedLength(Byte val) {
+046    return 2;
+047  }
+048
+049  @Override
+050  public Class<Byte> encodedClass() {
+051    return Byte.class;
+052  }
+053
+054  @Override
+055  public Byte decode(PositionedByteRange src) {
+056    return OrderedBytes.decodeInt8(src);
+057  }
+058
+059  @Override
+060  public int encode(PositionedByteRange dst, Byte val) {
+061    if (null == val) {
+062      throw new IllegalArgumentException("Null values not supported.");
+063    }
+064    return OrderedBytes.encodeInt8(dst, val, order);
+065  }
+066
+067  /**
+068   * Read a {@code byte} value from the buffer {@code src}.
+069   */
+070  public byte decodeByte(PositionedByteRange src) {
+071    return OrderedBytes.decodeInt8(src);
+072  }
+073
+074  /**
+075   * Write instance {@code val} into buffer {@code dst}.
+076   */
+077  public int encodeByte(PositionedByteRange dst, byte val) {
+078    return OrderedBytes.encodeInt8(dst, val, order);
+079  }
+080}
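A short sketch of the encode/decode round trip for this type. SimplePositionedMutableByteRange is an assumed concrete PositionedByteRange, and the 2-byte buffer matches encodedLength returning 2:

import org.apache.hadoop.hbase.types.OrderedInt8;
import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;

public class OrderedInt8Example {
  public static void main(String[] args) {
    PositionedByteRange buf = new SimplePositionedMutableByteRange(2);
    OrderedInt8.ASCENDING.encodeByte(buf, (byte) 42);  // write the value
    buf.setPosition(0);                                // rewind before reading
    byte decoded = OrderedInt8.ASCENDING.decodeByte(buf);
    System.out.println(decoded);                       // prints 42
  }
}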
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4c0cfa5/apidocs/src-html/org/apache/hadoop/hbase/types/OrderedNumeric.html
    --
    diff --git a/apidocs/src-html/org/apache/hadoop/hbase/types/OrderedNumeric.html 
    b/apidocs/src-html/org/apache/hadoop/hbase/types/OrderedNumeric.html
    index 976627c..a228022 100644
    --- a/apidocs/src-html/org/apache/hadoop/hbase/types/OrderedNumeric.html
    +++ b/apidocs/src-html/org/apache/hadoop/hbase/types/OrderedNumeric.html
    @@ -28,11 +28,11 @@
     020import java.math.BigDecimal;
     021import java.math.BigInteger;
     022
-023import org.apache.yetus.audience.InterfaceAudience;
-024import org.apache.hadoop.hbase.util.Order;
-025import org.apache.hadoop.hbase.util.OrderedBytes;
-026import org.apache.hadoop.hbase.util.PositionedByteRange;
    

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/071f974b/devapidocs/src-html/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.html
    index 0477364..2dd11c9 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.html
    @@ -43,817 +43,826 @@
     035import org.apache.commons.logging.Log;
 036import org.apache.commons.logging.LogFactory;
 037import org.apache.hadoop.conf.Configuration;
-038import org.apache.hadoop.hbase.HBaseIOException;
-039import org.apache.hadoop.hbase.HConstants;
-040import org.apache.hadoop.hbase.MetaTableAccessor;
-041import org.apache.hadoop.hbase.ServerName;
-042import org.apache.hadoop.hbase.TableName;
-043import org.apache.hadoop.hbase.client.Connection;
-044import org.apache.hadoop.hbase.client.ConnectionFactory;
-045import org.apache.hadoop.hbase.client.Put;
-046import org.apache.hadoop.hbase.client.RegionInfo;
-047import org.apache.hadoop.hbase.client.Table;
-048import org.apache.hadoop.hbase.master.RackManager;
-049import org.apache.hadoop.hbase.util.Bytes;
-050import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-051import org.apache.yetus.audience.InterfaceAudience;
-052
-053import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-054import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-055import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-056import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-057import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes;
-058
-059/**
-060 * Helper class for {@link FavoredNodeLoadBalancer} that has all the intelligence for racks,
-061 * meta scans, etc. Instantiated by the {@link FavoredNodeLoadBalancer} when needed (from
-062 * within calls like {@link FavoredNodeLoadBalancer#randomAssignment(RegionInfo, List)}).
-063 * All updates to favored nodes should only be done from {@link FavoredNodesManager} and not
-064 * through this helper class (except for tests).
-065 */
-066@InterfaceAudience.Private
-067public class FavoredNodeAssignmentHelper {
-068  private static final Log LOG = LogFactory.getLog(FavoredNodeAssignmentHelper.class);
-069  private RackManager rackManager;
-070  private Map<String, List<ServerName>> rackToRegionServerMap;
-071  private List<String> uniqueRackList;
-072  // This map serves as a cache for rack to sn lookups. The num of
-073  // region server entries might not match with that is in servers.
-074  private Map<String, String> regionServerToRackMap;
-075  private Random random;
-076  private List<ServerName> servers;
-077  public static final byte [] FAVOREDNODES_QUALIFIER = Bytes.toBytes("fn");
-078  public final static short FAVORED_NODES_NUM = 3;
-079  public final static short MAX_ATTEMPTS_FN_GENERATION = 10;
-080
-081  public FavoredNodeAssignmentHelper(final List<ServerName> servers, Configuration conf) {
-082    this(servers, new RackManager(conf));
-083  }
-084
-085  public FavoredNodeAssignmentHelper(final List<ServerName> servers,
-086      final RackManager rackManager) {
-087    this.servers = servers;
-088    this.rackManager = rackManager;
-089    this.rackToRegionServerMap = new HashMap<>();
-090    this.regionServerToRackMap = new HashMap<>();
-091    this.uniqueRackList = new ArrayList<>();
-092    this.random = new Random();
-093  }
    -094
-095  // Always initialize() when FavoredNodeAssignmentHelper is constructed.
-096  public void initialize() {
-097    for (ServerName sn : this.servers) {
-098      String rackName = getRackOfServer(sn);
-099      List<ServerName> serverList = this.rackToRegionServerMap.get(rackName);
-100      if (serverList == null) {
-101        serverList = Lists.newArrayList();
-102        // Add the current rack to the unique rack list
-103        this.uniqueRackList.add(rackName);
-104        this.rackToRegionServerMap.put(rackName, serverList);
-105      }
-106      for (ServerName serverName : serverList) {
-107        if (ServerName.isSameAddress(sn, serverName)) {
-108          // The server is already present, ignore.
-109          break;
-110        }
-111      }
-112      serverList.add((sn));
-113      this.regionServerToRackMap.put(sn.getHostname(), rackName);
-114    }
-115  }
    -116
-117  /**
-118   * Update meta table with favored nodes info
-119   * @param regionToFavoredNodes map of RegionInfo's to their favored nodes
-120   * @param connection connection to be used
-121   * @throws IOException
-122   */
-123  public static void updateMetaWithFavoredNodesInfo(
-124      Map<RegionInfo, List<ServerName>> regionToFavoredNodes,
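A sketch of the construct-then-initialize contract noted in the comment above ("Always initialize() when FavoredNodeAssignmentHelper is constructed"). The hostnames are made up, and this is private balancer machinery, not client API:

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.favored.FavoredNodeAssignmentHelper;

public class FavoredNodesSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    List<ServerName> servers = Arrays.asList(
        ServerName.valueOf("host1", 16020, 1L),
        ServerName.valueOf("host2", 16020, 1L),
        ServerName.valueOf("host3", 16020, 1L));
    FavoredNodeAssignmentHelper helper = new FavoredNodeAssignmentHelper(servers, conf);
    helper.initialize();  // must be called after construction, per the comment above
  }
}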
    -125  

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dc4e5c85/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.html
    --
    diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.html 
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.html
    index 7c59e27..c904c56 100644
    --- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.html
    +++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.html
    @@ -119,4048 +119,4054 @@
 111import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
 112import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest;
 113import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheRequest;
-114import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-115import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-116import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-117import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-118import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest;
-119import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse;
-120import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest;
-121import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest;
-122import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-123import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
-124import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
-125import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-126import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-127import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-128import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-129import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
-130import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-131import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-132import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-133import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-134import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-135import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersRequest;
-136import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-137import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-138import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;
-139import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;
-140import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-141import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-142import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-143import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-144import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-145import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;
-146import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse;
-147import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-148import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-149import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-150import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-151import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-152import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-153import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-154import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-155import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksRequest;
-156import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksResponse;
    

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4abd958d/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
    index 915e78a..abee553 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
    @@ -36,8 +36,8 @@
     028import java.util.regex.Pattern;
     029
 030import org.apache.commons.lang3.ArrayUtils;
-031import org.apache.yetus.audience.InterfaceAudience;
-032import org.apache.hadoop.hbase.util.Bytes;
+031import org.apache.hadoop.hbase.util.Bytes;
+032import org.apache.yetus.audience.InterfaceAudience;
     033
     034/**
 035 * HConstants holds a bunch of HBase-related constants
    @@ -558,786 +558,800 @@
 550  /**
 551   * Timestamp to use when we want to refer to the latest cell.
 552   *
-553   * On client side, this is the timestamp set by default when no timestamp is specified, to refer to the latest.
-554   * On server side, this acts as a notation.
-555   * (1) For a cell of Put, which has this notation,
-556   *     its timestamp will be replaced with server's current time.
-557   * (2) For a cell of Delete, which has this notation,
-558   *     A. If the cell is of {@link KeyValue.Type#Delete}, HBase issues a Get operation firstly.
-559   *        a. When the count of cell it gets is less than the count of cell to delete,
-560   *           the timestamp of Delete cell will be replaced with server's current time.
-561   *        b. When the count of cell it gets is equal to the count of cell to delete,
-562   *           the timestamp of Delete cell will be replaced with the latest timestamp of cell it gets.
-563   *        (c. It is invalid and an exception will be thrown,
-564   *            if the count of cell it gets is greater than the count of cell to delete,
-565   *            as the max version of Get is set to the count of cell to delete.)
-566   *     B. If the cell is of other Delete types, like {@link KeyValue.Type#DeleteFamilyVersion},
-567   *        {@link KeyValue.Type#DeleteColumn}, or {@link KeyValue.Type#DeleteFamily},
-568   *        the timestamp of Delete cell will be replaced with server's current time.
-569   *
-570   * So that is why it is named as "latest" but assigned as the max value of Long.
-571   */
-572  public static final long LATEST_TIMESTAMP = Long.MAX_VALUE;
-573
-574  /**
-575   * Timestamp to use when we want to refer to the oldest cell.
-576   * Special! Used in fake Cells only. Should never be the timestamp on an actual Cell returned to
-577   * a client.
-578   * @deprecated Should not be public since hbase-1.3.0. For internal use only. Move internal to
-579   * Scanners flagged as special timestamp value never to be returned as timestamp on a Cell.
-580   */
-581  @Deprecated
-582  public static final long OLDEST_TIMESTAMP = Long.MIN_VALUE;
-583
-584  /**
-585   * LATEST_TIMESTAMP in bytes form
-586   */
-587  public static final byte [] LATEST_TIMESTAMP_BYTES = {
-588    // big-endian
-589    (byte) (LATEST_TIMESTAMP >>> 56),
-590    (byte) (LATEST_TIMESTAMP >>> 48),
-591    (byte) (LATEST_TIMESTAMP >>> 40),
-592    (byte) (LATEST_TIMESTAMP >>> 32),
-593    (byte) (LATEST_TIMESTAMP >>> 24),
-594    (byte) (LATEST_TIMESTAMP >>> 16),
-595    (byte) (LATEST_TIMESTAMP >>> 8),
-596    (byte) LATEST_TIMESTAMP,
-597  };
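A quick way to convince yourself that the shift table above is just the big-endian encoding of Long.MAX_VALUE (a sketch; Bytes.toBytes(long) writes big-endian):

import java.util.Arrays;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.util.Bytes;

public class LatestTimestampCheck {
  public static void main(String[] args) {
    // LATEST_TIMESTAMP is Long.MAX_VALUE, so its big-endian bytes
    // should match the hand-built LATEST_TIMESTAMP_BYTES array.
    byte[] bigEndian = Bytes.toBytes(HConstants.LATEST_TIMESTAMP);
    System.out.println(Arrays.equals(bigEndian, HConstants.LATEST_TIMESTAMP_BYTES)); // true
  }
}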
    -598
    -599  /**
    -600   * Define for 'return-all-versions'.
    -601   */
-602  public static final int ALL_VERSIONS = Integer.MAX_VALUE;
-603
-604  /**
-605   * Unlimited time-to-live.
-606   */
-607//  public static final int FOREVER = -1;
-608  public static final int FOREVER = Integer.MAX_VALUE;
-609
-610  /**
-611   * Seconds in a day, hour and minute
-612   */
-613  public static final int DAY_IN_SECONDS = 24 * 60 * 60;
-614  public static final int HOUR_IN_SECONDS = 60 * 60;
-615  public static final int MINUTE_IN_SECONDS = 60;
-616
-617  //TODO: although the following are referenced widely to format strings for
-618  //  the shell. They really aren't a part of the public API. It would be
-619  //  nice if we could put them somewhere where they did not need to be
-620  //  public. They could have package visibility
-621  public static final String NAME = "NAME";
-622  public static final String VERSIONS = "VERSIONS";
-623  public static final String IN_MEMORY = "IN_MEMORY";
-624  public static final String METADATA = "METADATA";
-625  public static final String CONFIGURATION = "CONFIGURATION";
-626
-627  /**
-628   * When retrying, we multiply the hbase.client.pause setting by what we have in this array until we
-629   * run out of array items.  Retries beyond this use the last number in the array.  So, for
-630   * 
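A sketch of the pause computation this comment describes; the multiplier values below are illustrative stand-ins, not quoted from the (truncated) array in this diff:

public class BackoffSketch {
  // Hypothetical multipliers in the spirit of HConstants.RETRY_BACKOFF.
  static final int[] RETRY_BACKOFF = {1, 2, 3, 5, 10, 20, 40, 100};

  static long pauseForRetry(long basePauseMs, int retries) {
    // Past the end of the array, keep using the last multiplier.
    int idx = Math.min(retries, RETRY_BACKOFF.length - 1);
    return basePauseMs * RETRY_BACKOFF[idx];
  }

  public static void main(String[] args) {
    for (int i = 0; i < 10; i++) {
      System.out.println(pauseForRetry(100, i)); // e.g. hbase.client.pause = 100 ms
    }
  }
}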

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e23b49ba/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.html
     
    b/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.html
    index b3abbfe..5d768c7 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.html
    @@ -204,154 +204,150 @@
     
     
 Methods returning ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor:
-ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setCacheDataInL1(boolean value)
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setCacheDataOnWrite(boolean value)
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setCacheIndexesOnWrite(boolean value)
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setCompactionCompressionType(Compression.Algorithm type)
   Compression types supported in hbase.
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setCompressionType(Compression.Algorithm type)
   Compression types supported in hbase.
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setCompressTags(boolean compressTags)
   Set whether the tags should be compressed along with DataBlockEncoding.
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setConfiguration(String key, String value)
   Setter for storing a configuration setting in configuration map.
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setDataBlockEncoding(DataBlockEncoding type)
   Set data block encoding algorithm used in block cache.
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setDFSReplication(short replication)
   Set the replication factor to hfile(s) belonging to this family
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setEncryptionKey(byte[] keyBytes)
   Set the raw crypto key attribute for the family
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setEncryptionType(String algorithm)
   Set the encryption algorithm for use with this family
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setEvictBlocksOnClose(boolean value)
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setInMemory(boolean inMemory)
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setInMemoryCompaction(MemoryCompactionPolicy inMemoryCompaction)
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setKeepDeletedCells(KeepDeletedCells keepDeletedCells)
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setMaxVersions(int maxVersions)
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setMinVersions(int minVersions)
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.setMobCompactPartitionPolicy(MobCompactPartitionPolicy policy)
   Set the mob compact partition policy for the family.
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor
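These setters are normally reached through the public builder rather than ModifyableColumnFamilyDescriptor directly; a minimal sketch (family name made up):

import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.util.Bytes;

public class FamilyDescriptorSketch {
  public static void main(String[] args) {
    ColumnFamilyDescriptor cf = ColumnFamilyDescriptorBuilder
        .newBuilder(Bytes.toBytes("cf"))  // hypothetical family name
        .setMaxVersions(3)
        .setInMemory(true)
        .setCompressionType(Compression.Algorithm.NONE)
        .build();
    System.out.println(cf.getNameAsString());
  }
}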
     

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-convergence.html
    --
    diff --git 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-convergence.html
     
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-convergence.html
    index 14e1368..a473d38 100644
    --- 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-convergence.html
    +++ 
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-convergence.html
    @@ -7,7 +7,7 @@
       
     
     
    -
    +
     
 Apache HBase - Archetype builder - Reactor Dependency Convergence
     
    @@ -488,22 +488,22 @@
     3.4.10
     
     
-org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT
 +- org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile
 |  +- org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile
 |  |  \- (org.apache.zookeeper:zookeeper:jar:3.4.10:compile - version managed from 3.4.6; omitted for duplicate)
 |  +- org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 |  +- org.apache.hadoop:hadoop-common:jar:2.7.4:compile
 |  |  \- (org.apache.zookeeper:zookeeper:jar:3.4.10:compile - version managed from 3.4.6; omitted for duplicate)
 |  +- org.apache.hadoop:hadoop-auth:jar:2.7.4:compile
 |  |  \- (org.apache.zookeeper:zookeeper:jar:3.4.10:compile - version managed from 3.4.6; omitted for duplicate)
 |  \- org.apache.hadoop:hadoop-client:jar:2.7.4:compile
 |     \- org.apache.hadoop:hadoop-mapreduce-client-app:jar:2.7.4:compile
 |        \- org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile
 |           \- org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile
 |              \- (org.apache.zookeeper:zookeeper:jar:3.4.10:compile - version managed from 3.4.6; omitted for duplicate)
 +- org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile
 |  \- (org.apache.zookeeper:zookeeper:jar:3.4.10:compile - version managed from 3.4.6; omitted for duplicate)
 +- org.apache.hbase:hbase-mapreduce:test-jar:tests:3.0.0-SNAPSHOT:test
 |  \- (org.apache.zookeeper:zookeeper:jar:3.4.10:test - version managed from 3.4.6; omitted for duplicate)
 +- org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test
 |  +- org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test
 |  |  \- (org.apache.zookeeper:zookeeper:jar:3.4.10:test - version managed from 3.4.6; omitted for duplicate)
 |  \- org.apache.hadoop:hadoop-minicluster:jar:2.7.4:test
 |     +- org.apache.hadoop:hadoop-common:test-jar:tests:2.7.4:test
 |     |  \- (org.apache.zookeeper:zookeeper:jar:3.4.10:test - version managed from 3.4.6; omitted for duplicate)
 |     \- org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.7.4:test
 |        \- org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.7.4:test
 |           \- (org.apache.zookeeper:zookeeper:jar:3.4.10:test - version managed from 3.4.6; omitted for duplicate)
 +- org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT:compile
 |  \- (org.apache.zookeeper:zookeeper:jar:3.4.10:compile - version managed from 3.4.6; omitted for duplicate)
 +- org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile
 |  \- (org.apache.zookeeper:zookeeper:jar:3.4.10:compile - version managed from 3.4.6; omitted for duplicate)
 +- org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT:compile
 |  \- (org.apache.zookeeper:zookeeper:jar:3.4.10:compile - version managed from 3.4.6; omitted for duplicate)
 +- org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile
 |  \- (org.apache.zookeeper:zookeeper:jar:3.4.10:compile - version managed from 3.4.6; omitted for duplicate)
 \- org.apache.hbase:hbase-rsgroup:jar:3.0.0-SNAPSHOT:compile
    \- (org.apache.zookeeper:zookeeper:jar:3.4.10:compile - version managed from 3.4.6; omitted for duplicate)
-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT
 +- org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile
 |  +- (org.apache.zookeeper:zookeeper:jar:3.4.10:compile - version managed from 3.4.6; omitted for duplicate)
 |  \- org.apache.hadoop:hadoop-auth:jar:2.7.4:compile
 |     \- (org.apache.zookeeper:zookeeper:jar:3.4.10:compile - version managed from 3.4.6; omitted for duplicate)
 +- org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile
 |  +- org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile
 |  |  \- (org.apache.zookeeper:zookeeper:jar:3.4.10:compile - version managed from 3.4.6; omitted for duplicate)
 |  +- org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile
 |  |  \- (org.apache.zookeeper:zookeeper:jar:3.4.10:compile - version managed from 3.4.6; omitted for duplicate)
 |  +- (org.apache.zookeeper:zookeeper:jar:3.4.10:compile - version managed from 3.4.6; omitted for duplicate)
 |  \- org.apache.hadoop:hadoop-client:jar:2.7.4:compile
 |     \- org.apache.hadoop:hadoop-mapreduce-client-app:jar:2.7.4:compile
 |        \- org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile
 |           \- org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile
 |              \- (org.apache.zookeeper:zookeeper:jar:3.4.10:compile - version managed from 3.4.6; omitted for 

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c54c242b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
    --
    diff --git 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
     
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
    index f655fb8..9ddef5a 100644
    --- 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
    +++ 
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
    @@ -7,7 +7,7 @@
       
     
     
    -
    +
     
 Apache HBase - Archetype builder - Project Dependency Management
     
    @@ -804,7 +804,7 @@
 The Apache Software Foundation (https://www.apache.org/).
     All rights reserved.  
     
-  Last Published: 2017-12-02
+  Last Published: 2017-12-03
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c54c242b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html
    --
    diff --git 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html 
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html
    index 9f2b61d..b6c51a5 100644
    --- 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html
    +++ 
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html
    @@ -7,7 +7,7 @@
       
     
     
    -
    +
     
     Apache HBase - Archetype builder  About
     
    @@ -119,7 +119,7 @@
     https://www.apache.org/;>The Apache Software 
    Foundation.
     All rights reserved.  
     
    -  Last Published: 
    2017-12-02
    +  Last Published: 
    2017-12-03
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c54c242b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
    --
    diff --git 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
     
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
    index 781a534..be52b0c 100644
    --- 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
    +++ 
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
    @@ -7,7 +7,7 @@
       
     
     
    -
    +
     
     Apache HBase - Archetype builder  CI Management
     
    @@ -126,7 +126,7 @@
     https://www.apache.org/;>The Apache Software 
    Foundation.
     All rights reserved.  
     
    -  Last Published: 
    2017-12-02
    +  Last Published: 
    2017-12-03
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c54c242b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/issue-tracking.html
    --
    diff --git 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/issue-tracking.html
     
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/issue-tracking.html
    index afd4d1d..4c9ea7a 100644
    --- 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/issue-tracking.html
    +++ 
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/issue-tracking.html
    @@ -7,7 +7,7 @@
       
     
     
    -
    +
     
     Apache HBase - Archetype builder  Issue Management
     
    @@ -123,7 +123,7 @@
     https://www.apache.org/;>The Apache Software 
    Foundation.
     All rights reserved.  
     
    -  Last Published: 
    2017-12-02
    +  Last Published: 
    2017-12-03
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c54c242b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/license.html
    --
    diff --git 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/license.html
     
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/license.html
    index 8b1a6b7..9b72fb6 100644
    --- 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/license.html
    +++ 
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/license.html
    @@ -7,7 +7,7 @@
       
     
     
    -
    +
     
     Apache HBase - Archetype builder  Project Licenses
     
    @@ -326,7 +326,7 @@
     https://www.apache.org/;>The Apache Software 
    Foundation.
     All rights reserved.  
     

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/713d773f/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
    index 25e368d..d0f781f 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
    @@ -25,798 +25,798 @@
     017 */
     018package 
    org.apache.hadoop.hbase.io.asyncfs;
     019
    -020import static 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelOption.CONNECT_TIMEOUT_MILLIS;
    -021import static 
    org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleState.READER_IDLE;
    -022import static 
    org.apache.hadoop.fs.CreateFlag.CREATE;
    -023import static 
    org.apache.hadoop.fs.CreateFlag.OVERWRITE;
    -024import static 
    org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createEncryptor;
    -025import static 
    org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.trySaslNegotiate;
    +020import static 
    org.apache.hadoop.fs.CreateFlag.CREATE;
    +021import static 
    org.apache.hadoop.fs.CreateFlag.OVERWRITE;
    +022import static 
    org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createEncryptor;
    +023import static 
    org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.trySaslNegotiate;
    +024import static 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelOption.CONNECT_TIMEOUT_MILLIS;
    +025import static 
    org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleState.READER_IDLE;
     026import static 
    org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
     027import static 
    org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME;
     028import static 
    org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT;
     029import static 
    org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage.PIPELINE_SETUP_CREATE;
     030
    -031import 
    org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
    -032import 
    org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
    -033import 
    com.google.protobuf.CodedOutputStream;
    -034
    -035import 
    org.apache.hadoop.hbase.shaded.io.netty.bootstrap.Bootstrap;
    -036import 
    org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBuf;
    -037import 
    org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBufAllocator;
    -038import 
    org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBufOutputStream;
    -039import 
    org.apache.hadoop.hbase.shaded.io.netty.buffer.PooledByteBufAllocator;
    -040import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
    -041import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelFuture;
    -042import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelFutureListener;
    -043import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandler;
    -044import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContext;
    -045import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelInitializer;
    -046import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelPipeline;
    -047import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop;
    -048import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHandler;
    -049import 
    org.apache.hadoop.hbase.shaded.io.netty.handler.codec.protobuf.ProtobufDecoder;
    -050import 
    org.apache.hadoop.hbase.shaded.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
    -051import 
    org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleStateEvent;
    -052import 
    org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleStateHandler;
    -053import 
    org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Future;
    -054import 
    org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.FutureListener;
    -055import 
    org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Promise;
    -056
    -057import java.io.IOException;
    -058import 
    java.lang.reflect.InvocationTargetException;
    -059import java.lang.reflect.Method;
    -060import java.util.ArrayList;
    -061import java.util.EnumSet;
    -062import java.util.List;
    -063import java.util.concurrent.TimeUnit;
    -064
    -065import org.apache.commons.logging.Log;
    -066import 
    org.apache.commons.logging.LogFactory;
    -067import 
    org.apache.hadoop.conf.Configuration;
    -068import 
    org.apache.hadoop.crypto.CryptoProtocolVersion;
    -069import 
    org.apache.hadoop.crypto.Encryptor;
    -070import org.apache.hadoop.fs.CreateFlag;
    -071import org.apache.hadoop.fs.FileSystem;
    -072import 
    org.apache.hadoop.fs.FileSystemLinkResolver;
    -073import org.apache.hadoop.fs.Path;
    -074import 
    org.apache.hadoop.fs.UnresolvedLinkException;
    -075import 
    

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/fd365a2b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
    index d438f22..7c59e27 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DisableTableFuture.html
    @@ -1290,8 +1290,8 @@
     1282   CompactType 
    compactType) throws IOException {
     1283switch (compactType) {
     1284  case MOB:
    -1285
    compact(this.connection.getAdminForMaster(), getMobRegionInfo(tableName), 
    major,
    -1286  columnFamily);
    +1285
    compact(this.connection.getAdminForMaster(), 
    RegionInfo.createMobRegionInfo(tableName),
    +1286major, columnFamily);
     1287break;
     1288  case NORMAL:
     1289checkTableExists(tableName);
    @@ -3248,7 +3248,7 @@
 3240  new Callable<AdminProtos.GetRegionInfoResponse.CompactionState>() {
     3241@Override
     3242public 
    AdminProtos.GetRegionInfoResponse.CompactionState call() throws Exception {
    -3243  RegionInfo info = 
    getMobRegionInfo(tableName);
    +3243  RegionInfo info = 
    RegionInfo.createMobRegionInfo(tableName);
     3244  GetRegionInfoRequest 
    request =
     3245
    RequestConverter.buildGetRegionInfoRequest(info.getRegionName(), true);
     3246  GetRegionInfoResponse 
    response = masterAdmin.getRegionInfo(rpcController, request);
    @@ -3312,7 +3312,7 @@
     3304}
     3305break;
     3306  default:
    -3307throw new 
    IllegalArgumentException("Unknowne compactType: " + compactType);
    +3307throw new 
    IllegalArgumentException("Unknown compactType: " + compactType);
     3308}
     3309if (state != null) {
     3310  return 
    ProtobufUtil.createCompactionState(state);
    @@ -3847,325 +3847,320 @@
     3839});
     3840  }
     3841
    -3842  private RegionInfo 
    getMobRegionInfo(TableName tableName) {
    -3843return 
    RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes(".mob")).setRegionId(0)
    -3844.build();
    -3845  }
    -3846
    -3847  private RpcControllerFactory 
    getRpcControllerFactory() {
    -3848return this.rpcControllerFactory;
    -3849  }
    -3850
    -3851  @Override
    -3852  public void addReplicationPeer(String 
    peerId, ReplicationPeerConfig peerConfig, boolean enabled)
    -3853  throws IOException {
-3854    executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
    -3855  @Override
    -3856  protected Void rpcCall() throws 
    Exception {
    -3857
    master.addReplicationPeer(getRpcController(),
    -3858  
    RequestConverter.buildAddReplicationPeerRequest(peerId, peerConfig, 
    enabled));
    -3859return null;
    -3860  }
    -3861});
    -3862  }
    -3863
    -3864  @Override
    -3865  public void 
    removeReplicationPeer(String peerId) throws IOException {
-3866    executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
    -3867  @Override
    -3868  protected Void rpcCall() throws 
    Exception {
    -3869
    master.removeReplicationPeer(getRpcController(),
    -3870  
    RequestConverter.buildRemoveReplicationPeerRequest(peerId));
    -3871return null;
    -3872  }
    -3873});
    -3874  }
    -3875
    -3876  @Override
    -3877  public void 
    enableReplicationPeer(final String peerId) throws IOException {
-3878    executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
    -3879  @Override
    -3880  protected Void rpcCall() throws 
    Exception {
    -3881
    master.enableReplicationPeer(getRpcController(),
    -3882  
    RequestConverter.buildEnableReplicationPeerRequest(peerId));
    -3883return null;
    -3884  }
    -3885});
    -3886  }
    -3887
    -3888  @Override
    -3889  public void 
    disableReplicationPeer(final String peerId) throws IOException {
-3890    executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
    -3891  @Override
    -3892  protected Void rpcCall() throws 
    Exception {
    -3893
    master.disableReplicationPeer(getRpcController(),
    -3894  
    RequestConverter.buildDisableReplicationPeerRequest(peerId));
    -3895return null;
    -3896  }
    -3897});
    -3898  }
    -3899
    -3900  @Override
    -3901  public ReplicationPeerConfig 
    getReplicationPeerConfig(final String peerId) throws IOException {
-3902    return executeCallable(new MasterCallable<ReplicationPeerConfig>(getConnection(),
-3903        getRpcControllerFactory()) {
    -3904  @Override
    -3905  protected ReplicationPeerConfig 
    rpcCall() throws Exception {
    -3906GetReplicationPeerConfigResponse 
    response = master.getReplicationPeerConfig(
    -3907  getRpcController(), 
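
    For context on the HBaseAdmin hunks above: the commit replaces the private
    getMobRegionInfo helper (removed in the '-' lines) with the
    RegionInfo.createMobRegionInfo factory at both call sites. A rough
    before/after sketch, assuming only the classes and calls visible in the
    diff (illustrative, not the full HBaseAdmin source):

        import org.apache.hadoop.hbase.TableName;
        import org.apache.hadoop.hbase.client.RegionInfo;
        import org.apache.hadoop.hbase.client.RegionInfoBuilder;
        import org.apache.hadoop.hbase.util.Bytes;

        public class MobRegionInfoSketch {
          // Before: the removed private helper built the MOB placeholder region by hand.
          static RegionInfo before(TableName tableName) {
            return RegionInfoBuilder.newBuilder(tableName)
                .setStartKey(Bytes.toBytes(".mob")) // synthetic start key marking the MOB region
                .setRegionId(0)
                .build();
          }

          // After: both call sites use the factory that the '+' lines introduce.
          static RegionInfo after(TableName tableName) {
            return RegionInfo.createMobRegionInfo(tableName);
          }
        }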
    

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b9722a17/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
    index 29ea7b3..6ed75c9 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
    @@ -1313,7093 +1313,7082 @@
     1305
     1306  @Override
     1307  public boolean isSplittable() {
-1308    boolean result = isAvailable() && !hasReferences();
    -1309LOG.info("ASKED IF SPLITTABLE " + 
    result + " " + getRegionInfo().getShortNameToLog(),
    -1310  new Throwable("LOGGING: 
    REMOVE"));
    -1311// REMOVE BELOW
    -1312LOG.info("DEBUG LIST ALL FILES");
    -1313for (HStore store : 
    this.stores.values()) {
    -1314  LOG.info("store " + 
    store.getColumnFamilyName());
    -1315  for (HStoreFile sf : 
    store.getStorefiles()) {
    -1316
    LOG.info(sf.toStringDetailed());
    -1317  }
    -1318}
    -1319return result;
    -1320  }
    -1321
    -1322  @Override
    -1323  public boolean isMergeable() {
    -1324if (!isAvailable()) {
    -1325  LOG.debug("Region " + this
    -1326  + " is not mergeable because 
    it is closing or closed");
    -1327  return false;
    -1328}
    -1329if (hasReferences()) {
    -1330  LOG.debug("Region " + this
    -1331  + " is not mergeable because 
    it has references");
    -1332  return false;
    -1333}
    -1334
    -1335return true;
+1308    return isAvailable() && !hasReferences();
    +1309  }
    +1310
    +1311  @Override
    +1312  public boolean isMergeable() {
    +1313if (!isAvailable()) {
    +1314  LOG.debug("Region " + this
    +1315  + " is not mergeable because 
    it is closing or closed");
    +1316  return false;
    +1317}
    +1318if (hasReferences()) {
    +1319  LOG.debug("Region " + this
    +1320  + " is not mergeable because 
    it has references");
    +1321  return false;
    +1322}
    +1323
    +1324return true;
    +1325  }
    +1326
    +1327  public boolean areWritesEnabled() {
    +1328synchronized(this.writestate) {
    +1329  return 
    this.writestate.writesEnabled;
    +1330}
    +1331  }
    +1332
    +1333  @VisibleForTesting
    +1334  public MultiVersionConcurrencyControl 
    getMVCC() {
    +1335return mvcc;
     1336  }
     1337
    -1338  public boolean areWritesEnabled() {
    -1339synchronized(this.writestate) {
    -1340  return 
    this.writestate.writesEnabled;
    -1341}
    -1342  }
    -1343
    -1344  @VisibleForTesting
    -1345  public MultiVersionConcurrencyControl 
    getMVCC() {
    -1346return mvcc;
    -1347  }
    -1348
    -1349  @Override
    -1350  public long getMaxFlushedSeqId() {
    -1351return maxFlushedSeqId;
    +1338  @Override
    +1339  public long getMaxFlushedSeqId() {
    +1340return maxFlushedSeqId;
    +1341  }
    +1342
    +1343  /**
    +1344   * @return readpoint considering given 
    IsolationLevel. Pass {@code null} for default
    +1345   */
    +1346  public long 
    getReadPoint(IsolationLevel isolationLevel) {
+1347    if (isolationLevel != null && isolationLevel == IsolationLevel.READ_UNCOMMITTED) {
    +1348  // This scan can read even 
    uncommitted transactions
    +1349  return Long.MAX_VALUE;
    +1350}
    +1351return mvcc.getReadPoint();
     1352  }
     1353
    -1354  /**
    -1355   * @return readpoint considering given 
    IsolationLevel. Pass {@code null} for default
    -1356   */
    -1357  public long 
    getReadPoint(IsolationLevel isolationLevel) {
-1358    if (isolationLevel != null && isolationLevel == IsolationLevel.READ_UNCOMMITTED) {
    -1359  // This scan can read even 
    uncommitted transactions
    -1360  return Long.MAX_VALUE;
    -1361}
    -1362return mvcc.getReadPoint();
    -1363  }
    -1364
    -1365  public boolean 
    isLoadingCfsOnDemandDefault() {
    -1366return 
    this.isLoadingCfsOnDemandDefault;
    -1367  }
    -1368
    -1369  /**
    -1370   * Close down this HRegion.  Flush the 
    cache, shut down each HStore, don't
    -1371   * service any more calls.
    -1372   *
-1373   * <p>This method could take some time to execute, so don't call it from a
    -1374   * time-sensitive thread.
    -1375   *
    -1376   * @return Vector of all the storage 
    files that the HRegion's component
    -1377   * HStores make use of.  It's a list 
    of all StoreFile objects. Returns empty
    -1378   * vector if already closed and null 
    if judged that it should not close.
    -1379   *
    -1380   * @throws IOException e
    -1381   * @throws DroppedSnapshotException 
    Thrown when replay of wal is required
    -1382   * because a Snapshot was not properly 
    persisted. The region is put in closing mode, and the
    -1383   * caller MUST abort after this.
    -1384   */
-1385  public Map<byte[], List<HStoreFile>> close() throws IOException {
    -1386return close(false);
    -1387  }
    -1388
    -1389  private final Object closeLock = new 
    Object();
    -1390
    -1391  /** Conf key for the 
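
    The getReadPoint(IsolationLevel) method shown above is the region-side half
    of scan isolation: READ_UNCOMMITTED short-circuits to Long.MAX_VALUE so
    every cell is visible, while any other level falls through to the MVCC read
    point. A small client-side sketch of how that level gets chosen (assumes
    the standard Scan API; this is illustrative and not part of the diff):

        import org.apache.hadoop.hbase.client.IsolationLevel;
        import org.apache.hadoop.hbase.client.Scan;

        public class IsolationSketch {
          static Scan uncommittedScan() {
            Scan scan = new Scan();
            // On the region side this maps to getReadPoint(...) returning
            // Long.MAX_VALUE, so cells beyond the current MVCC read point show up.
            scan.setIsolationLevel(IsolationLevel.READ_UNCOMMITTED);
            return scan;
          }
        }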

    [11/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b1eb7453/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.MetaComparator.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.MetaComparator.html 
    b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.MetaComparator.html
    index d98042d..d549086 100644
    --- a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.MetaComparator.html
    +++ b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.MetaComparator.html
    @@ -42,2537 +42,2536 @@
     034
     035import org.apache.commons.logging.Log;
     036import 
    org.apache.commons.logging.LogFactory;
    -037import 
    org.apache.yetus.audience.InterfaceAudience;
    +037import 
    org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
     038import 
    org.apache.hadoop.hbase.util.ByteBufferUtils;
     039import 
    org.apache.hadoop.hbase.util.Bytes;
     040import 
    org.apache.hadoop.hbase.util.ClassSize;
     041import 
    org.apache.hadoop.io.RawComparator;
    -042
    -043import 
    org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-044/**
-045 * An HBase Key/Value. This is the fundamental HBase Type.
-046 * <p>
-047 * HBase applications and users should use the Cell interface and avoid directly using KeyValue and
-048 * member functions not defined in Cell.
-049 * <p>
-050 * If being used client-side, the primary methods to access individual fields are
-051 * {@link #getRowArray()}, {@link #getFamilyArray()}, {@link #getQualifierArray()},
-052 * {@link #getTimestamp()}, and {@link #getValueArray()}. These methods allocate new byte arrays
-053 * and return copies. Avoid their use server-side.
-054 * <p>
-055 * Instances of this class are immutable. They do not implement Comparable but Comparators are
-056 * provided. Comparators change with context, whether user table or a catalog table comparison. Its
-057 * critical you use the appropriate comparator. There are Comparators for normal HFiles, Meta's
-058 * Hfiles, and bloom filter keys.
-059 * <p>
-060 * KeyValue wraps a byte array and takes offsets and lengths into passed array at where to start
-061 * interpreting the content as KeyValue. The KeyValue format inside a byte array is:
-062 * <code>&lt;keylength&gt; &lt;valuelength&gt; &lt;key&gt; &lt;value&gt;</code> Key is further
-063 * decomposed as: <code>&lt;rowlength&gt; &lt;row&gt; &lt;columnfamilylength&gt;
-064 * &lt;columnfamily&gt; &lt;columnqualifier&gt;
-065 * &lt;timestamp&gt; &lt;keytype&gt;</code> The <code>rowlength</code> maximum is
-066 * <code>Short.MAX_SIZE</code>, column family length maximum is <code>Byte.MAX_SIZE</code>, and
-067 * column qualifier + key length must be &lt; <code>Integer.MAX_SIZE</code>. The column does not
-068 * contain the family/qualifier delimiter, {@link #COLUMN_FAMILY_DELIMITER}<br>
-069 * KeyValue can optionally contain Tags. When it contains tags, it is added in the byte array after
-070 * the value part. The format for this part is: <code>&lt;tagslength&gt;&lt;tagsbytes&gt;</code>.
-071 * <code>tagslength</code> maximum is <code>Short.MAX_SIZE</code>. The <code>tagsbytes</code>
-072 * contain one or more tags where as each tag is of the form
-073 * <code>&lt;taglength&gt;&lt;tagtype&gt;&lt;tagbytes&gt;</code>. <code>tagtype</code> is one byte
-074 * and <code>taglength</code> maximum is <code>Short.MAX_SIZE</code> and it includes 1 byte type
-075 * length and actual tag bytes length.
-076 */
    -077@InterfaceAudience.Private
    -078public class KeyValue implements 
    ExtendedCell {
-079  private static final ArrayList<Tag> EMPTY_ARRAY_LIST = new ArrayList<>();
    -080
    -081  private static final Log LOG = 
    LogFactory.getLog(KeyValue.class);
    -082
    -083  public static final long FIXED_OVERHEAD 
    = ClassSize.OBJECT + // the KeyValue object itself
    -084  ClassSize.REFERENCE + // pointer to 
    "bytes"
    -085  2 * Bytes.SIZEOF_INT + // offset, 
    length
    -086  Bytes.SIZEOF_LONG;// memstoreTS
    -087
    -088  /**
    -089   * Colon character in UTF-8
    -090   */
    -091  public static final char 
    COLUMN_FAMILY_DELIMITER = ':';
    -092
    -093  public static final byte[] 
    COLUMN_FAMILY_DELIM_ARRAY =
    -094new 
    byte[]{COLUMN_FAMILY_DELIMITER};
    -095
    -096  /**
    -097   * Comparator for plain key/values; 
    i.e. non-catalog table key/values. Works on Key portion
    -098   * of KeyValue only.
    -099   * @deprecated Use {@link 
    CellComparator#getInstance()} instead. Deprecated for hbase 2.0, remove for 
    hbase 3.0.
    -100   */
    -101  @Deprecated
    -102  public static final KVComparator 
    COMPARATOR = new KVComparator();
    -103  /**
-104   * A {@link KVComparator} for <code>hbase:meta</code> catalog table
    -105   * {@link KeyValue}s.
    -106   * @deprecated Use {@link 
    CellComparatorImpl#META_COMPARATOR} instead. Deprecated for hbase 2.0, remove 
    for hbase 3.0.
    -107   */
    -108  @Deprecated
    -109  public static final KVComparator 
    META_COMPARATOR = new MetaComparator();
    -110
    -111  /** Size of the key length field in 
    bytes*/
    -112  public static final int KEY_LENGTH_SIZE 
    = Bytes.SIZEOF_INT;
    -113
    -114  /** 
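
    Given the byte layout the KeyValue javadoc above spells out
    (<keylength> <valuelength> <key> <value>, with the key holding a 2-byte row
    length, the row, a 1-byte family length, the family, the qualifier, an
    8-byte timestamp, and a 1-byte type), a standalone sketch of walking those
    fields could look like this (hand-rolled for illustration only; real code
    should go through the Cell/KeyValue accessors):

        import java.nio.ByteBuffer;
        import java.nio.charset.StandardCharsets;

        // Decodes the KeyValue byte layout described in the javadoc above.
        public class KeyValueLayoutSketch {
          public static void dump(byte[] buf, int offset) {
            ByteBuffer bb = ByteBuffer.wrap(buf, offset, buf.length - offset);
            int keyLength = bb.getInt();        // <keylength>
            int valueLength = bb.getInt();      // <valuelength>
            short rowLength = bb.getShort();    // key: <rowlength>
            byte[] row = new byte[rowLength];
            bb.get(row);                        // key: <row>
            int familyLength = bb.get() & 0xff; // key: <columnfamilylength>
            byte[] family = new byte[familyLength];
            bb.get(family);                     // key: <columnfamily>
            // Whatever remains of the key before the fixed tail is the qualifier.
            int qualifierLength = keyLength - 2 - rowLength - 1 - familyLength - 8 - 1;
            byte[] qualifier = new byte[qualifierLength];
            bb.get(qualifier);                  // key: <columnqualifier>
            long timestamp = bb.getLong();      // key: <timestamp>
            byte type = bb.get();               // key: <keytype>
            System.out.printf("row=%s family=%s qualifier=%s ts=%d type=%d valueLen=%d%n",
                new String(row, StandardCharsets.UTF_8),
                new String(family, StandardCharsets.UTF_8),
                new String(qualifier, StandardCharsets.UTF_8),
                timestamp, type, valueLength);
          }
        }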
