[10/51] [partial] hbase-site git commit: Published site at 620d70d6186fb800299bcc62ad7179fccfd1be41.

2019-01-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa3fb87f/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html b/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
index 2290ca8..8a42603 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
@@ -583,7 +583,7 @@
 575  public List<TTableDescriptor> getTableDescriptorsByPattern(String regex, boolean includeSysTables)
 576      throws TIOError, TException {
 577    try {
-578      Pattern pattern = Pattern.compile(regex);
+578      Pattern pattern = (regex == null ? null : Pattern.compile(regex));
 579      List<TableDescriptor> tableDescriptors = connectionCache.getAdmin()
 580          .listTableDescriptors(pattern, includeSysTables);
 581      return tableDescriptorsFromHBase(tableDescriptors);
@@ -608,7 +608,7 @@
 600  public List<TTableName> getTableNamesByPattern(String regex, boolean includeSysTables)
 601      throws TIOError, TException {
 602    try {
-603      Pattern pattern = Pattern.compile(regex);
+603      Pattern pattern = (regex == null ? null : Pattern.compile(regex));
 604      TableName[] tableNames = connectionCache.getAdmin()
 605          .listTableNames(pattern, includeSysTables);
 606      return tableNamesFromHBase(tableNames);

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa3fb87f/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html b/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
index 2290ca8..8a42603 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
@@ -583,7 +583,7 @@
 575  public List<TTableDescriptor> getTableDescriptorsByPattern(String regex, boolean includeSysTables)
 576      throws TIOError, TException {
 577    try {
-578      Pattern pattern = Pattern.compile(regex);
+578      Pattern pattern = (regex == null ? null : Pattern.compile(regex));
 579      List<TableDescriptor> tableDescriptors = connectionCache.getAdmin()
 580          .listTableDescriptors(pattern, includeSysTables);
 581      return tableDescriptorsFromHBase(tableDescriptors);
@@ -608,7 +608,7 @@
 600  public List<TTableName> getTableNamesByPattern(String regex, boolean includeSysTables)
 601      throws TIOError, TException {
 602    try {
-603      Pattern pattern = Pattern.compile(regex);
+603      Pattern pattern = (regex == null ? null : Pattern.compile(regex));
 604      TableName[] tableNames = connectionCache.getAdmin()
 605          .listTableNames(pattern, includeSysTables);
 606      return tableNamesFromHBase(tableNames);
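
The hunk above, applied identically to both generated pages, hardens the Thrift2 handler against a null regex from the client: a missing filter should mean "list everything" rather than a NullPointerException out of Pattern.compile(regex). A minimal standalone sketch of just that guard, with the admin listing call left out (class and method names below are illustrative, not HBase API):

import java.util.regex.Pattern;

// Sketch of the null-guard from the hunk above. Assumption: the downstream
// listing call treats a null Pattern as "no filter", so a null regex must
// never reach Pattern.compile(), which throws NullPointerException on null.
public class PatternGuardSketch {
  static Pattern toPattern(String regex) {
    return regex == null ? null : Pattern.compile(regex);
  }

  public static void main(String[] args) {
    System.out.println(toPattern(null));  // null, i.e. "no filter"
    System.out.println(toPattern("t.*")); // compiled pattern t.*
  }
}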



[10/51] [partial] hbase-site git commit: Published site at 281d6429e55149cc4c05430dcc1d1dc136d8b245.

2019-01-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/901d593a/devapidocs/org/apache/hadoop/hbase/quotas/MasterQuotaManager.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/MasterQuotaManager.html b/devapidocs/org/apache/hadoop/hbase/quotas/MasterQuotaManager.html
index 0e574e9..4885bc0 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/MasterQuotaManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/MasterQuotaManager.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -115,7 +115,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Private
  @InterfaceStability.Evolving
-public class MasterQuotaManager
+public class MasterQuotaManager
 extends Object
 implements RegionStateListener
 Master Quota Manager.
@@ -202,10 +202,14 @@ implements regionSizes
 
 
+private RpcThrottleStorage
+rpcThrottleStorage
+
+
 private MasterQuotaManager.NamedLock<TableName>
 tableLocks

 private MasterQuotaManager.NamedLock<String>
 userLocks
 
@@ -282,96 +286,104 @@ implements isQuotaInitialized()

+org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRpcThrottleEnabledResponse
+isRpcThrottleEnabled(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRpcThrottleEnabledRequest request)
+
 void
 onRegionMerged(RegionInfo mergedRegion)
 Process region merge event.

 void
 onRegionSplit(RegionInfo hri)
 Process region split event.

 void
 onRegionSplitReverted(RegionInfo hri)
 Process region split reverted event.

 void
 processFileArchivals(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.FileArchiveNotificationRequest request,
     Connection conn,
     org.apache.hadoop.conf.Configuration conf,
     org.apache.hadoop.fs.FileSystem fs)

 (package private) int
 pruneEntriesOlderThan(long timeToPruneBefore)

 void
 removeNamespaceQuota(String namespace)

 void
 removeTableFromNamespaceQuota(TableName tName)
 Remove table from namespace quota.

 void
 setNamespaceQuota(NamespaceDescriptor desc)

 void
 setNamespaceQuota(String namespace,
     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest req)

 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse
 setQuota(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest req)

 private void
 setQuota(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest req,
     MasterQuotaManager.SetQuotaOperations quotaOps)

 void
 setTableQuota(TableName table,
     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest req)

 void
 setUserQuota(String userName,
     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest req)

 void
 setUserQuota(String userName,
     String namespace,
     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest req)

 void
 setUserQuota(String userName,
     TableName table,
     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest req)
 
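The new rpcThrottleStorage field and the isRpcThrottleEnabled handler indicate that the master now persists a cluster-wide RPC throttle switch and answers client queries against it. A hedged sketch of that shape, with an in-memory flag standing in for the real RpcThrottleStorage backend and plain booleans standing in for the protobuf request/response types (both are assumptions, not the HBase API):

import java.util.concurrent.atomic.AtomicBoolean;

// Stand-in for the persisted switch behind the new rpcThrottleStorage field.
class RpcThrottleStorageSketch {
  private final AtomicBoolean enabled = new AtomicBoolean(true); // in-memory stand-in

  boolean isRpcThrottleEnabled() { return enabled.get(); }

  void switchRpcThrottle(boolean enable) { enabled.set(enable); }
}

// Mirrors the new request handler: read the persisted switch and return it
// (the real code wraps it in a MasterProtos.IsRpcThrottleEnabledResponse).
class MasterQuotaManagerSketch {
  private final RpcThrottleStorageSketch rpcThrottleStorage = new RpcThrottleStorageSketch();

  boolean isRpcThrottleEnabled() {
    return rpcThrottleStorage.isRpcThrottleEnabled();
  }
}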

[10/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
index 736388b..197b99d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.Converter.html
@@ -26,3624 +26,3599 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-021
-022import com.google.protobuf.Message;
-023import com.google.protobuf.RpcChannel;
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.Collections;
-028import java.util.EnumSet;
-029import java.util.HashMap;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Optional;
-033import java.util.Set;
-034import java.util.concurrent.CompletableFuture;
-035import java.util.concurrent.ConcurrentHashMap;
-036import java.util.concurrent.TimeUnit;
-037import java.util.concurrent.atomic.AtomicReference;
-038import java.util.function.BiConsumer;
-039import java.util.function.Function;
-040import java.util.function.Supplier;
-041import java.util.regex.Pattern;
-042import java.util.stream.Collectors;
-043import java.util.stream.Stream;
-044import org.apache.commons.io.IOUtils;
-045import org.apache.hadoop.conf.Configuration;
-046import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-047import org.apache.hadoop.hbase.CacheEvictionStats;
-048import org.apache.hadoop.hbase.CacheEvictionStatsAggregator;
-049import org.apache.hadoop.hbase.ClusterMetrics;
-050import org.apache.hadoop.hbase.ClusterMetrics.Option;
-051import org.apache.hadoop.hbase.ClusterMetricsBuilder;
-052import org.apache.hadoop.hbase.HConstants;
-053import org.apache.hadoop.hbase.HRegionLocation;
-054import org.apache.hadoop.hbase.MetaTableAccessor;
-055import org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-056import org.apache.hadoop.hbase.NamespaceDescriptor;
-057import org.apache.hadoop.hbase.RegionLocations;
-058import org.apache.hadoop.hbase.RegionMetrics;
-059import org.apache.hadoop.hbase.RegionMetricsBuilder;
-060import org.apache.hadoop.hbase.ServerName;
-061import org.apache.hadoop.hbase.TableExistsException;
-062import org.apache.hadoop.hbase.TableName;
-063import org.apache.hadoop.hbase.TableNotDisabledException;
-064import org.apache.hadoop.hbase.TableNotEnabledException;
-065import org.apache.hadoop.hbase.TableNotFoundException;
-066import org.apache.hadoop.hbase.UnknownRegionException;
-067import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-068import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-069import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder;
-070import org.apache.hadoop.hbase.client.Scan.ReadType;
-071import org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-072import org.apache.hadoop.hbase.client.replication.TableCFs;
-073import org.apache.hadoop.hbase.client.security.SecurityCapability;
-074import org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import org.apache.hadoop.hbase.replication.ReplicationException;
-080import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import org.apache.hadoop.hbase.replication.SyncReplicationState;
-083import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-084import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-085import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-086import org.apache.hadoop.hbase.util.Bytes;
-087import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-088import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-089import org.apache.yetus.audience.InterfaceAudience;
-090import org.slf4j.Logger;
-091import org.slf4j.LoggerFactory;
-092
-093import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-094import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-095import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
-096import org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
-097import org.apache.hbase.thirdparty.io.netty.util.Timeout;
-098import 

[10/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html b/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
index 1a0f64e..2290ca8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.TIOErrorWithCause.html
@@ -26,849 +26,796 @@
 018 */
 019package org.apache.hadoop.hbase.thrift2;
 020
-021import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.appendFromThrift;
-022import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.columnFamilyDescriptorFromThrift;
-023import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.compareOpFromThrift;
-024import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.deleteFromThrift;
-025import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.deletesFromThrift;
-026import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.getFromThrift;
-027import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.getsFromThrift;
-028import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.incrementFromThrift;
-029import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.namespaceDescriptorFromHBase;
-030import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.namespaceDescriptorFromThrift;
-031import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.namespaceDescriptorsFromHBase;
-032import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.putFromThrift;
-033import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.putsFromThrift;
-034import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.resultFromHBase;
-035import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.resultsFromHBase;
-036import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.rowMutationsFromThrift;
-037import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.scanFromThrift;
-038import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.splitKeyFromThrift;
-039import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableDescriptorFromHBase;
-040import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableDescriptorFromThrift;
-041import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableDescriptorsFromHBase;
-042import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableNameFromThrift;
-043import static org.apache.hadoop.hbase.thrift2.ThriftUtilities.tableNamesFromHBase;
-044import static org.apache.thrift.TBaseHelper.byteBufferToByteArray;
-045
-046import java.io.IOException;
-047import java.lang.reflect.InvocationHandler;
-048import java.lang.reflect.InvocationTargetException;
-049import java.lang.reflect.Method;
-050import java.lang.reflect.Proxy;
-051import java.nio.ByteBuffer;
-052import java.util.ArrayList;
-053import java.util.Collections;
-054import java.util.List;
-055import java.util.Map;
-056import java.util.concurrent.ConcurrentHashMap;
-057import java.util.concurrent.atomic.AtomicInteger;
-058import java.util.regex.Pattern;
-059
-060import org.apache.hadoop.conf.Configuration;
-061import org.apache.hadoop.hbase.DoNotRetryIOException;
-062import org.apache.hadoop.hbase.HRegionLocation;
-063import org.apache.hadoop.hbase.NamespaceDescriptor;
-064import org.apache.hadoop.hbase.TableName;
-065import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-066import org.apache.hadoop.hbase.client.RegionLocator;
-067import org.apache.hadoop.hbase.client.ResultScanner;
-068import org.apache.hadoop.hbase.client.Table;
-069import org.apache.hadoop.hbase.client.TableDescriptor;
-070import org.apache.hadoop.hbase.security.UserProvider;
-071import org.apache.hadoop.hbase.thrift.ThriftMetrics;
-072import org.apache.hadoop.hbase.thrift2.generated.TAppend;
-073import org.apache.hadoop.hbase.thrift2.generated.TColumnFamilyDescriptor;
-074import org.apache.hadoop.hbase.thrift2.generated.TCompareOp;
-075import org.apache.hadoop.hbase.thrift2.generated.TDelete;
-076import org.apache.hadoop.hbase.thrift2.generated.TGet;
-077import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
-078import org.apache.hadoop.hbase.thrift2.generated.THRegionLocation;
-079import org.apache.hadoop.hbase.thrift2.generated.TIOError;
-080import org.apache.hadoop.hbase.thrift2.generated.TIllegalArgument;
-081import org.apache.hadoop.hbase.thrift2.generated.TIncrement;
-082import org.apache.hadoop.hbase.thrift2.generated.TNamespaceDescriptor;
-083import org.apache.hadoop.hbase.thrift2.generated.TPut;
-084import org.apache.hadoop.hbase.thrift2.generated.TResult;
-085import 

[10/51] [partial] hbase-site git commit: Published site at 3ab895979b643a2980bcdb7fee2078f14b614210.

2019-01-01 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/ipc/UnsupportedCellCodecException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/UnsupportedCellCodecException.html b/devapidocs/org/apache/hadoop/hbase/ipc/UnsupportedCellCodecException.html
index 5c4c17f..3de66ea 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/UnsupportedCellCodecException.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/UnsupportedCellCodecException.html
@@ -305,6 +305,6 @@ extends
-Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/ipc/UnsupportedCompressionCodecException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/UnsupportedCompressionCodecException.html b/devapidocs/org/apache/hadoop/hbase/ipc/UnsupportedCompressionCodecException.html
index 3f228ae..c12387e 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/UnsupportedCompressionCodecException.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/UnsupportedCompressionCodecException.html
@@ -305,6 +305,6 @@ extends
-Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.html b/devapidocs/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.html
index 0d25009..cf8508a 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.html
@@ -305,6 +305,6 @@ extends
-Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/ipc/WrongVersionException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/WrongVersionException.html b/devapidocs/org/apache/hadoop/hbase/ipc/WrongVersionException.html
index bd8edae..e7f3792 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/WrongVersionException.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/WrongVersionException.html
@@ -305,6 +305,6 @@ extends
-Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/ipc/class-use/AbstractRpcClient.AbstractRpcChannel.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/AbstractRpcClient.AbstractRpcChannel.html b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/AbstractRpcClient.AbstractRpcChannel.html
index c9f1035..e7cac53 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/AbstractRpcClient.AbstractRpcChannel.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/AbstractRpcClient.AbstractRpcChannel.html
@@ -170,6 +170,6 @@
 
 
 
-Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/ipc/class-use/AbstractRpcClient.BlockingRpcChannelImplementation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/AbstractRpcClient.BlockingRpcChannelImplementation.html b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/AbstractRpcClient.BlockingRpcChannelImplementation.html
index d2adf0a..4d21e33 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/AbstractRpcClient.BlockingRpcChannelImplementation.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/AbstractRpcClient.BlockingRpcChannelImplementation.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation. All rights reserved.
 

[10/51] [partial] hbase-site git commit: Published site at 7820ba1dbdba58b1002cdfde08eb21aa7a0bb6da.

2018-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/09ea0d5f/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
index 0f5a095..50bf692 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
@@ -78,8712 +78,8714 @@
 070import java.util.concurrent.locks.ReadWriteLock;
 071import java.util.concurrent.locks.ReentrantReadWriteLock;
 072import java.util.function.Function;
-073import org.apache.hadoop.conf.Configuration;
-074import org.apache.hadoop.fs.FileStatus;
-075import org.apache.hadoop.fs.FileSystem;
-076import org.apache.hadoop.fs.LocatedFileStatus;
-077import org.apache.hadoop.fs.Path;
-078import org.apache.hadoop.hbase.Cell;
-079import org.apache.hadoop.hbase.CellBuilderType;
-080import org.apache.hadoop.hbase.CellComparator;
-081import org.apache.hadoop.hbase.CellComparatorImpl;
-082import org.apache.hadoop.hbase.CellScanner;
-083import org.apache.hadoop.hbase.CellUtil;
-084import org.apache.hadoop.hbase.CompareOperator;
-085import org.apache.hadoop.hbase.CompoundConfiguration;
-086import org.apache.hadoop.hbase.DoNotRetryIOException;
-087import org.apache.hadoop.hbase.DroppedSnapshotException;
-088import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
-089import org.apache.hadoop.hbase.HConstants;
-090import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
-091import org.apache.hadoop.hbase.HDFSBlocksDistribution;
-092import org.apache.hadoop.hbase.KeyValue;
-093import org.apache.hadoop.hbase.KeyValueUtil;
-094import org.apache.hadoop.hbase.NamespaceDescriptor;
-095import org.apache.hadoop.hbase.NotServingRegionException;
-096import org.apache.hadoop.hbase.PrivateCellUtil;
-097import org.apache.hadoop.hbase.RegionTooBusyException;
-098import org.apache.hadoop.hbase.Tag;
-099import org.apache.hadoop.hbase.TagUtil;
-100import org.apache.hadoop.hbase.UnknownScannerException;
-101import org.apache.hadoop.hbase.client.Append;
-102import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-103import org.apache.hadoop.hbase.client.CompactionState;
-104import org.apache.hadoop.hbase.client.Delete;
-105import org.apache.hadoop.hbase.client.Durability;
-106import org.apache.hadoop.hbase.client.Get;
-107import org.apache.hadoop.hbase.client.Increment;
-108import org.apache.hadoop.hbase.client.IsolationLevel;
-109import org.apache.hadoop.hbase.client.Mutation;
-110import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;
-111import org.apache.hadoop.hbase.client.Put;
-112import org.apache.hadoop.hbase.client.RegionInfo;
-113import org.apache.hadoop.hbase.client.RegionInfoBuilder;
-114import org.apache.hadoop.hbase.client.RegionReplicaUtil;
-115import org.apache.hadoop.hbase.client.Result;
-116import org.apache.hadoop.hbase.client.RowMutations;
-117import org.apache.hadoop.hbase.client.Scan;
-118import org.apache.hadoop.hbase.client.TableDescriptor;
-119import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-120import org.apache.hadoop.hbase.conf.ConfigurationManager;
-121import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
-122import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-123import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-124import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
-125import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-126import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-127import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-128import org.apache.hadoop.hbase.filter.ByteArrayComparable;
-129import org.apache.hadoop.hbase.filter.FilterWrapper;
-130import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
-131import org.apache.hadoop.hbase.io.HFileLink;
-132import org.apache.hadoop.hbase.io.HeapSize;
-133import org.apache.hadoop.hbase.io.TimeRange;
-134import org.apache.hadoop.hbase.io.hfile.BlockCache;
-135import org.apache.hadoop.hbase.io.hfile.HFile;
-136import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
-137import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-138import org.apache.hadoop.hbase.ipc.RpcCall;
-139import org.apache.hadoop.hbase.ipc.RpcServer;
-140import org.apache.hadoop.hbase.mob.MobFileCache;
-141import org.apache.hadoop.hbase.monitoring.MonitoredTask;
-142import org.apache.hadoop.hbase.monitoring.TaskMonitor;
-143import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
-144import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;
-145import 

[10/51] [partial] hbase-site git commit: Published site at c448604ceb987d113913f0583452b2abce04db0d.

2018-12-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f8b8424/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.CellBasedKeyBlockIndexReader.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.CellBasedKeyBlockIndexReader.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.CellBasedKeyBlockIndexReader.html
index 1124f8b..0c29054 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.CellBasedKeyBlockIndexReader.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.CellBasedKeyBlockIndexReader.html
@@ -,587 +,592 @@
 1103      blockStream.write(midKeyMetadata);
 1104    blockWriter.writeHeaderAndData(out);
 1105    if (cacheConf != null) {
-1106      HFileBlock blockForCaching = blockWriter.getBlockForCaching(cacheConf);
-1107      cacheConf.getBlockCache().cacheBlock(new BlockCacheKey(nameForCaching,
-1108        rootLevelIndexPos, true, blockForCaching.getBlockType()), blockForCaching);
-1109    }
-1110  }
-1111
-1112  // Add root index block size
-1113  totalBlockOnDiskSize += blockWriter.getOnDiskSizeWithoutHeader();
-1114  totalBlockUncompressedSize +=
-1115      blockWriter.getUncompressedSizeWithoutHeader();
-1116
-1117  if (LOG.isTraceEnabled()) {
-1118    LOG.trace("Wrote a " + numLevels + "-level index with root level at pos "
-1119      + rootLevelIndexPos + ", " + rootChunk.getNumEntries()
-1120      + " root-level entries, " + totalNumEntries + " total entries, "
-1121      + StringUtils.humanReadableInt(this.totalBlockOnDiskSize) +
-1122      " on-disk size, "
-1123      + StringUtils.humanReadableInt(totalBlockUncompressedSize) +
-1124      " total uncompressed size.");
-1125  }
-1126  return rootLevelIndexPos;
-1127}
-1128
-1129/**
-1130 * Writes the block index data as a single level only. Does not do any
-1131 * block framing.
-1132 *
-1133 * @param out the buffered output stream to write the index to. Typically a
-1134 *   stream writing into an {@link HFile} block.
-1135 * @param description a short description of the index being written. Used
-1136 *   in a log message.
-1137 * @throws IOException
-1138 */
-1139public void writeSingleLevelIndex(DataOutput out, String description)
-1140    throws IOException {
-1141  expectNumLevels(1);
-1142
-1143  if (!singleLevelOnly)
-1144    throw new IOException("Single-level mode is turned off");
-1145
-1146  if (rootChunk.getNumEntries() > 0)
-1147    throw new IOException("Root-level entries already added in " +
-1148      "single-level mode");
-1149
-1150  rootChunk = curInlineChunk;
-1151  curInlineChunk = new BlockIndexChunk();
-1152
-1153  if (LOG.isTraceEnabled()) {
-1154    LOG.trace("Wrote a single-level " + description + " index with "
-1155      + rootChunk.getNumEntries() + " entries, " + rootChunk.getRootSize()
-1156      + " bytes");
-1157  }
-1158  rootChunk.writeRoot(out);
-1159}
-1160
-1161/**
-1162 * Split the current level of the block index into intermediate index
-1163 * blocks of permitted size and write those blocks to disk. Return the next
-1164 * level of the block index referencing those intermediate-level blocks.
-1165 *
-1166 * @param out
-1167 * @param currentLevel the current level of the block index, such as the a
-1168 *   chunk referencing all leaf-level index blocks
-1169 * @return the parent level block index, which becomes the root index after
-1170 *   a few (usually zero) iterations
-1171 * @throws IOException
-1172 */
-1173private BlockIndexChunk writeIntermediateLevel(FSDataOutputStream out,
-1174    BlockIndexChunk currentLevel) throws IOException {
-1175  // Entries referencing intermediate-level blocks we are about to create.
-1176  BlockIndexChunk parent = new BlockIndexChunk();
-1177
-1178  // The current intermediate-level block index chunk.
-1179  BlockIndexChunk curChunk = new BlockIndexChunk();
-1180
-1181  for (int i = 0; i < currentLevel.getNumEntries(); ++i) {
-1182    curChunk.add(currentLevel.getBlockKey(i),
-1183      currentLevel.getBlockOffset(i), currentLevel.getOnDiskDataSize(i));
-1184
-1185    // HBASE-16288: We have to have at least minIndexNumEntries(16) items in the index so that
-1186    // we won't end up with too-many levels for a index with very large rowKeys. Also, if the
-1187    // first key is larger than maxChunkSize this will cause infinite recursion.
-1188    if (i >= minIndexNumEntries && curChunk.getRootSize() >= maxChunkSize) {
-1189      writeIntermediateBlock(out, parent, curChunk);
-1190 
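
The HBASE-16288 guard quoted in the hunk flushes an intermediate chunk only once both conditions hold: the loop has consumed at least minIndexNumEntries (16) entries and the chunk has outgrown maxChunkSize; the comment notes that a first key already larger than maxChunkSize would otherwise cause infinite recursion. A toy sketch of just that splitting rule, with index entries reduced to their on-disk sizes (names and thresholds below are illustrative):

import java.util.ArrayList;
import java.util.List;

public class IntermediateLevelSketch {
  static final int MIN_INDEX_NUM_ENTRIES = 16;  // entry floor from the comment above
  static final int MAX_CHUNK_SIZE = 128 * 1024; // illustrative flush threshold, bytes

  // Split one index level into chunks: flush only when BOTH conditions hold,
  // mirroring the guarded if above. Without the entry floor, a single entry
  // bigger than MAX_CHUNK_SIZE would produce a chunk per entry and the level
  // would never get smaller.
  static List<List<Integer>> splitLevel(List<Integer> entrySizes) {
    List<List<Integer>> blocks = new ArrayList<>();
    List<Integer> cur = new ArrayList<>();
    int curSize = 0;
    for (int i = 0; i < entrySizes.size(); i++) {
      cur.add(entrySizes.get(i));
      curSize += entrySizes.get(i);
      if (i >= MIN_INDEX_NUM_ENTRIES && curSize >= MAX_CHUNK_SIZE) {
        blocks.add(cur);
        cur = new ArrayList<>();
        curSize = 0;
      }
    }
    if (!cur.isEmpty()) {
      blocks.add(cur); // remainder becomes the final intermediate block
    }
    return blocks;
  }
}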

[10/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

2018-12-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
@@ -142,5192 +142,5186 @@
 134import org.apache.hadoop.hbase.wal.WAL;
 135import org.apache.hadoop.hbase.wal.WALFactory;
 136import org.apache.hadoop.hbase.wal.WALSplitter;
-137import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-138import org.apache.hadoop.hbase.zookeeper.ZKUtil;
-139import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-140import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-141import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-142import org.apache.hadoop.ipc.RemoteException;
-143import org.apache.hadoop.security.UserGroupInformation;
-144import org.apache.hadoop.util.ReflectionUtils;
-145import org.apache.hadoop.util.Tool;
-146import org.apache.hadoop.util.ToolRunner;
-147import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-148import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
-149import org.apache.yetus.audience.InterfaceAudience;
-150import org.apache.yetus.audience.InterfaceStability;
-151import org.apache.zookeeper.KeeperException;
-152import org.slf4j.Logger;
-153import org.slf4j.LoggerFactory;
-154
-155import org.apache.hbase.thirdparty.com.google.common.base.Joiner;
-156import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-157import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;
-158import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-159import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;
-160import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;
-161import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;
-162
-163import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-164import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;
-165
-166/**
-167 * HBaseFsck (hbck) is a tool for checking and repairing region consistency and
-168 * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not
-169 * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.
-170 * See hbck2 (HBASE-19121) for a hbck tool for hbase2.
-171 *
-172 * <p>
-173 * Region consistency checks verify that hbase:meta, region deployment on region
-174 * servers and the state of data in HDFS (.regioninfo files) all are in
-175 * accordance.
-176 * <p>
-177 * Table integrity checks verify that all possible row keys resolve to exactly
-178 * one region of a table.  This means there are no individual degenerate
-179 * or backwards regions; no holes between regions; and that there are no
-180 * overlapping regions.
-181 * <p>
-182 * The general repair strategy works in two phases:
-183 * <ol>
-184 * <li> Repair Table Integrity on HDFS. (merge or fabricate regions)
-185 * <li> Repair Region Consistency with hbase:meta and assignments
-186 * </ol>
-187 * <p>
-188 * For table integrity repairs, the tables' region directories are scanned
-189 * for .regioninfo files.  Each table's integrity is then verified.  If there
-190 * are any orphan regions (regions with no .regioninfo files) or holes, new
-191 * regions are fabricated.  Backwards regions are sidelined as well as empty
-192 * degenerate (endkey==startkey) regions.  If there are any overlapping regions,
-193 * a new region is created and all data is merged into the new region.
-194 * <p>
-195 * Table integrity repairs deal solely with HDFS and could potentially be done
-196 * offline -- the hbase region servers or master do not need to be running.
-197 * This phase can eventually be used to completely reconstruct the hbase:meta table in
-198 * an offline fashion.
-199 * <p>
-200 * Region consistency requires three conditions -- 1) valid .regioninfo file
-201 * present in an HDFS region dir,  2) valid row with .regioninfo data in META,
-202 * and 3) a region is deployed only at the regionserver that was assigned to
-203 * with proper state in the master.
-204 * <p>
-205 * Region consistency repairs require hbase to be online so that hbck can
-206 * contact the HBase master and region servers.  The hbck#connect() method must
-207 * first be called successfully.  Much of the region consistency information
-208 * is transient and less risky to repair.
-209 * <p>
-210 * If hbck is run from the command line, there are a 
there are a 
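The class comment above enumerates three conditions that together define region consistency; restated as a single predicate (the booleans are stand-ins for the real checks hbck performs):

// Toy restatement of the three region-consistency conditions quoted above.
class RegionConsistencySketch {
  boolean validRegioninfoInHdfs;    // 1) .regioninfo file present in the HDFS region dir
  boolean validMetaRow;             // 2) matching row with .regioninfo data in hbase:meta
  boolean deployedOnAssignedServer; // 3) deployed only where the master assigned it

  boolean isConsistent() {
    return validRegioninfoInHdfs && validMetaRow && deployedOnAssignedServer;
  }
}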

[10/51] [partial] hbase-site git commit: Published site at 1acbd36c903b048141866b143507bfce124a5c5f.

2018-11-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5299e667/devapidocs/org/apache/hadoop/hbase/util/FSTableDescriptors.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/FSTableDescriptors.html b/devapidocs/org/apache/hadoop/hbase/util/FSTableDescriptors.html
index 025cb9e..2f85de8 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/FSTableDescriptors.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/FSTableDescriptors.html
@@ -639,7 +639,7 @@ implements

 TABLEINFO_FILESTATUS_COMPARATOR
-static final Comparator<org.apache.hadoop.fs.FileStatus> TABLEINFO_FILESTATUS_COMPARATOR
+static final Comparator<org.apache.hadoop.fs.FileStatus> TABLEINFO_FILESTATUS_COMPARATOR
 Compare FileStatus instances by Path.getName(). Returns in
 reverse order.

@@ -650,7 +650,7 @@ implements

 TABLEINFO_PATHFILTER
-private static final org.apache.hadoop.fs.PathFilter TABLEINFO_PATHFILTER
+private static final org.apache.hadoop.fs.PathFilter TABLEINFO_PATHFILTER

@@ -659,7 +659,7 @@ implements

 WIDTH_OF_SEQUENCE_ID
-static final int WIDTH_OF_SEQUENCE_ID
+static final int WIDTH_OF_SEQUENCE_ID
 Width of the sequenceid that is a suffix on a tableinfo file.

 See Also:

@@ -673,7 +673,7 @@ implements

 TABLEINFO_FILE_REGEX
-private static final Pattern TABLEINFO_FILE_REGEX
+private static final Pattern TABLEINFO_FILE_REGEX
 Regex to eat up sequenceid suffix on a .tableinfo file.
 Use regex because may encounter oldstyle .tableinfos where there is no
 sequenceid on the end.

@@ -794,7 +794,7 @@ implements

 createMetaTableDescriptor
-public static TableDescriptor createMetaTableDescriptor(org.apache.hadoop.conf.Configuration conf)
+public static TableDescriptor createMetaTableDescriptor(org.apache.hadoop.conf.Configuration conf)
   throws IOException

 Throws:

@@ -808,7 +808,7 @@ implements

 setCacheOn
-public void setCacheOn()
+public void setCacheOn()
   throws IOException
 Description copied from interface: TableDescriptors
 Enables the tabledescriptor cache

@@ -826,7 +826,7 @@ implements

 setCacheOff
-public void setCacheOff()
+public void setCacheOff()
   throws IOException
 Description copied from interface: TableDescriptors
 Disables the tabledescriptor cache

@@ -844,7 +844,7 @@ implements

 isUsecache
-public boolean isUsecache()
+public boolean isUsecache()

@@ -854,7 +854,7 @@ implements

 get
 @Nullable
-public TableDescriptor get(TableName tablename)
+public TableDescriptor get(TableName tablename)
   throws IOException
 Get the current table descriptor for the given table, or null if none exists.

@@ -876,7 +876,7 @@ public

 getAll
-public Map<String,TableDescriptor> getAll()
+public Map<String,TableDescriptor> getAll()
   throws IOException
 Returns a map from table name to table descriptor for all tables.

@@ -895,7 +895,7 @@ public

 getByNamespace
-public Map<String,TableDescriptor> 
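
TABLEINFO_FILE_REGEX and WIDTH_OF_SEQUENCE_ID together describe tableinfo file names: a ".tableinfo" base plus an optional 10-digit sequence-id suffix, with old-style files carrying no suffix at all. A sketch of that parsing, using a pattern modeled on the doc text above (the exact regex and the -1 convention are assumptions, not the FSTableDescriptors internals):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class TableinfoRegexSketch {
  static final int WIDTH_OF_SEQUENCE_ID = 10; // per the field documented above

  // Assumed shape: ".tableinfo" optionally followed by "." and 10 digits.
  static final Pattern TABLEINFO_FILE_REGEX =
      Pattern.compile("^\\.tableinfo(\\.([0-9]{" + WIDTH_OF_SEQUENCE_ID + "}))?$");

  // Returns the parsed sequence id, or -1 for an old-style suffix-less file.
  static int sequenceId(String fileName) {
    Matcher m = TABLEINFO_FILE_REGEX.matcher(fileName);
    if (!m.matches()) {
      throw new IllegalArgumentException("not a .tableinfo file: " + fileName);
    }
    return m.group(2) == null ? -1 : Integer.parseInt(m.group(2));
  }

  public static void main(String[] args) {
    System.out.println(sequenceId(".tableinfo"));            // -1, old style
    System.out.println(sequenceId(".tableinfo.0000000009")); // 9
  }
}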

[10/51] [partial] hbase-site git commit: Published site at 130057f13774f6b213cdb06952c805a29d59396e.

2018-11-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/68eae623/testdevapidocs/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.html b/testdevapidocs/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.html
index 022325c..48a55d3 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":9,"i2":10,"i3":9,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10};
+var methods = {"i0":10,"i1":9,"i2":10,"i3":9,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class TestDataBlockEncoders
+public class TestDataBlockEncoders
 extends Object
 Test all of the data block encoding algorithms for correctness. Most of the
 class generate data which will test different branches in code.
@@ -274,11 +274,15 @@ extends Object
 
 
 void
+testRowIndexWithTagsButNoTagsInCell()
+
+
+void
 testSeekingOnSample()
 Test seeking while file is encoded.
 
 
-
+
 void
 testZeroByte()
 
@@ -310,7 +314,7 @@ extends Object

 CLASS_RULE
-public static final HBaseClassTestRule CLASS_RULE
+public static final HBaseClassTestRule CLASS_RULE

@@ -319,7 +323,7 @@ extends Object

 LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG

@@ -328,7 +332,7 @@ extends Object

 NUMBER_OF_KV
-private static int NUMBER_OF_KV
+private static int NUMBER_OF_KV

@@ -337,7 +341,7 @@ extends Object

 NUM_RANDOM_SEEKS
-private static int NUM_RANDOM_SEEKS
+private static int NUM_RANDOM_SEEKS

@@ -346,7 +350,7 @@ extends Object

 ENCODED_DATA_OFFSET
-private static int ENCODED_DATA_OFFSET
+private static int ENCODED_DATA_OFFSET

@@ -355,7 +359,7 @@ extends Object

 HFILEBLOCK_DUMMY_HEADER
-static final byte[] HFILEBLOCK_DUMMY_HEADER
+static final byte[] HFILEBLOCK_DUMMY_HEADER

@@ -364,7 +368,7 @@ extends Object

 generator
-private RedundantKVGenerator generator
+private RedundantKVGenerator generator

@@ -373,7 +377,7 @@ extends Object

 randomizer
-private Random randomizer
+private Random randomizer

@@ -382,7 +386,7 @@ extends Object

 includesMemstoreTS
-private final boolean includesMemstoreTS
+private final boolean includesMemstoreTS

@@ -391,7 +395,7 @@ extends Object

 includesTags
-private final boolean includesTags
+private final boolean includesTags

@@ -400,7 +404,7 @@ extends Object

 useOffheapData
-private final boolean useOffheapData
+private final boolean useOffheapData

@@ -417,7 +421,7 @@ extends Object

 TestDataBlockEncoders
-public TestDataBlockEncoders(boolean includesMemstoreTS,
+public TestDataBlockEncoders(boolean includesMemstoreTS,
     boolean includesTag,
     boolean useOffheapData)

@@ -436,7 +440,7 @@ extends Object

 parameters
-public static Collection<Object[]> parameters()
+public static Collection<Object[]> 

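TestDataBlockEncoders is parameterized over three booleans (includesMemstoreTS, includesTags, useOffheapData), which is why one constructor serves many test instances. A sketch of how a parameters() factory can expand such knobs into a JUnit-style matrix (the expansion below is illustrative, not the class's actual parameters() body):

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

public class ParameterMatrixSketch {
  // Expands the three boolean knobs into all 8 combinations; a JUnit
  // Parameterized runner would construct one test instance per row.
  public static Collection<Object[]> parameters() {
    List<Object[]> params = new ArrayList<>();
    for (boolean memstoreTS : new boolean[] { false, true }) {
      for (boolean tags : new boolean[] { false, true }) {
        for (boolean offheap : new boolean[] { false, true }) {
          params.add(new Object[] { memstoreTS, tags, offheap });
        }
      }
    }
    return params;
  }
}
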
[10/51] [partial] hbase-site git commit: Published site at d5e4faacc354c1bc4d93efa71ca97ee3a056123e.

2018-10-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b5e107c3/testdevapidocs/org/apache/hadoop/hbase/master/procedure/TestSchedulerQueueDeadLock.TableSharedProcedureWithId.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/master/procedure/TestSchedulerQueueDeadLock.TableSharedProcedureWithId.html b/testdevapidocs/org/apache/hadoop/hbase/master/procedure/TestSchedulerQueueDeadLock.TableSharedProcedureWithId.html
new file mode 100644
index 0000000..e7fa652
--- /dev/null
+++ b/testdevapidocs/org/apache/hadoop/hbase/master/procedure/TestSchedulerQueueDeadLock.TableSharedProcedureWithId.html
@@ -0,0 +1,357 @@
+TestSchedulerQueueDeadLock.TableSharedProcedureWithId (Apache HBase 3.0.0-SNAPSHOT Test API)
+
+org.apache.hadoop.hbase.master.procedure
+Class TestSchedulerQueueDeadLock.TableSharedProcedureWithId
+
+
+
+java.lang.Object
+
+
+org.apache.hadoop.hbase.procedure2.Procedure<TEnv>
+
+
+org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.NoopProcedure<TestSchedulerQueueDeadLock.TestEnv>
+
+
+org.apache.hadoop.hbase.master.procedure.TestSchedulerQueueDeadLock.TableSharedProcedure
+
+
+org.apache.hadoop.hbase.master.procedure.TestSchedulerQueueDeadLock.TableSharedProcedureWithId
+
+
+All Implemented Interfaces:
+Comparable<org.apache.hadoop.hbase.procedure2.Procedure<TestSchedulerQueueDeadLock.TestEnv>>, org.apache.hadoop.hbase.master.procedure.TableProcedureInterface
+
+
+Enclosing class:
+TestSchedulerQueueDeadLock
+
+
+
+public static final class TestSchedulerQueueDeadLock.TableSharedProcedureWithId
+extends TestSchedulerQueueDeadLock.TableSharedProcedure
+
+
+
+
+
+
+
+
+
+
+
+Nested Class Summary
+
+
+
+
+Nested classes/interfaces inherited from class org.apache.hadoop.hbase.procedure2.Procedure
+org.apache.hadoop.hbase.procedure2.Procedure.LockState
+
+
+
+
+
+Nested classes/interfaces inherited from interface org.apache.hadoop.hbase.master.procedure.TableProcedureInterface
+org.apache.hadoop.hbase.master.procedure.TableProcedureInterface.TableOperationType
+
+
+
+
+
+
+
+
+Field Summary
+
+
+
+
+Fields inherited from class org.apache.hadoop.hbase.procedure2.Procedure
+NO_PROC_ID, NO_TIMEOUT
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Constructor and Description
+
+
+TableSharedProcedureWithId()
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All MethodsInstance MethodsConcrete Methods
+
+Modifier and Type
+Method and Description
+
+
+protected void
+setProcId(long procId)
+
+
+
+
+
+
+Methods inherited from class org.apache.hadoop.hbase.master.procedure.TestSchedulerQueueDeadLock.TableSharedProcedure
+acquireLock, execute, getTableName, getTableOperationType, holdLock, releaseLock
+
+
+
+
+
+Methods inherited from class org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.NoopProcedure
+abort, deserializeStateData, rollback, serializeStateData
+
+
+
+
+
+Methods inherited from class org.apache.hadoop.hbase.procedure2.Procedure
+addStackIndex, afterReplay, beforeReplay, bypass, compareTo, completionCleanup, doExecute, doRollback, elapsedTime, getChildrenLatch, getException, getLastUpdate, getNonceKey, getOwner, getParentProcId, getProcedureMetrics, getProcId, getProcIdHashCode, getProcName, getResult, getRootProcedureId, getRootProcId, getStackIndexes, getState, getSubmittedTime, getTimeout, getTimeoutTimestamp, hasChildren, hasException, hasLock, hasOwner, hasParent, hasTimeout, haveSameParent, incChildrenLatch, isBypass, isFailed, isFinished, isInitializing, isLockedWhenLoading, isRunnable, isSuccess, isWaiting, isYieldAfterExecutionStep, 

[10/51] [partial] hbase-site git commit: Published site at 3fe8649b2c9ba1271c25e8f476548907e4c7a90d.

2018-10-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8f09a71d/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreTracker.DeleteState.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreTracker.DeleteState.html b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreTracker.DeleteState.html
index 1579eab..dc5908e 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreTracker.DeleteState.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreTracker.DeleteState.html
@@ -31,221 +31,221 @@
 023import java.util.Iterator;
 024import java.util.Map;
 025import java.util.TreeMap;
-026import java.util.stream.LongStream;
-027import org.apache.hadoop.hbase.procedure2.Procedure;
-028import org.apache.yetus.audience.InterfaceAudience;
-029import org.apache.yetus.audience.InterfaceStability;
-030
-031import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
-032
-033/**
-034 * Keeps track of live procedures.
-035 *
-036 * It can be used by the ProcedureStore to identify which procedures are already
-037 * deleted/completed to avoid the deserialization step on restart
-038 */
-039@InterfaceAudience.Private
-040@InterfaceStability.Evolving
-041public class ProcedureStoreTracker {
-042  // Key is procedure id corresponding to first bit of the bitmap.
-043  private final TreeMap<Long, BitSetNode> map = new TreeMap<>();
-044
-045  /**
-046   * If true, do not remove bits corresponding to deleted procedures. Note that this can result
-047   * in huge bitmaps overtime.
-048   * Currently, it's set to true only when building tracker state from logs during recovery. During
-049   * recovery, if we are sure that a procedure has been deleted, reading its old update entries
-050   * can be skipped.
-051   */
-052  private boolean keepDeletes = false;
-053  /**
-054   * If true, it means tracker has incomplete information about the active/deleted procedures.
-055   * It's set to true only when recovering from old logs. See {@link #isDeleted(long)} docs to
-056   * understand it's real use.
-057   */
-058  boolean partial = false;
-059
-060  private long minModifiedProcId = Long.MAX_VALUE;
-061  private long maxModifiedProcId = Long.MIN_VALUE;
-062
-063  public enum DeleteState { YES, NO, MAYBE }
-064
-065  public void resetToProto(ProcedureProtos.ProcedureStoreTracker trackerProtoBuf) {
-066    reset();
-067    for (ProcedureProtos.ProcedureStoreTracker.TrackerNode protoNode: trackerProtoBuf.getNodeList()) {
-068      final BitSetNode node = new BitSetNode(protoNode);
-069      map.put(node.getStart(), node);
-070    }
-071  }
-072
-073  /**
-074   * Resets internal state to same as given {@code tracker}. Does deep copy of the bitmap.
-075   */
-076  public void resetTo(ProcedureStoreTracker tracker) {
-077    resetTo(tracker, false);
-078  }
-079
-080  /**
-081   * Resets internal state to same as given {@code tracker}, and change the deleted flag according
-082   * to the modified flag if {@code resetDelete} is true. Does deep copy of the bitmap.
-083   * <p/>
-084   * The {@code resetDelete} will be set to true when building cleanup tracker, please see the
-085   * comments in {@link BitSetNode#BitSetNode(BitSetNode, boolean)} to learn how we change the
-086   * deleted flag if {@code resetDelete} is true.
-087   */
-088  public void resetTo(ProcedureStoreTracker tracker, boolean resetDelete) {
-089    reset();
-090    this.partial = tracker.partial;
-091    this.minModifiedProcId = tracker.minModifiedProcId;
-092    this.maxModifiedProcId = tracker.maxModifiedProcId;
-093    this.keepDeletes = tracker.keepDeletes;
-094    for (Map.Entry<Long, BitSetNode> entry : tracker.map.entrySet()) {
-095      map.put(entry.getKey(), new BitSetNode(entry.getValue(), resetDelete));
-096    }
-097  }
-098
-099  public void insert(long procId) {
-100    insert(null, procId);
+026import java.util.function.BiFunction;
+027import java.util.stream.LongStream;
+028import org.apache.hadoop.hbase.procedure2.Procedure;
+029import org.apache.yetus.audience.InterfaceAudience;
+030import org.apache.yetus.audience.InterfaceStability;
+031
+032import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
+033
+034/**
+035 * Keeps track of live procedures.
+036 *
+037 * It can be used by the ProcedureStore to identify which procedures are already
+038 * deleted/completed to avoid the deserialization step on restart
+039 */
+040@InterfaceAudience.Private
+041@InterfaceStability.Evolving
+042public class ProcedureStoreTracker {
+043  // Key is procedure id corresponding to first bit of the bitmap.
+044  private final TreeMap<Long, BitSetNode> map = new TreeMap<>();
+045
+046  /**
+047   * If true, do not remove bits corresponding to deleted procedures. Note that this can result

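The tracker keeps a TreeMap keyed by the first procedure id covered by each BitSetNode, so a membership check is a floor lookup plus a bit test. A reduced sketch of that layout, with java.util.BitSet standing in for BitSetNode (the node width and method names are illustrative):

import java.util.BitSet;
import java.util.TreeMap;

public class TrackerSketch {
  private static final int NODE_WIDTH = 64; // procedure ids covered per node

  // Key is the first procedure id of the node, as in the real tracker's map.
  private final TreeMap<Long, BitSet> deleted = new TreeMap<>();

  public void delete(long procId) {
    long start = procId - (procId % NODE_WIDTH);
    deleted.computeIfAbsent(start, s -> new BitSet(NODE_WIDTH))
        .set((int) (procId - start));
  }

  // Restart-time check: a procedure marked here can skip deserialization.
  public boolean isDeleted(long procId) {
    long start = procId - (procId % NODE_WIDTH);
    BitSet node = deleted.get(start);
    return node != null && node.get((int) (procId - start));
  }
}
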
[10/51] [partial] hbase-site git commit: Published site at 7adf590106826b9e4432cfeee06acdc0ccff8c6e.

2018-10-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/425db230/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureTree.TestProcedure.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureTree.TestProcedure.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureTree.TestProcedure.html
new file mode 100644
index 000..2811c6b
--- /dev/null
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureTree.TestProcedure.html
@@ -0,0 +1,245 @@
+001/**
+002 * Licensed to the Apache Software Foundation (ASF) under one
+003 * or more contributor license agreements.  See the NOTICE file
+004 * distributed with this work for additional information
+005 * regarding copyright ownership.  The ASF licenses this file
+006 * to you under the Apache License, Version 2.0 (the
+007 * "License"); you may not use this file except in compliance
+008 * with the License.  You may obtain a copy of the License at
+009 *
+010 *     http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or agreed to in writing, software
+013 * distributed under the License is distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+015 * See the License for the specific language governing permissions and
+016 * limitations under the License.
+017 */
+018package org.apache.hadoop.hbase.procedure2.store.wal;
+019
+020import static org.junit.Assert.assertEquals;
+021
+022import java.io.IOException;
+023import java.io.UncheckedIOException;
+024import java.util.ArrayList;
+025import java.util.Arrays;
+026import java.util.List;
+027import java.util.stream.Collectors;
+028import org.apache.hadoop.hbase.HBaseClassTestRule;
+029import org.apache.hadoop.hbase.procedure2.Procedure;
+030import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
+031import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
+032import org.apache.hadoop.hbase.procedure2.ProcedureUtil;
+033import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
+034import org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator;
+035import org.apache.hadoop.hbase.testclassification.MasterTests;
+036import org.apache.hadoop.hbase.testclassification.SmallTests;
+037import org.junit.ClassRule;
+038import org.junit.Test;
+039import org.junit.experimental.categories.Category;
+040
+041import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
+042
+043@Category({ MasterTests.class, SmallTests.class })
+044public class TestWALProcedureTree {
+045
+046  @ClassRule
+047  public static final HBaseClassTestRule CLASS_RULE =
+048      HBaseClassTestRule.forClass(TestWALProcedureTree.class);
+049
+050  public static final class TestProcedure extends Procedure<Void> {
+051
+052    @Override
+053    public void setProcId(long procId) {
+054      super.setProcId(procId);
+055    }
+056
+057    @Override
+058    public void setParentProcId(long parentProcId) {
+059      super.setParentProcId(parentProcId);
+060    }
+061
+062    @Override
+063    public synchronized void addStackIndex(int index) {
+064      super.addStackIndex(index);
+065    }
+066
+067    @Override
+068    protected Procedure<Void>[] execute(Void env)
+069        throws ProcedureYieldException, ProcedureSuspendedException, InterruptedException {
+070      return null;
+071    }
+072
+073    @Override
+074    protected void rollback(Void env) throws IOException, InterruptedException {
+075    }
+076
+077    @Override
+078    protected boolean abort(Void env) {
+079      return false;
+080    }
+081
+082    @Override
+083    protected void serializeStateData(ProcedureStateSerializer serializer) throws IOException {
+084    }
+085
+086    @Override
+087    protected void deserializeStateData(ProcedureStateSerializer serializer) throws IOException {
+088    }
+089  }
+090
+091  private TestProcedure createProc(long procId, long parentProcId) {
+092    TestProcedure proc = new TestProcedure();
+093    proc.setProcId(procId);
+094    if (parentProcId != Procedure.NO_PROC_ID) {
+095      proc.setParentProcId(parentProcId);
+096    }
+097    return proc;
+098  }
+099
+100  private List<ProcedureProtos.Procedure> toProtos(TestProcedure... procs) {
+101    return Arrays.stream(procs).map(p -> {
+102      try {
+103        return ProcedureUtil.convertToProtoProcedure(p);
+104      } catch (IOException e) {
+105        throw new UncheckedIOException(e);
+106      }
+107    }).collect(Collectors.toList());
+108  }
+109
+110  private List<TestProcedure> getProcs(ProcedureIterator iter) throws IOException {
+111    List<TestProcedure> procs = new ArrayList<>();
+112    while

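The toProtos helper above shows the standard workaround for calling a checked-exception API inside Stream.map: wrap the IOException in UncheckedIOException. The same pattern in a self-contained form (serialize and StreamIoExample are illustrative names):

    import java.io.IOException;
    import java.io.UncheckedIOException;
    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    class StreamIoExample {
      // Stand-in for a converter that declares a checked exception,
      // as ProcedureUtil.convertToProtoProcedure does.
      static byte[] serialize(String s) throws IOException {
        return s.getBytes("UTF-8");
      }

      // Lambdas passed to map() cannot throw checked exceptions, so wrap;
      // callers may catch UncheckedIOException and unwrap its cause.
      static List<byte[]> serializeAll(String... items) {
        return Arrays.stream(items).map(s -> {
          try {
            return serialize(s);
          } catch (IOException e) {
            throw new UncheckedIOException(e);
          }
        }).collect(Collectors.toList());
      }
    }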
[10/51] [partial] hbase-site git commit: Published site at 5fbb227deb365fe812d433fe39b85ac4b0ddee20.

2018-10-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c9ebe686/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.FailedOpenUpdaterThread.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.FailedOpenUpdaterThread.html b/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.FailedOpenUpdaterThread.html
index 71ab8c3..7e6be2b 100644
--- a/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.FailedOpenUpdaterThread.html
+++ b/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.FailedOpenUpdaterThread.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";

-private class RSGroupInfoManagerImpl.FailedOpenUpdaterThread
+private class RSGroupInfoManagerImpl.FailedOpenUpdaterThread
 extends java.lang.Thread
 implements ServerListener

@@ -267,7 +267,7 @@ implements

 waitInterval
-private final long waitInterval
+private final long waitInterval

@@ -276,7 +276,7 @@ implements

 hasChanged
-private volatile boolean hasChanged
+private volatile boolean hasChanged

@@ -293,7 +293,7 @@ implements

 FailedOpenUpdaterThread
-public FailedOpenUpdaterThread(org.apache.hadoop.conf.Configuration conf)
+public FailedOpenUpdaterThread(org.apache.hadoop.conf.Configuration conf)

@@ -310,7 +310,7 @@ implements

 serverAdded
-public void serverAdded(ServerName serverName)
+public void serverAdded(ServerName serverName)
 Description copied from interface: ServerListener
 The server has joined the cluster.

@@ -327,7 +327,7 @@ implements

 serverRemoved
-public void serverRemoved(ServerName serverName)
+public void serverRemoved(ServerName serverName)
 Description copied from interface: ServerListener
 The server was removed from the cluster.

@@ -344,7 +344,7 @@ implements

 run
-public void run()
+public void run()
 Specified by:
 run in interface java.lang.Runnable

@@ -359,7 +359,7 @@ implements

 serverChanged
-public void serverChanged()
+public void serverChanged()

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c9ebe686/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html b/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
index f64b6c3..139397f 100644
--- a/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
+++ b/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";

-private class RSGroupInfoManagerImpl.RSGroupStartupWorker
+private class RSGroupInfoManagerImpl.RSGroupStartupWorker
 extends java.lang.Thread

@@ -255,7 +255,7 @@ extends java.lang.Thread

 LOG
-private final org.slf4j.Logger LOG
+private final org.slf4j.Logger LOG

@@ -264,7 +264,7 @@ extends java.lang.Thread

 online
-private volatile boolean online
+private volatile boolean online

@@ -281,7 +281,7 @@ extends java.lang.Thread

 RSGroupStartupWorker
-RSGroupStartupWorker()
+RSGroupStartupWorker()

@@ -298,7 +298,7 @@ extends java.lang.Thread

 run
-public void run()
+public void run()
 Specified by:
 run in interface java.lang.Runnable

@@ -313,7 +313,7 @@ extends java.lang.Thread

 waitForGroupTableOnline
-private boolean waitForGroupTableOnline()
+private boolean waitForGroupTableOnline()

@@ -322,7 +322,7 @@ extends java.lang.Thread

 createRSGroupTable
-private void createRSGroupTable()
+private void createRSGroupTable()
  throws java.io.IOException

 Throws:
@@ -336,7 +336,7 @@ extends
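FailedOpenUpdaterThread above pairs a volatile changed flag with a timed wait: listener callbacks flip the flag, and the thread retries failed opens only after something changed. A minimal sketch of that thread shape (ChangeDrivenUpdater and retryFailedOpens are illustrative names, not the HBase internals):

    // Callers flip a flag from listener callbacks; the thread wakes on
    // notify or after waitIntervalMs and works only when something changed.
    class ChangeDrivenUpdater extends Thread {
      private final long waitIntervalMs;
      private volatile boolean hasChanged = false;

      ChangeDrivenUpdater(long waitIntervalMs) {
        this.waitIntervalMs = waitIntervalMs;
        setDaemon(true);
      }

      void serverChanged() {  // e.g. called from serverAdded(...)
        synchronized (this) {
          hasChanged = true;
          notifyAll();
        }
      }

      @Override
      public void run() {
        while (!isInterrupted()) {
          synchronized (this) {
            while (!hasChanged) {
              try {
                wait(waitIntervalMs);
              } catch (InterruptedException e) {
                return;  // shutting down
              }
            }
            hasChanged = false;
          }
          retryFailedOpens();  // placeholder for the real work
        }
      }

      private void retryFailedOpens() { /* application-specific */ }
    }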

[10/51] [partial] hbase-site git commit: Published site at 821e4d7de2d576189f4288d1c2acf9e9a9471f5c.

2018-10-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/323b17d9/testdevapidocs/src-html/org/apache/hadoop/hbase/client/RestoreSnapshotFromClientTestBase.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/RestoreSnapshotFromClientTestBase.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/RestoreSnapshotFromClientTestBase.html
new file mode 100644
index 000..0916093
--- /dev/null
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/RestoreSnapshotFromClientTestBase.html
@@ -0,0 +1,222 @@
+001/**
+002 * Licensed to the Apache Software Foundation (ASF) under one
+003 * or more contributor license agreements.  See the NOTICE file
+004 * distributed with this work for additional information
+005 * regarding copyright ownership.  The ASF licenses this file
+006 * to you under the Apache License, Version 2.0 (the
+007 * "License"); you may not use this file except in compliance
+008 * with the License.  You may obtain a copy of the License at
+009 *
+010 *     http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or agreed to in writing, software
+013 * distributed under the License is distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+015 * See the License for the specific language governing permissions and
+016 * limitations under the License.
+017 */
+018package org.apache.hadoop.hbase.client;
+019
+020import java.io.IOException;
+021import org.apache.hadoop.conf.Configuration;
+022import org.apache.hadoop.hbase.HBaseTestingUtility;
+023import org.apache.hadoop.hbase.HConstants;
+024import org.apache.hadoop.hbase.TableName;
+025import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
+026import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
+027import org.apache.hadoop.hbase.util.Bytes;
+028import org.junit.After;
+029import org.junit.AfterClass;
+030import org.junit.Before;
+031import org.junit.BeforeClass;
+032import org.junit.Rule;
+033import org.junit.rules.TestName;
+034
+035/**
+036 * Base class for testing restore snapshot
+037 */
+038public class RestoreSnapshotFromClientTestBase {
+039  protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+040
+041  protected final byte[] FAMILY = Bytes.toBytes("cf");
+042  protected final byte[] TEST_FAMILY2 = Bytes.toBytes("cf2");
+043
+044  protected TableName tableName;
+045  protected byte[] emptySnapshot;
+046  protected byte[] snapshotName0;
+047  protected byte[] snapshotName1;
+048  protected byte[] snapshotName2;
+049  protected int snapshot0Rows;
+050  protected int snapshot1Rows;
+051  protected Admin admin;
+052
+053  @Rule
+054  public TestName name = new TestName();
+055
+056  @BeforeClass
+057  public static void setupCluster() throws Exception {
+058    setupConf(TEST_UTIL.getConfiguration());
+059    TEST_UTIL.startMiniCluster(3);
+060  }
+061
+062  protected static void setupConf(Configuration conf) {
+063    TEST_UTIL.getConfiguration().setBoolean(SnapshotManager.HBASE_SNAPSHOT_ENABLED, true);
+064    TEST_UTIL.getConfiguration().setInt("hbase.hstore.compactionThreshold", 10);
+065    TEST_UTIL.getConfiguration().setInt("hbase.regionserver.msginterval", 100);
+066    TEST_UTIL.getConfiguration().setInt("hbase.client.pause", 250);
+067    TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 6);
+068    TEST_UTIL.getConfiguration().setBoolean("hbase.master.enabletable.roundrobin", true);
+069  }
+070
+071  @AfterClass
+072  public static void tearDownAfterClass() throws Exception {
+073    TEST_UTIL.shutdownMiniCluster();
+074  }
+075
+076  /**
+077   * Initialize the tests with a table filled with some data and two snapshots (snapshotName0,
+078   * snapshotName1) of different states. The tableName, snapshotNames and the number of rows in the
+079   * snapshot are initialized.
+080   */
+081  @Before
+082  public void setup() throws Exception {
+083    this.admin = TEST_UTIL.getAdmin();
+084
+085    long tid = System.currentTimeMillis();
+086    tableName = TableName.valueOf(getValidMethodName() + "-" + tid);
+087    emptySnapshot = Bytes.toBytes("emptySnaptb-" + tid);
+088    snapshotName0 = Bytes.toBytes("snaptb0-" + tid);
+089    snapshotName1 = Bytes.toBytes("snaptb1-" + tid);
+090    snapshotName2 = Bytes.toBytes("snaptb2-" + tid);
+091
+092    // create Table and disable it
+093    createTable();
+094    admin.disableTable(tableName);
+095
+096    // take an empty snapshot
+097    admin.snapshot(emptySnapshot, tableName);
+098
+099    // enable table and insert data
+100    admin.enableTable(tableName);
+101    SnapshotTestingUtils.loadData(TEST_UTIL, tableName, 500, FAMILY);
+102    try (Table table = TEST_UTIL.getConnection().getTable(tableName))

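The setup above runs a disable / snapshot / enable / load / snapshot cycle so later tests can restore either state. A hedged sketch of that lifecycle against the Admin API (table and snapshot names are illustrative, and the exact snapshot/restoreSnapshot overloads should be checked against your HBase client version):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    class SnapshotLifecycleSketch {
      static void run() throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
          TableName tn = TableName.valueOf("demo");
          admin.disableTable(tn);
          admin.snapshot("demo-empty", tn);     // state before any data
          admin.enableTable(tn);
          // ... load some rows ...
          admin.snapshot("demo-loaded", tn);    // state after loading
          admin.disableTable(tn);
          admin.restoreSnapshot("demo-empty");  // roll the table back
          admin.enableTable(tn);
        }
      }
    }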
[10/51] [partial] hbase-site git commit: Published site at fa5fa6ecdd071b72b58971058ff3ab9d28c3e709.

2018-10-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d1341859/devapidocs/src-html/org/apache/hadoop/hbase/util/JVMClusterUtil.RegionServerThread.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/JVMClusterUtil.RegionServerThread.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/JVMClusterUtil.RegionServerThread.html
index f9d05cb..67f0fc6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/JVMClusterUtil.RegionServerThread.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/JVMClusterUtil.RegionServerThread.html
@@ -31,316 +31,325 @@
 023import java.lang.reflect.Constructor;
 024import java.lang.reflect.InvocationTargetException;
 025import java.util.List;
-026
-027import org.apache.yetus.audience.InterfaceAudience;
-028import org.slf4j.Logger;
-029import org.slf4j.LoggerFactory;
-030import org.apache.hadoop.conf.Configuration;
-031import org.apache.hadoop.hbase.CoordinatedStateManager;
-032import org.apache.hadoop.hbase.master.HMaster;
-033import org.apache.hadoop.hbase.regionserver.HRegionServer;
-034
-035/**
-036 * Utility used running a cluster all in the one JVM.
-037 */
-038@InterfaceAudience.Private
-039public class JVMClusterUtil {
-040  private static final Logger LOG = LoggerFactory.getLogger(JVMClusterUtil.class);
-041
-042  /**
-043   * Datastructure to hold RegionServer Thread and RegionServer instance
-044   */
-045  public static class RegionServerThread extends Thread {
-046    private final HRegionServer regionServer;
-047
-048    public RegionServerThread(final HRegionServer r, final int index) {
-049      super(r, "RS:" + index + ";" + r.getServerName().toShortString());
-050      this.regionServer = r;
-051    }
-052
-053    /** @return the region server */
-054    public HRegionServer getRegionServer() {
-055      return this.regionServer;
-056    }
-057
-058    /**
-059     * Block until the region server has come online, indicating it is ready
-060     * to be used.
-061     */
-062    public void waitForServerOnline() {
-063      // The server is marked online after the init method completes inside of
-064      // the HRS#run method.  HRS#init can fail for whatever region.  In those
-065      // cases, we'll jump out of the run without setting online flag.  Check
-066      // stopRequested so we don't wait here a flag that will never be flipped.
-067      regionServer.waitForServerOnline();
-068    }
-069  }
-070
-071  /**
-072   * Creates a {@link RegionServerThread}.
-073   * Call 'start' on the returned thread to make it run.
-074   * @param c Configuration to use.
-075   * @param hrsc Class to create.
-076   * @param index Used distinguishing the object returned.
-077   * @throws IOException
-078   * @return Region server added.
-079   */
-080  public static JVMClusterUtil.RegionServerThread createRegionServerThread(final Configuration c,
-081      final Class<? extends HRegionServer> hrsc, final int index) throws IOException {
-082    HRegionServer server;
-083    try {
-084      Constructor<? extends HRegionServer> ctor = hrsc.getConstructor(Configuration.class);
-085      ctor.setAccessible(true);
-086      server = ctor.newInstance(c);
-087    } catch (InvocationTargetException ite) {
-088      Throwable target = ite.getTargetException();
-089      throw new RuntimeException("Failed construction of RegionServer: " +
-090        hrsc.toString() + ((target.getCause() != null)?
-091          target.getCause().getMessage(): ""), target);
-092    } catch (Exception e) {
-093      IOException ioe = new IOException();
-094      ioe.initCause(e);
-095      throw ioe;
-096    }
-097    return new JVMClusterUtil.RegionServerThread(server, index);
-098  }
-099
+026import java.util.concurrent.TimeUnit;
+027import java.util.function.Supplier;
+028
+029import org.apache.yetus.audience.InterfaceAudience;
+030import org.slf4j.Logger;
+031import org.slf4j.LoggerFactory;
+032import org.apache.hadoop.conf.Configuration;
+033import org.apache.hadoop.hbase.master.HMaster;
+034import org.apache.hadoop.hbase.regionserver.HRegionServer;
+035
+036/**
+037 * Utility used running a cluster all in the one JVM.
+038 */
+039@InterfaceAudience.Private
+040public class JVMClusterUtil {
+041  private static final Logger LOG = LoggerFactory.getLogger(JVMClusterUtil.class);
+042
+043  /**
+044   * Datastructure to hold RegionServer Thread and RegionServer instance
+045   */
+046  public static class RegionServerThread extends Thread {
+047    private final HRegionServer regionServer;
+048
+049    public RegionServerThread(final HRegionServer r, final int index) {
+050      super(r, "RS:" + index + ";" + r.getServerName().toShortString());
+051      this.regionServer = r;
+052    }
+053
+054    /** @return the region server */
+055    public HRegionServer getRegionServer() {
+056      return this.regionServer;
+057    }
+058
+059    /**
+060     *

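createRegionServerThread above constructs the server reflectively so a test can substitute its own HRegionServer subclass via configuration. The same pattern in a generic, self-contained form (ReflectiveFactory is an illustrative name):

    import java.lang.reflect.Constructor;

    // Instantiate a pluggable implementation through a one-argument
    // constructor; InvocationTargetException surfaces constructor failures.
    final class ReflectiveFactory {
      static <T> T newInstance(Class<? extends T> impl, Class<?> argType, Object arg)
          throws ReflectiveOperationException {
        Constructor<? extends T> ctor = impl.getConstructor(argType);
        return ctor.newInstance(arg);
      }
    }

Usage mirrors the utility above: ReflectiveFactory.newInstance(MyServer.class, Configuration.class, conf).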
[10/51] [partial] hbase-site git commit: Published site at 6bc7089f9e0793efc9bdd46a84f5ccd9bc4579ad.

2018-09-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/419d0338/testdevapidocs/org/apache/hadoop/hbase/class-use/ProcedureTestUtil.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/class-use/ProcedureTestUtil.html b/testdevapidocs/org/apache/hadoop/hbase/class-use/ProcedureTestUtil.html
new file mode 100644
index 000..220f23b
--- /dev/null
+++ b/testdevapidocs/org/apache/hadoop/hbase/class-use/ProcedureTestUtil.html
@@ -0,0 +1,125 @@
+Uses of Class org.apache.hadoop.hbase.ProcedureTestUtil (Apache HBase 3.0.0-SNAPSHOT Test API)
+
+Uses of Class org.apache.hadoop.hbase.ProcedureTestUtil
+
+No usage of org.apache.hadoop.hbase.ProcedureTestUtil
+
+Copyright 2007-2018 The Apache Software Foundation. All rights reserved.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/419d0338/testdevapidocs/org/apache/hadoop/hbase/class-use/ResourceChecker.Phase.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/class-use/ResourceChecker.Phase.html b/testdevapidocs/org/apache/hadoop/hbase/class-use/ResourceChecker.Phase.html
index 109adb8..228105e 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/class-use/ResourceChecker.Phase.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/class-use/ResourceChecker.Phase.html
@@ -159,22 +159,6 @@ the order they are declared.
 int
 ResourceCheckerJUnitListener.AvailableMemoryMBResourceAnalyzer.getVal(ResourceChecker.Phase phase)

-int
-ResourceCheckerJUnitListener.MaxHeapMemoryMBResourceAnalyzer.getVal(ResourceChecker.Phase phase)
-
-int
-ResourceCheckerJUnitListener.UsedHeapMemoryMBResourceAnalyzer.getVal(ResourceChecker.Phase phase)
-
-int
-ResourceCheckerJUnitListener.GCCountResourceAnalyzer.getVal(ResourceChecker.Phase phase)
-
-int
-ResourceCheckerJUnitListener.GCTimeSecondResourceAnalyzer.getVal(ResourceChecker.Phase phase)
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/419d0338/testdevapidocs/org/apache/hadoop/hbase/class-use/ResourceChecker.ResourceAnalyzer.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/class-use/ResourceChecker.ResourceAnalyzer.html b/testdevapidocs/org/apache/hadoop/hbase/class-use/ResourceChecker.ResourceAnalyzer.html
index fe11c19..52d113a 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/class-use/ResourceChecker.ResourceAnalyzer.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/class-use/ResourceChecker.ResourceAnalyzer.html
@@ -108,39 +108,23 @@
 
 
 (package private) static class
-ResourceCheckerJUnitListener.GCCountResourceAnalyzer
-
-
-(package private) static class
-ResourceCheckerJUnitListener.GCTimeSecondResourceAnalyzer
-
-
-(package private) static class
 ResourceCheckerJUnitListener.MaxFileDescriptorResourceAnalyzer
 
 
 (package private) static class
-ResourceCheckerJUnitListener.MaxHeapMemoryMBResourceAnalyzer
-
-
-(package private) static class
 ResourceCheckerJUnitListener.OpenFileDescriptorResourceAnalyzer
 
-
-(package private) static class
-ResourceCheckerJUnitListener.ProcessCountResourceAnalyzer
-
 
 (package private) static class
-ResourceCheckerJUnitListener.SystemLoadAverageResourceAnalyzer
+ResourceCheckerJUnitListener.ProcessCountResourceAnalyzer
 
 
 (package private) static class
-ResourceCheckerJUnitListener.ThreadResourceAnalyzer
+ResourceCheckerJUnitListener.SystemLoadAverageResourceAnalyzer
 
 
 (package private) static class
-ResourceCheckerJUnitListener.UsedHeapMemoryMBResourceAnalyzer
+ResourceCheckerJUnitListener.ThreadResourceAnalyzer
 
 
 

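The analyzers listed above all implement getVal(Phase); the JUnit listener samples each one before and after a test and reports resources that leaked. A minimal sketch of that sample-and-diff pattern (Phase, ThreadCountAnalyzer and Checker are illustrative):

    enum Phase { INITIAL, END }

    abstract class ResourceAnalyzer {
      abstract int getVal(Phase phase);
      String name() { return getClass().getSimpleName(); }
    }

    class ThreadCountAnalyzer extends ResourceAnalyzer {
      @Override
      int getVal(Phase phase) {
        return Thread.activeCount();  // same probe at both phases; the caller diffs
      }
    }

    class Checker {
      static void report(ResourceAnalyzer a, Runnable test) {
        int before = a.getVal(Phase.INITIAL);
        test.run();
        int after = a.getVal(Phase.END);
        System.out.println(a.name() + " delta: " + (after - before));
      }
    }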

[10/51] [partial] hbase-site git commit: Published site at d7e08317d2f214e4cca7b67578aba0ed7a567d54.

2018-09-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37cf49a6/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
index 566f410..da040ad 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
@@ -341,8361 +341,8425 @@
 333  private final int rowLockWaitDuration;
 334  static final int DEFAULT_ROWLOCK_WAIT_DURATION = 3;
 335
-336  // The internal wait duration to acquire a lock before read/update
-337  // from the region. It is not per row. The purpose of this wait time
-338  // is to avoid waiting a long time while the region is busy, so that
-339  // we can release the IPC handler soon enough to improve the
-340  // availability of the region server. It can be adjusted by
-341  // tuning configuration "hbase.busy.wait.duration".
-342  final long busyWaitDuration;
-343  static final long DEFAULT_BUSY_WAIT_DURATION = HConstants.DEFAULT_HBASE_RPC_TIMEOUT;
-344
-345  // If updating multiple rows in one call, wait longer,
-346  // i.e. waiting for busyWaitDuration * # of rows. However,
-347  // we can limit the max multiplier.
-348  final int maxBusyWaitMultiplier;
-349
-350  // Max busy wait duration. There is no point to wait longer than the RPC
-351  // purge timeout, when a RPC call will be terminated by the RPC engine.
-352  final long maxBusyWaitDuration;
-353
-354  // Max cell size. If nonzero, the maximum allowed size for any given cell
-355  // in bytes
-356  final long maxCellSize;
-357
-358  // Number of mutations for minibatch processing.
-359  private final int miniBatchSize;
+336  private Path regionDir;
+337  private FileSystem walFS;
+338
+339  // The internal wait duration to acquire a lock before read/update
+340  // from the region. It is not per row. The purpose of this wait time
+341  // is to avoid waiting a long time while the region is busy, so that
+342  // we can release the IPC handler soon enough to improve the
+343  // availability of the region server. It can be adjusted by
+344  // tuning configuration "hbase.busy.wait.duration".
+345  final long busyWaitDuration;
+346  static final long DEFAULT_BUSY_WAIT_DURATION = HConstants.DEFAULT_HBASE_RPC_TIMEOUT;
+347
+348  // If updating multiple rows in one call, wait longer,
+349  // i.e. waiting for busyWaitDuration * # of rows. However,
+350  // we can limit the max multiplier.
+351  final int maxBusyWaitMultiplier;
+352
+353  // Max busy wait duration. There is no point to wait longer than the RPC
+354  // purge timeout, when a RPC call will be terminated by the RPC engine.
+355  final long maxBusyWaitDuration;
+356
+357  // Max cell size. If nonzero, the maximum allowed size for any given cell
+358  // in bytes
+359  final long maxCellSize;
 360
-361  // negative number indicates infinite timeout
-362  static final long DEFAULT_ROW_PROCESSOR_TIMEOUT = 60 * 1000L;
-363  final ExecutorService rowProcessorExecutor = Executors.newCachedThreadPool();
-364
-365  private final ConcurrentHashMap<RegionScanner, Long> scannerReadPoints;
-366
-367  /**
-368   * The sequence ID that was enLongAddered when this region was opened.
-369   */
-370  private long openSeqNum = HConstants.NO_SEQNUM;
-371
-372  /**
-373   * The default setting for whether to enable on-demand CF loading for
-374   * scan requests to this region. Requests can override it.
-375   */
-376  private boolean isLoadingCfsOnDemandDefault = false;
-377
-378  private final AtomicInteger majorInProgress = new AtomicInteger(0);
-379  private final AtomicInteger minorInProgress = new AtomicInteger(0);
+361  // Number of mutations for minibatch processing.
+362  private final int miniBatchSize;
+363
+364  // negative number indicates infinite timeout
+365  static final long DEFAULT_ROW_PROCESSOR_TIMEOUT = 60 * 1000L;
+366  final ExecutorService rowProcessorExecutor = Executors.newCachedThreadPool();
+367
+368  private final ConcurrentHashMap<RegionScanner, Long> scannerReadPoints;
+369
+370  /**
+371   * The sequence ID that was enLongAddered when this region was opened.
+372   */
+373  private long openSeqNum = HConstants.NO_SEQNUM;
+374
+375  /**
+376   * The default setting for whether to enable on-demand CF loading for
+377   * scan requests to this region. Requests can override it.
+378   */
+379  private boolean isLoadingCfsOnDemandDefault = false;
 380
-381  //
-382  // Context: During replay we want to ensure that we do not lose any data. So, we
-383  // have to be conservative in how we replay wals. For each store, we calculate
-384  // the maxSeqId up to which the store was flushed. And, skip

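The busy-wait comments above describe a per-row wait that is capped twice: by a maximum row multiplier and by an absolute ceiling tied to the RPC purge timeout. A sketch of that arithmetic (class, method and parameter names are illustrative):

    final class BusyWaitMath {
      // Wait busyWaitDuration per row, but never more than
      // maxBusyWaitMultiplier rows' worth, and never past maxBusyWaitDuration.
      static long clampedBusyWait(long busyWaitDuration, int rows,
          int maxBusyWaitMultiplier, long maxBusyWaitDuration) {
        return Math.min(maxBusyWaitDuration,
            busyWaitDuration * Math.min(rows, maxBusyWaitMultiplier));
      }
    }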
[10/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
index 25f458d..20e3eaa 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
@@ -28,3711 +28,3756 @@
 020import java.io.FileNotFoundException;
 021import java.io.IOException;
 022import java.io.InterruptedIOException;
-023import java.lang.reflect.InvocationTargetException;
-024import java.net.BindException;
-025import java.net.InetSocketAddress;
-026import java.net.UnknownHostException;
-027import java.nio.ByteBuffer;
-028import java.util.ArrayList;
-029import java.util.Arrays;
-030import java.util.Collections;
-031import java.util.HashMap;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Map;
-035import java.util.Map.Entry;
-036import java.util.NavigableMap;
-037import java.util.Set;
-038import java.util.TreeSet;
-039import java.util.concurrent.ConcurrentHashMap;
-040import java.util.concurrent.ConcurrentMap;
-041import java.util.concurrent.TimeUnit;
-042import java.util.concurrent.atomic.AtomicBoolean;
-043import java.util.concurrent.atomic.AtomicLong;
-044import java.util.concurrent.atomic.LongAdder;
-045import org.apache.commons.lang3.mutable.MutableObject;
-046import org.apache.hadoop.conf.Configuration;
-047import org.apache.hadoop.fs.Path;
-048import org.apache.hadoop.hbase.ByteBufferExtendedCell;
-049import org.apache.hadoop.hbase.CacheEvictionStats;
-050import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
-051import org.apache.hadoop.hbase.Cell;
-052import org.apache.hadoop.hbase.CellScannable;
-053import org.apache.hadoop.hbase.CellScanner;
-054import org.apache.hadoop.hbase.CellUtil;
-055import org.apache.hadoop.hbase.CompareOperator;
-056import org.apache.hadoop.hbase.DoNotRetryIOException;
-057import org.apache.hadoop.hbase.DroppedSnapshotException;
-058import org.apache.hadoop.hbase.HBaseIOException;
-059import org.apache.hadoop.hbase.HConstants;
-060import org.apache.hadoop.hbase.MultiActionResultTooLarge;
-061import org.apache.hadoop.hbase.NotServingRegionException;
-062import org.apache.hadoop.hbase.PrivateCellUtil;
-063import org.apache.hadoop.hbase.RegionTooBusyException;
-064import org.apache.hadoop.hbase.Server;
-065import org.apache.hadoop.hbase.ServerName;
-066import org.apache.hadoop.hbase.TableName;
-067import org.apache.hadoop.hbase.UnknownScannerException;
-068import org.apache.hadoop.hbase.client.Append;
-069import org.apache.hadoop.hbase.client.ConnectionUtils;
-070import org.apache.hadoop.hbase.client.Delete;
-071import org.apache.hadoop.hbase.client.Durability;
-072import org.apache.hadoop.hbase.client.Get;
-073import org.apache.hadoop.hbase.client.Increment;
-074import org.apache.hadoop.hbase.client.Mutation;
-075import org.apache.hadoop.hbase.client.Put;
-076import org.apache.hadoop.hbase.client.RegionInfo;
-077import org.apache.hadoop.hbase.client.RegionReplicaUtil;
-078import org.apache.hadoop.hbase.client.Result;
-079import org.apache.hadoop.hbase.client.Row;
-080import org.apache.hadoop.hbase.client.RowMutations;
-081import org.apache.hadoop.hbase.client.Scan;
-082import org.apache.hadoop.hbase.client.TableDescriptor;
-083import org.apache.hadoop.hbase.client.VersionInfoUtil;
-084import org.apache.hadoop.hbase.conf.ConfigurationObserver;
-085import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-086import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
-087import org.apache.hadoop.hbase.exceptions.ScannerResetException;
-088import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-089import org.apache.hadoop.hbase.filter.ByteArrayComparable;
-090import org.apache.hadoop.hbase.io.TimeRange;
-091import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
-092import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-093import org.apache.hadoop.hbase.ipc.PriorityFunction;
-094import org.apache.hadoop.hbase.ipc.QosPriority;
-095import org.apache.hadoop.hbase.ipc.RpcCallContext;
-096import org.apache.hadoop.hbase.ipc.RpcCallback;
-097import org.apache.hadoop.hbase.ipc.RpcScheduler;
-098import org.apache.hadoop.hbase.ipc.RpcServer;
-099import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
-100import org.apache.hadoop.hbase.ipc.RpcServerFactory;
-101import org.apache.hadoop.hbase.ipc.RpcServerInterface;
-102import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-103import

[10/51] [partial] hbase-site git commit: Published site at cd161d976ef47b84e904f2d54bac65d2f3417c2a.

2018-09-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/fa1bebf8/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
index 2c14c50..43c66a8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
@@ -46,2104 +46,2113 @@
 038import java.util.concurrent.atomic.AtomicLong;
 039import java.util.stream.Collectors;
 040import java.util.stream.Stream;
-041import org.apache.hadoop.conf.Configuration;
-042import org.apache.hadoop.hbase.HConstants;
-043import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
-044import org.apache.hadoop.hbase.log.HBaseMarkers;
-045import org.apache.hadoop.hbase.procedure2.Procedure.LockState;
-046import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
-047import org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator;
-048import org.apache.hadoop.hbase.procedure2.util.StringUtils;
-049import org.apache.hadoop.hbase.security.User;
-050import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-051import org.apache.hadoop.hbase.util.IdLock;
-052import org.apache.hadoop.hbase.util.NonceKey;
-053import org.apache.hadoop.hbase.util.Threads;
-054import org.apache.yetus.audience.InterfaceAudience;
-055import org.slf4j.Logger;
-056import org.slf4j.LoggerFactory;
-057
-058import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-059import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-060
-061import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;
-062
-063/**
-064 * Thread Pool that executes the submitted procedures.
-065 * The executor has a ProcedureStore associated.
-066 * Each operation is logged and on restart the pending procedures are resumed.
-067 *
-068 * Unless the Procedure code throws an error (e.g. invalid user input)
-069 * the procedure will complete (at some point in time), On restart the pending
-070 * procedures are resumed and the once failed will be rolledback.
-071 *
-072 * The user can add procedures to the executor via submitProcedure(proc)
-073 * check for the finished state via isFinished(procId)
-074 * and get the result via getResult(procId)
-075 */
-076@InterfaceAudience.Private
-077public class ProcedureExecutor<TEnvironment> {
-078  private static final Logger LOG = LoggerFactory.getLogger(ProcedureExecutor.class);
-079
-080  public static final String CHECK_OWNER_SET_CONF_KEY = "hbase.procedure.check.owner.set";
-081  private static final boolean DEFAULT_CHECK_OWNER_SET = false;
-082
-083  public static final String WORKER_KEEP_ALIVE_TIME_CONF_KEY =
-084      "hbase.procedure.worker.keep.alive.time.msec";
-085  private static final long DEFAULT_WORKER_KEEP_ALIVE_TIME = TimeUnit.MINUTES.toMillis(1);
-086
-087  /**
-088   * {@link #testing} is non-null when ProcedureExecutor is being tested. Tests will try to
-089   * break PE having it fail at various junctures. When non-null, testing is set to an instance of
-090   * the below internal {@link Testing} class with flags set for the particular test.
-091   */
-092  Testing testing = null;
-093
-094  /**
-095   * Class with parameters describing how to fail/die when in testing-context.
-096   */
-097  public static class Testing {
-098    protected boolean killIfHasParent = true;
-099    protected boolean killIfSuspended = false;
-100
-101    /**
-102     * Kill the PE BEFORE we store state to the WAL. Good for figuring out if a Procedure is
-103     * persisting all the state it needs to recover after a crash.
-104     */
-105    protected boolean killBeforeStoreUpdate = false;
-106    protected boolean toggleKillBeforeStoreUpdate = false;
-107
-108    /**
-109     * Set when we want to fail AFTER state has been stored into the WAL. Rarely used. HBASE-20978
-110     * is about a case where memory-state was being set after store to WAL where a crash could
-111     * cause us to get stuck. This flag allows killing at what was a vulnerable time.
-112     */
-113    protected boolean killAfterStoreUpdate = false;
-114    protected boolean toggleKillAfterStoreUpdate = false;
-115
-116    protected boolean shouldKillBeforeStoreUpdate() {
-117      final boolean kill = this.killBeforeStoreUpdate;
-118      if (this.toggleKillBeforeStoreUpdate) {
-119        this.killBeforeStoreUpdate = !kill;
-120        LOG.warn("Toggle KILL before store update to: " + this.killBeforeStoreUpdate);
-121      }
-122      return kill;
-123    }
-124
-125    protected boolean

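The Testing class above injects crashes immediately before or after the WAL write, and the toggle flags flip the kill switch on every query so alternating retries survive. A compact sketch of that toggle-kill pattern (KillSwitch is an illustrative name):

    class KillSwitch {
      private boolean kill;
      private final boolean toggle;

      KillSwitch(boolean kill, boolean toggle) {
        this.kill = kill;
        this.toggle = toggle;
      }

      // Query once per store update; with toggle set, the next query
      // returns the opposite answer, so every other attempt survives.
      synchronized boolean shouldKill() {
        boolean result = kill;
        if (toggle) {
          kill = !kill;
        }
        return result;
      }
    }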
[10/51] [partial] hbase-site git commit: Published site at c6a65ba63fce85ac7c4b62b96ef2bbe6c35d2f00.

2018-09-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/293abb17/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
index c372545..af3b364 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
@@ -1279,322 +1279,339 @@
 1271    List<RegionInfo> lastFewRegions = new ArrayList<>();
 1272    // assign the remaining by going through the list and try to assign to servers one-by-one
 1273    int serverIdx = RANDOM.nextInt(numServers);
-1274    for (RegionInfo region : unassignedRegions) {
+1274    OUTER : for (RegionInfo region : unassignedRegions) {
 1275      boolean assigned = false;
-1276      for (int j = 0; j < numServers; j++) { // try all servers one by one
+1276      INNER : for (int j = 0; j < numServers; j++) { // try all servers one by one
 1277        ServerName serverName = servers.get((j + serverIdx) % numServers);
 1278        if (!cluster.wouldLowerAvailability(region, serverName)) {
 1279          List<RegionInfo> serverRegions =
 1280              assignments.computeIfAbsent(serverName, k -> new ArrayList<>());
-1281          serverRegions.add(region);
-1282          cluster.doAssignRegion(region, serverName);
-1283          serverIdx = (j + serverIdx + 1) % numServers; //remain from next server
-1284          assigned = true;
-1285          break;
-1286        }
-1287      }
-1288      if (!assigned) {
-1289        lastFewRegions.add(region);
-1290      }
-1291    }
-1292    // just sprinkle the rest of the regions on random regionservers. The balanceCluster will
-1293    // make it optimal later. we can end up with this if numReplicas > numServers.
-1294    for (RegionInfo region : lastFewRegions) {
-1295      int i = RANDOM.nextInt(numServers);
-1296      ServerName server = servers.get(i);
-1297      List<RegionInfo> serverRegions = assignments.computeIfAbsent(server, k -> new ArrayList<>());
-1298      serverRegions.add(region);
-1299      cluster.doAssignRegion(region, server);
-1300    }
-1301    return assignments;
-1302  }
-1303
-1304  protected Cluster createCluster(List<ServerName> servers, Collection<RegionInfo> regions) {
-1305    // Get the snapshot of the current assignments for the regions in question, and then create
-1306    // a cluster out of it. Note that we might have replicas already assigned to some servers
-1307    // earlier. So we want to get the snapshot to see those assignments, but this will only contain
-1308    // replicas of the regions that are passed (for performance).
-1309    Map<ServerName, List<RegionInfo>> clusterState = getRegionAssignmentsByServer(regions);
-1310
-1311    for (ServerName server : servers) {
-1312      if (!clusterState.containsKey(server)) {
-1313        clusterState.put(server, EMPTY_REGION_LIST);
-1314      }
-1315    }
-1316    return new Cluster(regions, clusterState, null, this.regionFinder,
-1317        rackManager);
-1318  }
-1319
-1320  private List<ServerName> findIdleServers(List<ServerName> servers) {
-1321    return this.services.getServerManager()
-1322        .getOnlineServersListWithPredicator(servers, IDLE_SERVER_PREDICATOR);
-1323  }
-1324
-1325  /**
-1326   * Used to assign a single region to a random server.
-1327   */
-1328  @Override
-1329  public ServerName randomAssignment(RegionInfo regionInfo, List<ServerName> servers)
-1330      throws HBaseIOException {
-1331    metricsBalancer.incrMiscInvocations();
-1332    if (servers != null && servers.contains(masterServerName)) {
-1333      if (shouldBeOnMaster(regionInfo)) {
-1334        return masterServerName;
-1335      }
-1336      if (!LoadBalancer.isTablesOnMaster(getConf())) {
-1337        // Guarantee we do not put any regions on master
-1338        servers = new ArrayList<>(servers);
-1339        servers.remove(masterServerName);
-1340      }
-1341    }
-1342
-1343    int numServers = servers == null ? 0 : servers.size();
-1344    if (numServers == 0) {
-1345      LOG.warn("Wanted to retain assignment but no servers to assign to");
-1346      return null;
-1347    }
-1348    if (numServers == 1) { // Only one server, nothing fancy we can do here
-1349      return servers.get(0);
-1350    }
-1351    List<ServerName> idleServers = findIdleServers(servers);
-1352    if (idleServers.size() == 1) {
-1353      return idleServers.get(0);
-1354    }
-1355    final List<ServerName> finalServers = idleServers.isEmpty() ?
-1356        servers : idleServers;
-1357    List<RegionInfo> regions = Lists.newArrayList(regionInfo);
-1358    Cluster

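The assignment loop above probes servers round-robin from a random start, skips placements the availability check rejects, remembers where it stopped, and sprinkles anything left on random servers. A generic, self-contained sketch of that strategy (RoundRobinAssigner and the BiPredicate stand in for the balancer's cluster checks; assumes a non-empty server list):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.Random;
    import java.util.function.BiPredicate;

    class RoundRobinAssigner<R, S> {
      private final Random random = new Random();

      Map<S, List<R>> assign(List<R> regions, List<S> servers,
          BiPredicate<R, S> wouldLowerAvailability) {
        Map<S, List<R>> assignments = new HashMap<>();
        List<R> leftovers = new ArrayList<>();
        int idx = random.nextInt(servers.size());
        for (R region : regions) {
          boolean assigned = false;
          for (int j = 0; j < servers.size(); j++) {  // try all servers once
            S server = servers.get((j + idx) % servers.size());
            if (!wouldLowerAvailability.test(region, server)) {
              assignments.computeIfAbsent(server, k -> new ArrayList<>()).add(region);
              idx = (j + idx + 1) % servers.size();   // resume from the next server
              assigned = true;
              break;
            }
          }
          if (!assigned) {
            leftovers.add(region);
          }
        }
        for (R region : leftovers) {                  // random sprinkle, fixed up later
          S server = servers.get(random.nextInt(servers.size()));
          assignments.computeIfAbsent(server, k -> new ArrayList<>()).add(region);
        }
        return assignments;
      }
    }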
[10/51] [partial] hbase-site git commit: Published site at 7c1fad4992a169a35b4457e6f4afcb30d04406e9.

2018-08-31 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/74f60271/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.KeepAliveWorkerThread.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.KeepAliveWorkerThread.html b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.KeepAliveWorkerThread.html
index d11176a..2c14c50 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.KeepAliveWorkerThread.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.KeepAliveWorkerThread.html
@@ -982,1050 +982,1168 @@
 974  }
 975
 976  /**
-977   * Add a new root-procedure to the executor.
-978   * @param proc the new procedure to execute.
-979   * @param nonceKey the registered unique identifier for this operation from the client or process.
-980   * @return the procedure id, that can be used to monitor the operation
-981   */
-982  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NP_NULL_ON_SOME_PATH",
-983      justification = "FindBugs is blind to the check-for-null")
-984  public long submitProcedure(Procedure<TEnvironment> proc, NonceKey nonceKey) {
-985    Preconditions.checkArgument(lastProcId.get() >= 0);
-986
-987    prepareProcedure(proc);
-988
-989    final Long currentProcId;
-990    if (nonceKey != null) {
-991      currentProcId = nonceKeysToProcIdsMap.get(nonceKey);
-992      Preconditions.checkArgument(currentProcId != null,
-993        "Expected nonceKey=" + nonceKey + " to be reserved, use registerNonce(); proc=" + proc);
-994    } else {
-995      currentProcId = nextProcId();
-996    }
-997
-998    // Initialize the procedure
-999    proc.setNonceKey(nonceKey);
-1000    proc.setProcId(currentProcId.longValue());
-1001
-1002    // Commit the transaction
-1003    store.insert(proc, null);
-1004    LOG.debug("Stored {}", proc);
-1005
-1006    // Add the procedure to the executor
-1007    return pushProcedure(proc);
-1008  }
-1009
-1010  /**
-1011   * Add a set of new root-procedure to the executor.
-1012   * @param procs the new procedures to execute.
-1013   */
-1014  // TODO: Do we need to take nonces here?
-1015  public void submitProcedures(Procedure<TEnvironment>[] procs) {
-1016    Preconditions.checkArgument(lastProcId.get() >= 0);
-1017    if (procs == null || procs.length <= 0) {
-1018      return;
-1019    }
-1020
-1021    // Prepare procedure
-1022    for (int i = 0; i < procs.length; ++i) {
-1023      prepareProcedure(procs[i]).setProcId(nextProcId());
-1024    }
-1025
-1026    // Commit the transaction
-1027    store.insert(procs);
-1028    if (LOG.isDebugEnabled()) {
-1029      LOG.debug("Stored " + Arrays.toString(procs));
-1030    }
-1031
-1032    // Add the procedure to the executor
-1033    for (int i = 0; i < procs.length; ++i) {
-1034      pushProcedure(procs[i]);
-1035    }
-1036  }
-1037
-1038  private Procedure<TEnvironment> prepareProcedure(Procedure<TEnvironment> proc) {
-1039    Preconditions.checkArgument(proc.getState() == ProcedureState.INITIALIZING);
-1040    Preconditions.checkArgument(!proc.hasParent(), "unexpected parent", proc);
-1041    if (this.checkOwnerSet) {
-1042      Preconditions.checkArgument(proc.hasOwner(), "missing owner");
-1043    }
-1044    return proc;
-1045  }
-1046
-1047  private long pushProcedure(Procedure<TEnvironment> proc) {
-1048    final long currentProcId = proc.getProcId();
+977   * Bypass a procedure. If the procedure is set to bypass, all the logic in
+978   * execute/rollback will be ignored and it will return success, whatever.
+979   * It is used to recover buggy stuck procedures, releasing the lock resources
+980   * and letting other procedures to run. Bypassing one procedure (and its ancestors will
+981   * be bypassed automatically) may leave the cluster in a middle state, e.g. region
+982   * not assigned, or some hdfs files left behind. After getting rid of those stuck procedures,
+983   * the operators may have to do some clean up on hdfs or schedule some assign procedures
+984   * to let region online. DO AT YOUR OWN RISK.
+985   * <p>
+986   * A procedure can be bypassed only if
+987   * 1. The procedure is in state of RUNNABLE, WAITING, WAITING_TIMEOUT
+988   * or it is a root procedure without any child.
+989   * 2. No other worker thread is executing it
+990   * 3. No child procedure has been submitted
+991   *
+992   * <p>
+993   * If all the requirements are meet, the procedure and its ancestors will be
+994   * bypassed and persisted to WAL.
+995   *
+996   * <p>
+997   * If the procedure is in WAITING state, will set it to RUNNABLE add it to run queue.
+998   * TODO: What about WAITING_TIMEOUT?
+999   * @param id the procedure id
+1000   * @param lockWait time to wait lock
+1001   * @param force if force set to true, we will bypass the procedure even if it is executing.
+1002

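The bypass javadoc above lists three preconditions before a stuck procedure may be skipped. A sketch of that gate as a pure function (the enum and flags are simplified stand-ins for the procedure2 state machine):

    final class BypassCheck {
      enum State { RUNNABLE, WAITING, WAITING_TIMEOUT, RUNNING, SUCCESS }

      // Bypass only an idle, childless procedure in a runnable/waiting state.
      static boolean canBypass(State state, boolean beingExecuted, boolean hasChildren) {
        boolean stateOk = state == State.RUNNABLE
            || state == State.WAITING
            || state == State.WAITING_TIMEOUT;
        return stateOk && !beingExecuted && !hasChildren;
      }
    }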
[10/51] [partial] hbase-site git commit: Published site at 3afe9fb7e6ebfa71187cbe131558a83fae61cecd.

2018-08-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/424d7e41/testdevapidocs/org/apache/hadoop/hbase/TestClientClusterStatus.MyObserver.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/TestClientClusterStatus.MyObserver.html b/testdevapidocs/org/apache/hadoop/hbase/TestClientClusterStatus.MyObserver.html
index 0087030..0e52379 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/TestClientClusterStatus.MyObserver.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/TestClientClusterStatus.MyObserver.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";

-public static class TestClientClusterStatus.MyObserver
+public static class TestClientClusterStatus.MyObserver
 extends java.lang.Object
 implements org.apache.hadoop.hbase.coprocessor.MasterCoprocessor, org.apache.hadoop.hbase.coprocessor.MasterObserver

@@ -255,7 +255,7 @@ implements org.apache.hadoop.hbase.coprocessor.MasterCoprocessor, org.apache.had

 PRE_COUNT
-private static final java.util.concurrent.atomic.AtomicInteger PRE_COUNT
+private static final java.util.concurrent.atomic.AtomicInteger PRE_COUNT

@@ -264,7 +264,7 @@ implements org.apache.hadoop.hbase.coprocessor.MasterCoprocessor, org.apache.had

 POST_COUNT
-private static final java.util.concurrent.atomic.AtomicInteger POST_COUNT
+private static final java.util.concurrent.atomic.AtomicInteger POST_COUNT

@@ -281,7 +281,7 @@ implements org.apache.hadoop.hbase.coprocessor.MasterCoprocessor, org.apache.had

 MyObserver
-public MyObserver()
+public MyObserver()

@@ -298,7 +298,7 @@ implements org.apache.hadoop.hbase.coprocessor.MasterCoprocessor, org.apache.had

 getMasterObserver
-public java.util.Optional<org.apache.hadoop.hbase.coprocessor.MasterObserver> getMasterObserver()
+public java.util.Optional<org.apache.hadoop.hbase.coprocessor.MasterObserver> getMasterObserver()

 Specified by:
 getMasterObserver in interface org.apache.hadoop.hbase.coprocessor.MasterCoprocessor

@@ -311,7 +311,7 @@ implements org.apache.hadoop.hbase.coprocessor.MasterCoprocessor, org.apache.had

 preGetClusterMetrics
-public void preGetClusterMetrics(org.apache.hadoop.hbase.coprocessor.ObserverContext<org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment> ctx)
+public void preGetClusterMetrics(org.apache.hadoop.hbase.coprocessor.ObserverContext<org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment> ctx)
   throws java.io.IOException

 Specified by:
@@ -327,7 +327,7 @@ implements org.apache.hadoop.hbase.coprocessor.MasterCoprocessor, org.apache.had

 postGetClusterMetrics
-public void postGetClusterMetrics(org.apache.hadoop.hbase.coprocessor.ObserverContext<org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment> ctx,
+public void postGetClusterMetrics(org.apache.hadoop.hbase.coprocessor.ObserverContext<org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment> ctx,
   org.apache.hadoop.hbase.ClusterMetrics status)
    throws java.io.IOException

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/424d7e41/testdevapidocs/org/apache/hadoop/hbase/TestClientClusterStatus.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/TestClientClusterStatus.html b/testdevapidocs/org/apache/hadoop/hbase/TestClientClusterStatus.html
index c3f8173..38bad1f 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/TestClientClusterStatus.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/TestClientClusterStatus.html
@@ -386,7 +386,7 @@ extends java.lang.Object

 testDefaults
-public void testDefaults()
+public void testDefaults()
   throws

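MyObserver above bumps static counters in its pre/post hooks so the test can assert that both fired for each getClusterMetrics call. The counting pattern in isolation (CountingObserver is an illustrative stand-in for the coprocessor interfaces):

    import java.util.concurrent.atomic.AtomicInteger;

    class CountingObserver {
      static final AtomicInteger PRE_COUNT = new AtomicInteger(0);
      static final AtomicInteger POST_COUNT = new AtomicInteger(0);

      void preGetClusterMetrics() {
        PRE_COUNT.incrementAndGet();   // before the master builds the metrics
      }

      void postGetClusterMetrics() {
        POST_COUNT.incrementAndGet();  // after the metrics are produced
      }
    }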
[10/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/devapidocs/org/apache/hadoop/hbase/filter/TimestampsFilter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/TimestampsFilter.html 
b/devapidocs/org/apache/hadoop/hbase/filter/TimestampsFilter.html
index dae8455..f103137 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/TimestampsFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/TimestampsFilter.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":9,"i2":10,"i3":42,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":9,"i10":10,"i11":10,"i12":10};
+var methods = 
{"i0":10,"i1":9,"i2":10,"i3":10,"i4":42,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":9,"i12":10,"i13":10,"i14":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -120,7 +120,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class TimestampsFilter
+public class TimestampsFilter
 extends FilterBase
 Filter that returns only cells whose timestamp (version) is
  in the specified list of timestamps (versions).
@@ -239,60 +239,68 @@ extends createFilterFromArguments(https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
+boolean
+equals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
+
+
 Filter.ReturnCode
 filterCell(Cellc)
 A way to filter based on the column family, column 
qualifier and/or the column value.
 
 
-
+
 Filter.ReturnCode
 filterKeyValue(Cellc)
 Deprecated.
 
 
-
+
 boolean
 filterRowKey(Cellcell)
 Filters a row based on the row key.
 
 
-
+
 long
 getMin()
 Gets the minimum timestamp requested by filter.
 
 
-
+
 Cell
 getNextCellHint(CellcurrentCell)
 Pick the next cell that the scanner should seek to.
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttps://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
 getTimestamps()
 
-
+
+int
+hashCode()
+
+
 private void
 init()
 
-
+
 static TimestampsFilter
 parseFrom(byte[]pbBytes)
 
-
+
 byte[]
 toByteArray()
 Return length 0 byte array for Filters that don't require 
special serialization
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString()
 Return filter's info for debugging and logging 
purpose.
 
 
-
+
 protected https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString(intmaxTimestamps)
 
@@ -316,7 +324,7 @@ (methods inherited from class java.lang.Object)
-clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait
+clone, finalize, getClass, notify, notifyAll, wait, wait, wait
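What changed in the class itself: TimestampsFilter now overrides equals(Object) and hashCode(), which is why the two names drop out of the inherited-from-Object list above. A minimal sketch of such a pair for a filter keyed on its timestamp list; the class and field below are stand-ins, not the HBase source. Overriding both together keeps the equals/hashCode contract intact for hash-based collections.

import java.util.List;
import java.util.Objects;

// Minimal sketch only: an object whose identity is its timestamp list.
final class TimestampListKeyed {
  private final List<Long> timestamps;  // assumed field; mirrors getTimestamps()

  TimestampListKeyed(List<Long> timestamps) {
    this.timestamps = timestamps;
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;   // reflexive fast path
    }
    if (!(obj instanceof TimestampListKeyed)) {
      return false;  // also covers null
    }
    return timestamps.equals(((TimestampListKeyed) obj).timestamps);
  }

  @Override
  public int hashCode() {
    // Must agree with equals: equal timestamp lists produce equal hashes.
    return Objects.hash(timestamps);
  }
}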

[10/51] [partial] hbase-site git commit: Published site at 6a5b4f2a5c188f8eef4f2250b8b7db7dd1e750e4.

2018-08-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1ff05a18/license.html
--
diff --git a/license.html b/license.html
index 4e3807b..3c955ea 100644
--- a/license.html
+++ b/license.html
@@ -7,7 +7,7 @@
 (only the generated <meta> date line in the page head changes)
 Apache HBase  Project Licenses
@@ -491,7 +491,7 @@
 https://www.apache.org/ The Apache Software Foundation.
 All rights reserved.
-  Last Published: 2018-08-22
+  Last Published: 2018-08-23

The same two-line change (the generated <meta> date and "Last Published: 2018-08-22" becoming "2018-08-23") repeats verbatim in mail-lists.html, metrics.html, old_news.html, plugin-management.html, plugins.html, poweredbyhbase.html and project-info.html.

[10/51] [partial] hbase-site git commit: Published site at 63f2d3cbdc8151f5f61f33e0a078c51b9ac076a5.

2018-08-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7ae6a80c/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ProcedurePrepareLatch.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ProcedurePrepareLatch.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ProcedurePrepareLatch.html
index a274fa7..c18c84e 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ProcedurePrepareLatch.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/ProcedurePrepareLatch.html
@@ -185,15 +185,9 @@ (fields declared as ProcedurePrepareLatch)
-private ProcedurePrepareLatch RecoverMetaProcedure.syncLatch
-  Deprecated.
 private ProcedurePrepareLatch AbstractStateMachineTableProcedure.syncLatch
 private ProcedurePrepareLatch AbstractStateMachineNamespaceProcedure.syncLatch
@@ -309,14 +303,6 @@ (constructors with ProcedurePrepareLatch parameters)
     ProcedurePrepareLatch latch)
-RecoverMetaProcedure(ServerName failedMetaServer,
-    boolean shouldSplitLog,
-    ProcedurePrepareLatch latch)
-  Deprecated. Constructor with latch, for blocking/sync usage
 TruncateTableProcedure(MasterProcedureEnv env,
     TableName tableName,
     boolean preserveSplits,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7ae6a80c/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
index 0c685e9..29849e8 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
@@ -116,15 +116,21 @@ (methods returning TableProcedureInterface.TableOperationType)
-UnassignProcedure.getTableOperationType()
+UnassignProcedure.getTableOperationType()
+  Deprecated.
-MoveRegionProcedure.getTableOperationType()
+MoveRegionProcedure.getTableOperationType()
+  Deprecated.
-AssignProcedure.getTableOperationType()
+AssignProcedure.getTableOperationType()
+  Deprecated.
@@ -132,12 +138,24 @@
+CloseRegionProcedure.getTableOperationType()
 MergeTableRegionsProcedure.getTableOperationType()
 GCMergedRegionsProcedure.getTableOperationType()
+TransitRegionStateProcedure.getTableOperationType()
+OpenRegionProcedure.getTableOperationType()

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7ae6a80c/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.html
index e25c713..7a5fc85 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.html
@@ -113,49 +113,81 @@ (classes implementing TableProcedureInterface)
 class AssignProcedure
-  Procedure that describe the assignment of a single region.
+  Deprecated. Do not use any more.
+class CloseRegionProcedure
+  The remote procedure used to close a region.
 class GCMergedRegionsProcedure
   GC regions that have been Merged.
 class GCRegionProcedure
   GC a Region that is no longer in use.
 class MergeTableRegionsProcedure
   The procedure to Merge a region in a table.
 class MoveRegionProcedure
-  Procedure that implements a RegionPlan.
+  Deprecated. Do not use any more.
+class OpenRegionProcedure
+  The remote procedure used to open a region.
+class RegionRemoteProcedureBase
+  The base class for the remote procedures used to open/close a region.
 class RegionTransitionProcedure
-  Base class for the Assign and Unassign Procedure.
+  Deprecated. Do not use any more.
 class SplitTableRegionProcedure
   The procedure to split a region in a table.
+class TransitRegionStateProcedure
+  The procedure to deal with the state transition of a region.
 class UnassignProcedure
-Procedure that describes
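Taken together, the rows sketch the shape of the reworked assignment machinery: the old per-action procedures are deprecated in favor of one state-transition procedure plus remote open/close procedures sharing a base class. A toy rendering of that relationship, class names from the diff, everything else invented:

// Toy hierarchy only; the real classes carry state machines and RPC plumbing.
@Deprecated class AssignProcedure { }        // "Do not use any more."
@Deprecated class UnassignProcedure { }
@Deprecated class MoveRegionProcedure { }

abstract class RegionRemoteProcedureBase { } // shared base for remote open/close
final class OpenRegionProcedure extends RegionRemoteProcedureBase { }
final class CloseRegionProcedure extends RegionRemoteProcedureBase { }

class TransitRegionStateProcedure { }        // drives the region state transition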

[10/51] [partial] hbase-site git commit: Published site at 092efb42749bf7fc6ad338c96aae8e7b9d3a2c74.

2018-08-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f3d62514/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html
index 63e4b46..514f830 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html
@@ -468,15 +468,15 @@
    * creating it if necessary.
    * @param logEntry
    * @param fileNameBeingSplit the file being split currently. Used to generate tmp file name.
-   * @param conf
-   * @return Path to file into which to dump split log edits.
-   * @throws IOException
-   */
-  @SuppressWarnings("deprecation")
-  @VisibleForTesting
-  static Path getRegionSplitEditsPath(final Entry logEntry, String fileNameBeingSplit,
-      Configuration conf)
-  throws IOException {
+   * @param tmpDirName of the directory used to sideline old recovered edits file
+   * @param conf
+   * @return Path to file into which to dump split log edits.
+   * @throws IOException
+   */
+  @SuppressWarnings("deprecation")
+  @VisibleForTesting
+  static Path getRegionSplitEditsPath(final Entry logEntry, String fileNameBeingSplit,
+      String tmpDirName, Configuration conf) throws IOException {
     FileSystem fs = FileSystem.get(conf);
     Path rootDir = FSUtils.getRootDir(conf);
     Path tableDir = FSUtils.getTableDir(rootDir, logEntry.getKey().getTableName());
@@ -491,7 +491,7 @@
       return null;
     }
     if (fs.exists(dir) && fs.isFile(dir)) {
-      Path tmp = new Path("/tmp");
+      Path tmp = new Path(tmpDirName);
       if (!fs.exists(tmp)) {
         fs.mkdirs(tmp);
       }
@@ -1520,411 +1520,413 @@ (renumbering: only the old side of this region survives in the archive)
    * @return a path with a write for that path. caller should close.
    */
   WriterAndPath createWAP(byte[] region, Entry entry) throws IOException {
     Path regionedits = getRegionSplitEditsPath(entry,
         fileBeingSplit.getPath().getName(), conf);
     if (regionedits == null) {
       return null;
     }
     FileSystem rootFs = FileSystem.get(conf);
     if (rootFs.exists(regionedits)) {
       LOG.warn("Found old edits file. It could be the "
           + "result of a previous failed split attempt. Deleting " + regionedits + ", length="
           + rootFs.getFileStatus(regionedits).getLen());
       if (!rootFs.delete(regionedits, false)) {
         LOG.warn("Failed delete of old {}", regionedits);
       }
     }
     Writer w = createWriter(regionedits);
     LOG.debug("Creating writer path={}", regionedits);
     return new WriterAndPath(regionedits, w, entry.getKey().getSequenceId());
   }

   void filterCellByStore(Entry logEntry) {
     Map<byte[], Long> maxSeqIdInStores =
         regionMaxSeqIdInStores.get(Bytes.toString(logEntry.getKey().getEncodedRegionName()));
     if (MapUtils.isEmpty(maxSeqIdInStores)) {
       return;
     }
     // Create the array list for the cells that aren't filtered.
     // We make the assumption that most cells will be kept.
     ArrayList<Cell> keptCells = new ArrayList<>(logEntry.getEdit().getCells().size());
     for (Cell cell : logEntry.getEdit().getCells()) {
       if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) {
         keptCells.add(cell);
       } else {
         byte[] family = CellUtil.cloneFamily(cell);
         Long maxSeqId = maxSeqIdInStores.get(family);
         // Do not skip cell even if maxSeqId is null. Maybe we are in a rolling upgrade,
         // or the master was crashed before and we can not get the information.
         if (maxSeqId == null || maxSeqId.longValue() < logEntry.getKey().getSequenceId()) {
           keptCells.add(cell);
         }
       }
     }
     // Anything in the keptCells array list is still live.
     // So rather than removing the cells from the array list
     // which would be an O(n^2) operation, we just replace the list
     logEntry.getEdit().setCells(keptCells);
   }

   @Override
   public void append(RegionEntryBuffer buffer) throws IOException {
     appendBuffer(buffer, true);
   }

   WriterAndPath appendBuffer(RegionEntryBuffer buffer, boolean reusable) throws IOException {
     List<Entry> entries = buffer.entryBuffer;
     if (entries.isEmpty()) {
       LOG.warn("got an empty buffer,
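The point of this commit is that the sideline location for an obstructing recovered-edits file becomes a parameter (tmpDirName) instead of a hard-coded "/tmp" on the target filesystem, and createWAP's call site threads it through. A minimal sketch of the pattern against the Hadoop FileSystem API; the helper name sidelineOldEdits is invented:

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical helper illustrating the configurable-sideline pattern:
// move an obstructing file into tmpDirName rather than a fixed "/tmp".
final class SidelineSketch {
  static Path sidelineOldEdits(FileSystem fs, Path dir, String tmpDirName)
      throws IOException {
    Path tmp = new Path(tmpDirName);  // was: new Path("/tmp")
    if (!fs.exists(tmp)) {
      fs.mkdirs(tmp);                 // create the sideline dir on demand
    }
    Path moved = new Path(tmp, dir.getName());
    if (!fs.rename(dir, moved)) {
      throw new IOException("Failed to sideline " + dir + " to " + moved);
    }
    return moved;
  }
}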

[10/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

2018-08-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.html
index 69db023..59daaeb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.html
@@ -196,7 +196,7 @@
       throw new UnknownRegionException("No RegionState found for " + ri.getEncodedName());
     }
     if (!rs.isOpened()) {
-      throw new DoNotRetryRegionException(ri.getEncodedName() + " is not OPEN");
+      throw new DoNotRetryRegionException(ri.getEncodedName() + " is not OPEN; regionState=" + rs);
     }
     if (ri.isSplitParent()) {
       throw new DoNotRetryRegionException(ri.getEncodedName() +
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.html
index 32d662d..e5a5866 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.html
@@ -102,7 +102,7 @@
       }
       // TODO: Move out... in the acquireLock()
-      LOG.debug("Waiting for '" + getTableName() + "' regions in transition");
+      LOG.debug("Waiting for RIT for {}", this);
       regions = env.getAssignmentManager().getRegionStates().getRegionsOfTable(getTableName());
       assert regions != null && !regions.isEmpty() : "unexpected 0 regions";
       ProcedureSyncWait.waitRegionInTransition(env, regions);
@@ -113,29 +113,29 @@
       setNextState(DeleteTableState.DELETE_TABLE_REMOVE_FROM_META);
       break;
     case DELETE_TABLE_REMOVE_FROM_META:
-      LOG.debug("delete '" + getTableName() + "' regions from META");
+      LOG.debug("Deleting regions from META for {}", this);
       DeleteTableProcedure.deleteFromMeta(env, getTableName(), regions);
       setNextState(DeleteTableState.DELETE_TABLE_CLEAR_FS_LAYOUT);
       break;
     case DELETE_TABLE_CLEAR_FS_LAYOUT:
-      LOG.debug("delete '" + getTableName() + "' from filesystem");
+      LOG.debug("Deleting regions from filesystem for {}", this);
       DeleteTableProcedure.deleteFromFs(env, getTableName(), regions, true);
       setNextState(DeleteTableState.DELETE_TABLE_UPDATE_DESC_CACHE);
       regions = null;
       break;
     case DELETE_TABLE_UPDATE_DESC_CACHE:
-      LOG.debug("delete '" + getTableName() + "' descriptor");
+      LOG.debug("Deleting descriptor for {}", this);
       DeleteTableProcedure.deleteTableDescriptorCache(env, getTableName());
       setNextState(DeleteTableState.DELETE_TABLE_UNASSIGN_REGIONS);
       break;
     case DELETE_TABLE_UNASSIGN_REGIONS:
-      LOG.debug("delete '" + getTableName() + "' assignment state");
+      LOG.debug("Deleting assignment state for {}", this);
       DeleteTableProcedure.deleteAssignmentState(env, getTableName());
       setNextState(DeleteTableState.DELETE_TABLE_POST_OPERATION);
       break;
     case DELETE_TABLE_POST_OPERATION:
       postDelete(env);
-      LOG.debug("delete '" + getTableName() + "' completed");
+      LOG.debug("Finished {}", this);
       return Flow.NO_MORE_STATE;
     default:
       throw new UnsupportedOperationException("unhandled state=" + state);
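The hunk swaps string concatenation for SLF4J parameterized logging, which defers message construction until the level check passes and lets the procedure's own toString() carry the detail. A minimal self-contained sketch of the idiom; the class name is invented:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

final class LoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(LoggingSketch.class);

  void run(Object procedure) {
    // Concatenation would build the string even when DEBUG is off:
    //   LOG.debug("Finished " + procedure);
    // The {} placeholder defers formatting until the level is enabled,
    // and passing the procedure itself reuses its toString() description.
    LOG.debug("Finished {}", procedure);
  }
}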

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
index 2f83467..3e6a53e 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
@@ -150,7 +150,7 @@
       if (isRollbackSupported(state)) {

[10/51] [partial] hbase-site git commit: Published site at ba5d1c1f28301adc99019d9d6c4a04fac98ae511.

2018-07-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/804782f0/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
index 95f2a65..073d0d0 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
@@ -931,7 +931,7 @@
     InitMetaProcedure initMetaProc = null;
     if (assignmentManager.getRegionStates().getRegionState(RegionInfoBuilder.FIRST_META_REGIONINFO)
         .isOffline()) {
-      Optional<Procedure<?>> optProc = procedureExecutor.getProcedures().stream()
+      Optional<Procedure<MasterProcedureEnv>> optProc = procedureExecutor.getProcedures().stream()
         .filter(p -> p instanceof InitMetaProcedure).findAny();
       if (optProc.isPresent()) {
         initMetaProc = (InitMetaProcedure) optProc.get();
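The only semantic change here is the tightened generic type on the Optional returned by the stream pipeline. A small self-contained sketch of the filter-then-findAny idiom, with stand-in types replacing the HBase ones:

import java.util.List;
import java.util.Optional;

final class FindAnySketch {
  interface Procedure<TEnv> { }
  static final class InitMetaProcedure implements Procedure<String> { }

  // Pick any procedure of a given concrete type out of a typed list.
  static Optional<Procedure<String>> findInitMeta(List<Procedure<String>> procs) {
    return procs.stream()
        .filter(p -> p instanceof InitMetaProcedure)  // type test, not a cast
        .findAny();                                   // any match; order unspecified
  }
}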
@@ -3210,566 +3210,567 @@
 (The remainder of this hunk is the old side of a one-line renumbering: the bodies of getProcedures(), getLocks(), listTableDescriptors(), listTableNames() and the private getTableDescriptors() helper; the archive truncates the new side.)

[10/51] [partial] hbase-site git commit: Published site at b4759ce6e72f50ccd9d410bd5917dc5a515414f1.

2018-07-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/df8fd1d3/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.ServerLocalityCostFunction.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.ServerLocalityCostFunction.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.ServerLocalityCostFunction.html
index 233dba3..91b9055 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.ServerLocalityCostFunction.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.ServerLocalityCostFunction.html
@@ -540,1205 +540,1204 @@ (renumbering: only the old side of this region survives in the archive)
     sm.getRegionMetrics().forEach((byte[] regionName, RegionMetrics rm) -> {
       Deque<BalancerRegionLoad> rLoads = oldLoads.get(Bytes.toString(regionName));
       if (rLoads == null) {
         // There was nothing there
         rLoads = new ArrayDeque<>();
       } else if (rLoads.size() >= numRegionLoadsToRemember) {
         rLoads.remove();
       }
       rLoads.add(new BalancerRegionLoad(rm));
       loads.put(Bytes.toString(regionName), rLoads);
     });
   });
   for (CostFromRegionLoadFunction cost : regionLoadFunctions) {
     cost.setLoads(loads);
   }
 }

 protected void initCosts(Cluster cluster) {
   for (CostFunction c : costFunctions) {
     c.init(cluster);
   }
 }

 protected void updateCostsWithAction(Cluster cluster, Action action) {
   for (CostFunction c : costFunctions) {
     c.postAction(action);
   }
 }

 /**
  * Get the names of the cost functions
  */
 public String[] getCostFunctionNames() {
   if (costFunctions == null) return null;
   String[] ret = new String[costFunctions.length];
   for (int i = 0; i < costFunctions.length; i++) {
     CostFunction c = costFunctions[i];
     ret[i] = c.getClass().getSimpleName();
   }
   return ret;
 }

 /**
  * This is the main cost function.  It will compute a cost associated with a proposed cluster
  * state.  All different costs will be combined with their multipliers to produce a double cost.
  *
  * @param cluster The state of the cluster
  * @param previousCost the previous cost. This is used as an early out.
  * @return a double of a cost associated with the proposed cluster state.  This cost is an
  *     aggregate of all individual cost functions.
  */
 protected double computeCost(Cluster cluster, double previousCost) {
   double total = 0;
   for (int i = 0; i < costFunctions.length; i++) {
     CostFunction c = costFunctions[i];
     this.tempFunctionCosts[i] = 0.0;
     if (c.getMultiplier() <= 0) {
       continue;
     }
     Float multiplier = c.getMultiplier();
     Double cost = c.cost();
     this.tempFunctionCosts[i] = multiplier * cost;
     total += this.tempFunctionCosts[i];
     if (total > previousCost) {
       break;
     }
   }
   return total;
 }

 /** Generates a candidate action to be applied to the cluster for cost function search */
 abstract static class CandidateGenerator {
   abstract Cluster.Action generate(Cluster cluster);

   /**
    * From a list of regions pick a random one. Null can be returned which
    * {@link StochasticLoadBalancer#balanceCluster(Map)} recognize as signal to try a region move
    * rather than swap.
    *
    * @param cluster The state of the cluster
    * @param server index of the server
    * @param chanceOfNoSwap Chance that this will decide to try a move rather than a swap.
    * @return a random {@link RegionInfo} or null if an asymmetrical move is suggested.
    */
   protected int pickRandomRegion(Cluster cluster, int server, double chanceOfNoSwap) {
     // Check to see if this is just a move.
     if (cluster.regionsPerServer[server].length == 0 || RANDOM.nextFloat() < chanceOfNoSwap) {
       // signal a move only.
       return -1;
     }
     int rand = RANDOM.nextInt(cluster.regionsPerServer[server].length);
     return cluster.regionsPerServer[server][rand];
   }

   protected int pickRandomServer(Cluster cluster) {
     if (cluster.numServers < 1) {
       return -1;
     }
     return RANDOM.nextInt(cluster.numServers);
   }

   protected int pickRandomRack(Cluster cluster) {
     if (cluster.numRacks < 1) {
       return
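computeCost above illustrates a weighted-sum objective with an early out: once the running total exceeds the incumbent (previous) cost, the remaining non-negative terms cannot make the candidate better, so the loop bails. A standalone sketch of the same idea; CostTerm is a stand-in interface, not an HBase type:

// Minimal sketch of weighted cost aggregation with an early out,
// mirroring the balancer loop above.
final class CostSketch {
  interface CostTerm {
    double multiplier();
    double cost();  // assumed normalized to [0, 1]
  }

  static double computeCost(CostTerm[] terms, double previousCost) {
    double total = 0;
    for (CostTerm t : terms) {
      if (t.multiplier() <= 0) {
        continue;  // disabled term
      }
      total += t.multiplier() * t.cost();
      if (total > previousCost) {
        break;     // already worse than the incumbent; stop summing
      }
    }
    return total;
  }
}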

[10/51] [partial] hbase-site git commit: Published site at e66a6603e36ecd67237ca16acd5e2de03f0d372d.

2018-07-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0c6f447e/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 37e55ec..5cca62a 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -1,10 +1,10 @@
-接口 org.apache.hadoop.hbase.Cell的使用 (Apache HBase 3.0.0-SNAPSHOT API)
+Uses of Interface org.apache.hadoop.hbase.Cell (Apache HBase 3.0.0-SNAPSHOT API)
@@ -12,7 +12,7 @@
-您的浏览器已禁用 JavaScript。
+JavaScript is disabled on your browser.
 (The remaining hunks swap the page's generated Chinese navigation labels, 跳过导航链接, 概览, 程序包, 类, 使用, 树, 已过时, 索引, 帮助, 上一个, 下一个, 框架, 无框架, 所有类, for the English equivalents: Skip navigation links, Overview, Package, Class, Use, Tree, Deprecated, Index, Help, Prev, Next, Frames, NoFrames, AllClasses.)

[10/51] [partial] hbase-site git commit: Published site at e66a6603e36ecd67237ca16acd5e2de03f0d372d.

2018-07-19 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5427a45e/apidocs/org/apache/hadoop/hbase/class-use/CacheEvictionStats.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/CacheEvictionStats.html b/apidocs/org/apache/hadoop/hbase/class-use/CacheEvictionStats.html
index 166d35e..08f91e2 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/CacheEvictionStats.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/CacheEvictionStats.html
@@ -1,10 +1,10 @@
-Uses of Class org.apache.hadoop.hbase.CacheEvictionStats (Apache HBase 3.0.0-SNAPSHOT API)
+类 org.apache.hadoop.hbase.CacheEvictionStats的使用 (Apache HBase 3.0.0-SNAPSHOT API)
@@ -12,7 +12,7 @@
-JavaScript is disabled on your browser.
+您的浏览器已禁用 JavaScript。
 (This page moves the opposite way, from English to Chinese: the navigation labels and the headings "Packages that use CacheEvictionStats" and "Uses of CacheEvictionStats in org.apache.hadoop.hbase.client" become "使用CacheEvictionStats的程序包" and "org.apache.hadoop.hbase.client中CacheEvictionStats的使用", while the package description "Provides HBase Client", its table of contents, and the "Methods in org.apache.hadoop.hbase.client that return CacheEvictionStats" table carry over under the localized headings.)

    [10/51] [partial] hbase-site git commit: Published site at 0f23784182ab88649de340d75804e0ff20dcd0fc.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bcb555af/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html
    --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html
index 05e032c..40ef9f4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html
@@ -25,767 +25,805 @@
  */
 package org.apache.hadoop.hbase.io.asyncfs;

-import static org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleState.READER_IDLE;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY;
+import static org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleState.READER_IDLE;

 (The rest of the hunk re-sorts the file's remaining imports, protobuf, netty, JDK, javax.security.sasl, commons, hadoop crypto/hdfs and slf4j, into the project's canonical order; the archive lists the old ordering and truncates the new one.)

    [10/51] [partial] hbase-site git commit: Published site at 85b41f36e01214b6485c9352875c84ebf877dab3.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a5c66de0/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
    --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
index c10cfbf..a3e2f4a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
@@ -3371,7 +3371,7 @@
     private V result = null;

     private final HBaseAdmin admin;
-    private final Long procId;
+    protected final Long procId;

     public ProcedureFuture(final HBaseAdmin admin, final Long procId) {
       this.admin = admin;
@@ -3653,653 +3653,651 @@ (renumbering: only the old side of this region survives in the archive)
      * @return a description of the operation
      */
     protected String getDescription() {
       return "Operation: " + getOperationType() + ", "
           + "Table Name: " + tableName.getNameWithNamespaceInclAsString();
     }

     protected abstract class TableWaitForStateCallable implements WaitForStateCallable {
       @Override
       public void throwInterruptedException() throws InterruptedIOException {
         throw new InterruptedIOException("Interrupted while waiting for operation: "
             + getOperationType() + " on table: " + tableName.getNameWithNamespaceInclAsString());
       }

       @Override
       public void throwTimeoutException(long elapsedTime) throws TimeoutException {
         throw new TimeoutException("The operation: " + getOperationType() + " on table: " +
             tableName.getNameAsString() + " has not completed after " + elapsedTime + "ms");
       }
     }

     @Override
     protected V postOperationResult(final V result, final long deadlineTs)
         throws IOException, TimeoutException {
       LOG.info(getDescription() + " completed");
       return super.postOperationResult(result, deadlineTs);
     }

     @Override
     protected V postOperationFailure(final IOException exception, final long deadlineTs)
         throws IOException, TimeoutException {
       LOG.info(getDescription() + " failed with " + exception.getMessage());
       return super.postOperationFailure(exception, deadlineTs);
     }

     protected void waitForTableEnabled(final long deadlineTs)
         throws IOException, TimeoutException {
       waitForState(deadlineTs, new TableWaitForStateCallable() {
         @Override
         public boolean checkState(int tries) throws IOException {
           try {
             if (getAdmin().isTableAvailable(tableName)) {
               return true;
             }
           } catch (TableNotFoundException tnfe) {
             LOG.debug("Table " + tableName.getNameWithNamespaceInclAsString()
                 + " was not enabled, sleeping. tries=" + tries);
           }
           return false;
         }
       });
     }

     protected void waitForTableDisabled(final long deadlineTs)
         throws IOException, TimeoutException {
       waitForState(deadlineTs, new TableWaitForStateCallable() {
         @Override
         public boolean checkState(int tries) throws IOException {
           return getAdmin().isTableDisabled(tableName);
         }
       });
     }

     protected void waitTableNotFound(final long deadlineTs)
         throws IOException, TimeoutException {
       waitForState(deadlineTs, new TableWaitForStateCallable() {
         @Override
         public boolean checkState(int tries) throws IOException {
           return !getAdmin().tableExists(tableName);
         }
       });
     }

     protected void waitForSchemaUpdate(final long deadlineTs)
         throws IOException, TimeoutException {
       waitForState(deadlineTs, new TableWaitForStateCallable() {
         @Override
         public boolean checkState(int tries) throws IOException {
           return getAdmin().getAlterStatus(tableName).getFirst() == 0;
         }
       });
     }

     protected void waitForAllRegionsOnline(final long deadlineTs, final byte[][] splitKeys)
         throws IOException, TimeoutException {
       final TableDescriptor desc = getTableDescriptor();
       final AtomicInteger actualRegCount = new AtomicInteger(0);
       final MetaTableAccessor.Visitor visitor = new MetaTableAccessor.Visitor() {
         @Override
         public boolean visit(Result

    [10/51] [partial] hbase-site git commit: Published site at 6198e1fc7dfa85c3bc6b2855f9a5fb5f4b2354ff.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb5d2c62/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
    --
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html b/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
    index e2c59f6..0691797 100644
    --- a/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
    +++ b/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6};
+var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 @InterfaceAudience.Private
 public interface MasterServices
 extends Server
 A curated subset of services provided by HMaster.
  For use internally only. Passed to Managers, Services and Chores so can pass less-than-a
@@ -343,52 +343,58 @@ (method summary; the two added rows are marked)
 SnapshotManager getSnapshotManager()
+SyncReplicationReplayWALManager getSyncReplicationReplayWALManager()
+  Returns the SyncReplicationReplayWALManager.
 TableDescriptors getTableDescriptors()
 TableStateManager getTableStateManager()
 boolean isActiveMaster()
 boolean isClusterUp()
 boolean isInitialized()
 boolean isInMaintenanceMode()
 boolean isSplitOrMergeEnabled(MasterSwitchType switchType)
 List<ReplicationPeerDescription> listReplicationPeers(String regex)
   Return a list of replication peers.
 List<TableDescriptor> listTableDescriptorsByNamespace(String name)
   Get list of table descriptors by namespace
 List<TableName> listTableNamesByNamespace(String name)
   Get list of table names by namespace
 long mergeRegions(RegionInfo[] regionsToMerge, boolean forcible, ...)
   Merge regions in a table.
 long modifyColumn(TableName tableName, ColumnFamilyDescriptor descriptor, ...)
   Modify the column descriptor of an existing column in an existing table
 long modifyTable(TableName tableName, TableDescriptor descriptor, ...)
   Modify the descriptor of an existing table
 boolean registerService(com.google.protobuf.Service instance)
   Registers a new protocol buffer Service subclass as a master coprocessor endpoint.
 long removeReplicationPeer(String peerId)
   Removes a peer and stops the replication
 long splitRegion(RegionInfo regionInfo, byte[] splitRow, ...)
   Split a region.
+long transitReplicationPeerSyncReplicationState(String peerId, SyncReplicationState clusterState)
+  Set current cluster state for a synchronous replication peer.
 long truncateTable(TableName tableName, boolean preserveSplits, ...)
   Truncate a table

    [10/51] [partial] hbase-site git commit: Published site at 14087cc919da9f2e0b1a68f701f6365ad9d1d71f.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55ce8d97/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.html
    --
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.html b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.html
index 374e848..caab1e7 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.html
@@ -18,7 +18,7 @@
-var methods = {"i0":10, ... ,"i109":10};  (110 entries, all ":10")
+var methods = {"i0":10, ... ,"i110":10};  (111 entries, all ":10")
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -213,179 +213,183 @@ (field summary; cpRequestsCount is the one addition)
+private long cpRequestsCount
 private long dataInMemoryWithoutWAL
 private org.apache.hadoop.hdfs.DFSHedgedReadMetrics dfsHedgedReadMetrics
   Can be null if not on hdfs.
 private ScheduledExecutorService executor
 private long filteredReadRequestsCount
 private long flushedCellsCount
 private long flushedCellsSize
 private CacheStats l1Stats
 private CacheStats l2Stats
 private static org.slf4j.Logger LOG
 private long majorCompactedCellsCount
 private long majorCompactedCellsSize
 private long maxStoreFileAge
 private long memstoreSize
 private MetricsWALSource metricsWALSource
 private long minStoreFileAge
 private MobFileCache mobFileCache
 private long mobFileCacheAccessCount
 private long mobFileCacheCount
 private long mobFileCacheEvictedCount
 private double mobFileCacheHitRatio
 private long mobFileCacheMissCount
 private long mobFlushCount
 private long mobFlushedCellsCount
 private long mobFlushedCellsSize
 private long mobScanCellsCount
 private long mobScanCellsSize
 private long numMutationsWithoutWAL
 private long numReferenceFiles
 private long numStoreFiles
 private long numStores
 private long numWALFiles
 private double percentFileLocal
 private double percentFileLocalSecondaryRegions
 private long period
 private long readRequestsCount
 private HRegionServer regionServer
 private double requestsPerSecond
 private Runnable runnable
 private long storefileIndexSize
 private long storeFileSize
 private long totalStaticBloomSize
 private long

    [10/51] [partial] hbase-site git commit: Published site at 72784c2d836a4b977667449d3adec5e8d15453f5.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2b11656f/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
    --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
index 74bacd8..546d2b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
    @@ -2249,1468 +2249,1484 @@
     2241  }
     2242
     2243  @Override
    -2244  public long addColumn(
    -2245  final TableName tableName,
    -2246  final ColumnFamilyDescriptor 
    column,
    -2247  final long nonceGroup,
    -2248  final long nonce)
    -2249  throws IOException {
    -2250checkInitialized();
    -2251checkTableExists(tableName);
    -2252
    -2253TableDescriptor old = 
    getTableDescriptors().get(tableName);
    -2254if 
    (old.hasColumnFamily(column.getName())) {
    -2255  throw new 
    InvalidFamilyOperationException("Column family '" + column.getNameAsString()
    -2256  + "' in table '" + tableName + 
    "' already exists so cannot be added");
    -2257}
    +2244  public long addColumn(final TableName 
    tableName, final ColumnFamilyDescriptor column,
    +2245  final long nonceGroup, final long 
    nonce) throws IOException {
    +2246checkInitialized();
    +2247checkTableExists(tableName);
    +2248
    +2249return modifyTable(tableName, new 
    TableDescriptorGetter() {
    +2250
    +2251  @Override
    +2252  public TableDescriptor get() 
    throws IOException {
    +2253TableDescriptor old = 
    getTableDescriptors().get(tableName);
    +2254if 
    (old.hasColumnFamily(column.getName())) {
    +2255  throw new 
    InvalidFamilyOperationException("Column family '" + column.getNameAsString()
    +2256  + "' in table '" + 
    tableName + "' already exists so cannot be added");
    +2257}
     2258
    -2259TableDescriptor newDesc = 
    TableDescriptorBuilder
    -2260
    .newBuilder(old).setColumnFamily(column).build();
    -2261return modifyTable(tableName, 
    newDesc, nonceGroup, nonce);
    +2259return 
    TableDescriptorBuilder.newBuilder(old).setColumnFamily(column).build();
    +2260  }
    +2261}, nonceGroup, nonce);
     2262  }
     2263
    -2264  @Override
    -2265  public long modifyColumn(
    -2266  final TableName tableName,
    -2267  final ColumnFamilyDescriptor 
    descriptor,
    -2268  final long nonceGroup,
    -2269  final long nonce)
    -2270  throws IOException {
    -2271checkInitialized();
    -2272checkTableExists(tableName);
    -2273
    -2274TableDescriptor old = 
    getTableDescriptors().get(tableName);
    -2275if (! 
    old.hasColumnFamily(descriptor.getName())) {
    -2276  throw new 
    InvalidFamilyOperationException("Family '" + descriptor.getNameAsString()
    -2277  + "' does not exist, so it 
    cannot be modified");
    -2278}
    -2279
    -2280TableDescriptor td = 
    TableDescriptorBuilder
    -2281.newBuilder(old)
    -2282
    .modifyColumnFamily(descriptor)
    -2283.build();
    -2284
    -2285return modifyTable(tableName, td, 
    nonceGroup, nonce);
    -2286  }
    -2287
    -2288  @Override
    -2289  public long deleteColumn(
    -2290  final TableName tableName,
    -2291  final byte[] columnName,
    -2292  final long nonceGroup,
    -2293  final long nonce)
    -2294  throws IOException {
    -2295checkInitialized();
    -2296checkTableExists(tableName);
    -2297
    -2298TableDescriptor old = 
    getTableDescriptors().get(tableName);
    -2299
    -2300if (! 
    old.hasColumnFamily(columnName)) {
    -2301  throw new 
    InvalidFamilyOperationException("Family '" + Bytes.toString(columnName)
    -2302  + "' does not exist, so it 
    cannot be deleted");
    -2303}
    -2304if (old.getColumnFamilyCount() == 1) 
    {
    -2305  throw new 
    InvalidFamilyOperationException("Family '" + Bytes.toString(columnName)
    -2306  + "' is the only column family 
    in the table, so it cannot be deleted");
    -2307}
    -2308
    -2309TableDescriptor td = 
    TableDescriptorBuilder
    -2310
    .newBuilder(old).removeColumnFamily(columnName).build();
    -2311return modifyTable(tableName, td, 
    nonceGroup, nonce);
    -2312  }
    -2313
    -2314  @Override
    -2315  public long enableTable(final 
    TableName tableName, final long nonceGroup, final long nonce)
    -2316  throws IOException {
    -2317checkInitialized();
    -2318
    -2319return 
    MasterProcedureUtil.submitProcedure(
    -2320new 
    MasterProcedureUtil.NonceProcedureRunnable(this, nonceGroup, nonce) {
    -2321  @Override
    -2322  protected void run() throws 
    IOException {
    -2323
    getMaster().getMasterCoprocessorHost().preEnableTable(tableName);
    -2324
    -2325// Normally, it would make sense 
    for this authorization check to exist inside
    -2326// AccessController, but because 
    the authorization check is done based on internal state
    -2327// (rather than 
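
The hunk above folds the hand-rolled descriptor edit in addColumn (and, further down, modifyColumn and deleteColumn) into a single modifyTable(tableName, TableDescriptorGetter, nonceGroup, nonce) path: each operation now only supplies a callback that computes the new descriptor, and the shared method owns the procedure submission. A minimal sketch of that callback pattern, assuming only what the diff shows — TableDescriptorGetter is the interface named in the hunk, while the registry class and the String stand-in for TableDescriptor are hypothetical, just to keep the example self-contained:

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

interface TableDescriptorGetter {
  String get() throws IOException; // computes the descriptor the table should end up with
}

class DescriptorRegistry {
  private final Map<String, String> descriptors = new HashMap<>();

  // Every descriptor edit funnels through here, so shared pre-checks and
  // procedure submission live in one place instead of being copied per operation.
  long modifyTable(String tableName, TableDescriptorGetter newDescriptor) throws IOException {
    descriptors.put(tableName, newDescriptor.get()); // HBase submits a procedure here
    return 1L;                                       // stand-in for the procedure id
  }

  long addColumn(String tableName, String family) throws IOException {
    return modifyTable(tableName, () -> {
      String old = descriptors.getOrDefault(tableName, "");
      if (old.contains(family + ";")) {
        throw new IOException("Column family '" + family + "' already exists");
      }
      return old + family + ";";
    });
  }
}

Centralizing the mutation this way means a new descriptor-editing operation only has to express the edit itself; validation still runs inside the callback, before anything is persisted.
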

    [10/51] [partial] hbase-site git commit: Published site at 9101fc246f86445006bfbcdfda5cc495016dc280.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/65565d77/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
    --
    diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html 
    b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
    index eb16038..74bacd8 100644
    --- a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
    +++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
    @@ -6,7 +6,7 @@
     
     
     
    -001/*
    +001/**
     002 * Licensed to the Apache Software 
    Foundation (ASF) under one
     003 * or more contributor license 
    agreements.  See the NOTICE file
     004 * distributed with this work for 
    additional information
    @@ -48,692 +48,692 @@
     040import java.util.Map;
     041import java.util.Map.Entry;
     042import java.util.Objects;
    -043import java.util.Set;
    -044import 
    java.util.concurrent.ExecutionException;
    -045import java.util.concurrent.Future;
    -046import java.util.concurrent.TimeUnit;
    -047import 
    java.util.concurrent.TimeoutException;
    -048import 
    java.util.concurrent.atomic.AtomicInteger;
    -049import 
    java.util.concurrent.atomic.AtomicReference;
    -050import java.util.function.Function;
    -051import java.util.regex.Pattern;
    -052import java.util.stream.Collectors;
    -053import javax.servlet.ServletException;
    -054import javax.servlet.http.HttpServlet;
    -055import 
    javax.servlet.http.HttpServletRequest;
    -056import 
    javax.servlet.http.HttpServletResponse;
    -057import 
    org.apache.commons.lang3.StringUtils;
    -058import 
    org.apache.hadoop.conf.Configuration;
    -059import org.apache.hadoop.fs.Path;
    -060import 
    org.apache.hadoop.hbase.ClusterId;
    -061import 
    org.apache.hadoop.hbase.ClusterMetrics;
    -062import 
    org.apache.hadoop.hbase.ClusterMetrics.Option;
    -063import 
    org.apache.hadoop.hbase.ClusterMetricsBuilder;
    -064import 
    org.apache.hadoop.hbase.DoNotRetryIOException;
    -065import 
    org.apache.hadoop.hbase.HBaseIOException;
    -066import 
    org.apache.hadoop.hbase.HBaseInterfaceAudience;
    -067import 
    org.apache.hadoop.hbase.HConstants;
    -068import 
    org.apache.hadoop.hbase.InvalidFamilyOperationException;
    -069import 
    org.apache.hadoop.hbase.MasterNotRunningException;
    -070import 
    org.apache.hadoop.hbase.MetaTableAccessor;
    -071import 
    org.apache.hadoop.hbase.NamespaceDescriptor;
    -072import 
    org.apache.hadoop.hbase.PleaseHoldException;
    -073import 
    org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
    -074import 
    org.apache.hadoop.hbase.ScheduledChore;
    -075import 
    org.apache.hadoop.hbase.ServerName;
    -076import 
    org.apache.hadoop.hbase.TableDescriptors;
    -077import 
    org.apache.hadoop.hbase.TableName;
    -078import 
    org.apache.hadoop.hbase.TableNotDisabledException;
    -079import 
    org.apache.hadoop.hbase.TableNotFoundException;
    -080import 
    org.apache.hadoop.hbase.UnknownRegionException;
    -081import 
    org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
    -082import 
    org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    -083import 
    org.apache.hadoop.hbase.client.MasterSwitchType;
    -084import 
    org.apache.hadoop.hbase.client.RegionInfo;
    -085import 
    org.apache.hadoop.hbase.client.Result;
    -086import 
    org.apache.hadoop.hbase.client.TableDescriptor;
    -087import 
    org.apache.hadoop.hbase.client.TableDescriptorBuilder;
    -088import 
    org.apache.hadoop.hbase.client.TableState;
    -089import 
    org.apache.hadoop.hbase.client.VersionInfoUtil;
    -090import 
    org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
    -091import 
    org.apache.hadoop.hbase.exceptions.DeserializationException;
    -092import 
    org.apache.hadoop.hbase.exceptions.MergeRegionException;
    -093import 
    org.apache.hadoop.hbase.executor.ExecutorType;
    -094import 
    org.apache.hadoop.hbase.favored.FavoredNodesManager;
    -095import 
    org.apache.hadoop.hbase.favored.FavoredNodesPromoter;
    -096import 
    org.apache.hadoop.hbase.http.InfoServer;
    -097import 
    org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
    -098import 
    org.apache.hadoop.hbase.ipc.RpcServer;
    -099import 
    org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
    -100import 
    org.apache.hadoop.hbase.log.HBaseMarkers;
    -101import 
    org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode;
    -102import 
    org.apache.hadoop.hbase.master.assignment.AssignmentManager;
    -103import 
    org.apache.hadoop.hbase.master.assignment.MergeTableRegionsProcedure;
    -104import 
    org.apache.hadoop.hbase.master.assignment.RegionStates;
    -105import 
    org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
    -106import 
    org.apache.hadoop.hbase.master.balancer.BalancerChore;
    -107import 
    org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer;
    -108import 
    org.apache.hadoop.hbase.master.balancer.ClusterStatusChore;
    -109import 
    org.apache.hadoop.hbase.master.balancer.LoadBalancerFactory;
    -110import 
    org.apache.hadoop.hbase.master.cleaner.CleanerChore;
    -111import 
    org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
    -112import 
    org.apache.hadoop.hbase.master.cleaner.LogCleaner;
    -113import 
    org.apache.hadoop.hbase.master.cleaner.ReplicationBarrierCleaner;
    

    [10/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvEncoder.html
    --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvEncoder.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvEncoder.html
index 83c17c0..9df0225 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvEncoder.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvEncoder.html
@@ -54,323 +54,362 @@
 046import org.apache.hadoop.io.IOUtils;
 047
 048import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
-049
+049import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
 050
-051/**
-052 * Compression in this class is lifted off Compressor/KeyValueCompression.
-053 * This is a pure coincidence... they are independent and don't have to be compatible.
-054 *
-055 * This codec is used at server side for writing cells to WAL as well as for sending edits
-056 * as part of the distributed splitting process.
-057 */
-058@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
-059  HBaseInterfaceAudience.PHOENIX, HBaseInterfaceAudience.CONFIG})
-060public class WALCellCodec implements Codec {
-061  /** Configuration key for the class to use when encoding cells in the WAL */
-062  public static final String WAL_CELL_CODEC_CLASS_KEY = "hbase.regionserver.wal.codec";
-063
-064  protected final CompressionContext compression;
-065  protected final ByteStringUncompressor statelessUncompressor = new ByteStringUncompressor() {
-066    @Override
-067    public byte[] uncompress(ByteString data, Dictionary dict) throws IOException {
-068      return WALCellCodec.uncompressByteString(data, dict);
-069    }
-070  };
-071
-072  /**
-073   * <b>All subclasses must implement a no argument constructor</b>
-074   */
-075  public WALCellCodec() {
-076    this.compression = null;
-077  }
-078
-079  /**
-080   * Default constructor - <b>all subclasses must implement a constructor with this signature</b>
-081   * if they are to be dynamically loaded from the {@link Configuration}.
-082   * @param conf configuration to configure <tt>this</tt>
-083   * @param compression compression the codec should support, can be <tt>null</tt> to indicate no
-084   *          compression
-085   */
-086  public WALCellCodec(Configuration conf, CompressionContext compression) {
-087    this.compression = compression;
-088  }
-089
-090  public static String getWALCellCodecClass(Configuration conf) {
-091    return conf.get(WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
-092  }
-093
-094  /**
-095   * Create and setup a {@link WALCellCodec} from the {@code cellCodecClsName} and
-096   * CompressionContext, if {@code cellCodecClsName} is specified.
-097   * Otherwise Cell Codec classname is read from {@link Configuration}.
-098   * Fully prepares the codec for use.
-099   * @param conf {@link Configuration} to read for the user-specified codec. If none is specified,
-100   *          uses a {@link WALCellCodec}.
-101   * @param cellCodecClsName name of codec
-102   * @param compression compression the codec should use
-103   * @return a {@link WALCellCodec} ready for use.
-104   * @throws UnsupportedOperationException if the codec cannot be instantiated
-105   */
-106
-107  public static WALCellCodec create(Configuration conf, String cellCodecClsName,
-108      CompressionContext compression) throws UnsupportedOperationException {
-109    if (cellCodecClsName == null) {
-110      cellCodecClsName = getWALCellCodecClass(conf);
-111    }
-112    return ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-113        { Configuration.class, CompressionContext.class }, new Object[] { conf, compression });
-114  }
-115
-116  /**
-117   * Create and setup a {@link WALCellCodec} from the
-118   * CompressionContext.
-119   * Cell Codec classname is read from {@link Configuration}.
-120   * Fully prepares the codec for use.
-121   * @param conf {@link Configuration} to read for the user-specified codec. If none is specified,
-122   *          uses a {@link WALCellCodec}.
-123   * @param compression compression the codec should use
-124   * @return a {@link WALCellCodec} ready for use.
-125   * @throws UnsupportedOperationException if the codec cannot be instantiated
-126   */
-127  public static WALCellCodec create(Configuration conf,
-128      CompressionContext compression) throws UnsupportedOperationException {
-129    String cellCodecClsName = getWALCellCodecClass(conf);
-130    return ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-131        { Configuration.class, CompressionContext.class }, new Object[] { conf, compression });
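
Per the factory methods in this hunk, the codec class name is read from the hbase.regionserver.wal.codec key and instantiated reflectively with a (Configuration, CompressionContext) constructor. A sketch of wiring in a custom codec under those assumptions — MyWALCellCodec is hypothetical, and in real code would extend WALCellCodec:

import org.apache.hadoop.conf.Configuration;

public class MyWALCellCodec {
  // Subclasses must keep this two-argument shape to be loadable via
  // ReflectionUtils.instantiateWithCustomCtor, per the javadoc above.
  public MyWALCellCodec(Configuration conf, Object compressionContext) {
  }

  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // WAL_CELL_CODEC_CLASS_KEY in the hunk; defaults to WALCellCodec itself.
    conf.set("hbase.regionserver.wal.codec", MyWALCellCodec.class.getName());
    // The region server side then resolves it with, per the hunk:
    //   WALCellCodec codec = WALCellCodec.create(conf, compressionContext);
    System.out.println("WAL codec = " + conf.get("hbase.regionserver.wal.codec"));
  }
}
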
    

    [10/51] [partial] hbase-site git commit: Published site at 7d3750bd9fc9747623549c242cc4171e224b3eaf.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3469cbc0/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.html
    --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.html
index ca94fbc..269fcc2 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.html
@@ -207,114 +207,154 @@
 199      return false;
 200    }
 201
-202    // Mark the region as CLOSING.
-203    env.getAssignmentManager().markRegionAsClosing(regionNode);
-204
-205    // Add the close region operation to the server dispatch queue.
-206    if (!addToRemoteDispatcher(env, regionNode.getRegionLocation())) {
-207      // If addToRemoteDispatcher fails, it calls the callback #remoteCallFailed.
-208    }
-209
-210    // Return true to keep the procedure running.
-211    return true;
-212  }
-213
-214  @Override
-215  protected void finishTransition(final MasterProcedureEnv env, final RegionStateNode regionNode)
-216      throws IOException {
-217    AssignmentManager am = env.getAssignmentManager();
-218    RegionInfo regionInfo = getRegionInfo();
-219
-220    if (!removeAfterUnassigning) {
-221      am.markRegionAsClosed(regionNode);
-222    } else {
-223      // Remove from in-memory states
-224      am.getRegionStates().deleteRegion(regionInfo);
-225      env.getMasterServices().getServerManager().removeRegion(regionInfo);
-226      FavoredNodesManager fnm = env.getMasterServices().getFavoredNodesManager();
-227      if (fnm != null) {
-228        fnm.deleteFavoredNodesForRegions(Lists.newArrayList(regionInfo));
-229      }
-230    }
-231  }
-232
-233  @Override
-234  public RemoteOperation remoteCallBuild(final MasterProcedureEnv env, final ServerName serverName) {
-235    assert serverName.equals(getRegionState(env).getRegionLocation());
-236    return new RegionCloseOperation(this, getRegionInfo(), this.destinationServer);
-237  }
-238
-239  @Override
-240  protected void reportTransition(final MasterProcedureEnv env, final RegionStateNode regionNode,
-241      final TransitionCode code, final long seqId) throws UnexpectedStateException {
-242    switch (code) {
-243      case CLOSED:
-244        setTransitionState(RegionTransitionState.REGION_TRANSITION_FINISH);
-245        break;
-246      default:
-247        throw new UnexpectedStateException(String.format(
-248          "Received report unexpected transition state=%s for region=%s server=%s, expected CLOSED.",
-249          code, regionNode.getRegionInfo(), regionNode.getRegionLocation()));
-250    }
-251  }
-252
-253  @Override
-254  protected boolean remoteCallFailed(final MasterProcedureEnv env, final RegionStateNode regionNode,
-255      final IOException exception) {
-256    // TODO: Is there on-going rpc to cleanup?
-257    if (exception instanceof ServerCrashException) {
-258      // This exception comes from ServerCrashProcedure AFTER log splitting.
-259      // SCP found this region as a RIT. Its call into here says it is ok to let this procedure go
-260      // complete. This complete will release lock on this region so subsequent action on region
-261      // can succeed; e.g. the assign that follows this unassign when a move (w/o wait on SCP
-262      // the assign could run w/o logs being split so data loss).
-263      try {
-264        reportTransition(env, regionNode, TransitionCode.CLOSED, HConstants.NO_SEQNUM);
-265      } catch (UnexpectedStateException e) {
-266        // Should never happen.
-267        throw new RuntimeException(e);
-268      }
-269    } else if (exception instanceof RegionServerAbortedException ||
-270        exception instanceof RegionServerStoppedException ||
-271        exception instanceof ServerNotRunningYetException) {
-272      // RS is aborting, we cannot offline the region since the region may need to do WAL
-273      // recovery. Until we see the RS expiration, we should retry.
-274      // TODO: This should be suspend like the below where we call expire on server?
-275      LOG.info("Ignoring; waiting on ServerCrashProcedure", exception);
-276    } else if (exception instanceof NotServingRegionException) {
-277      LOG.info("IS THIS OK? ANY LOGS TO REPLAY; ACTING AS THOUGH ALL GOOD " + regionNode,
-278        exception);
-279      setTransitionState(RegionTransitionState.REGION_TRANSITION_FINISH);
-280    } else {
-281      LOG.warn("Expiring server " + this + "; " + regionNode.toShortString() +
-282        ", exception=" + exception);
-283      env.getMasterServices().getServerManager().expireServer(regionNode.getRegionLocation());
-284      // Return false so this procedure stays in suspended state. It will be woken up by the
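
remoteCallFailed above is essentially a four-way classification of close-RPC failures. Restated compactly below — the exception class names are the real ones from the hunk, while the Action enum and policy class are purely illustrative, not HBase API:

import java.io.IOException;

public class CloseFailurePolicy {
  enum Action { MARK_CLOSED_AND_FINISH, WAIT_FOR_SERVER_CRASH_PROCEDURE, FINISH_ANYWAY,
    EXPIRE_SERVER_AND_STAY_SUSPENDED }

  static Action classify(IOException e) {
    switch (e.getClass().getSimpleName()) {
      case "ServerCrashException":          // logs already split: safe to report CLOSED
        return Action.MARK_CLOSED_AND_FINISH;
      case "RegionServerAbortedException":  // region may still need WAL recovery,
      case "RegionServerStoppedException":  // so retry until the server expires
      case "ServerNotRunningYetException":
        return Action.WAIT_FOR_SERVER_CRASH_PROCEDURE;
      case "NotServingRegionException":     // region is already off that server
        return Action.FINISH_ANYWAY;
      default:                              // unknown failure: expire the server and
        return Action.EXPIRE_SERVER_AND_STAY_SUSPENDED; // wait to be woken up
    }
  }
}
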
    

    [10/51] [partial] hbase-site git commit: Published site at 997747076d8ec0b4346d7cb99c4b0667a7c14905.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4df09ed9/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
    index d7e2bf4..dbfd9ee 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
    @@ -103,2497 +103,2454 @@
     095import 
    org.apache.hadoop.hbase.io.hfile.HFile;
     096import 
    org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
     097import 
    org.apache.hadoop.hbase.ipc.RpcServer;
    -098import 
    org.apache.hadoop.hbase.net.Address;
    -099import 
    org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    -100import 
    org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
    -101import 
    org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService;
    -102import 
    org.apache.hadoop.hbase.quotas.GlobalQuotaSettings;
    -103import 
    org.apache.hadoop.hbase.regionserver.BloomType;
    -104import 
    org.apache.hadoop.hbase.regionserver.FlushLifeCycleTracker;
    -105import 
    org.apache.hadoop.hbase.regionserver.InternalScanner;
    -106import 
    org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
    -107import 
    org.apache.hadoop.hbase.regionserver.Region;
    -108import 
    org.apache.hadoop.hbase.regionserver.RegionScanner;
    -109import 
    org.apache.hadoop.hbase.regionserver.ScanType;
    -110import 
    org.apache.hadoop.hbase.regionserver.ScannerContext;
    -111import 
    org.apache.hadoop.hbase.regionserver.Store;
    -112import 
    org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
    -113import 
    org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
    -114import 
    org.apache.hadoop.hbase.replication.ReplicationEndpoint;
    -115import 
    org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
    -116import 
    org.apache.hadoop.hbase.security.AccessDeniedException;
    -117import 
    org.apache.hadoop.hbase.security.Superusers;
    -118import 
    org.apache.hadoop.hbase.security.User;
    -119import 
    org.apache.hadoop.hbase.security.UserProvider;
    -120import 
    org.apache.hadoop.hbase.security.access.Permission.Action;
    -121import 
    org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
    -122import 
    org.apache.hadoop.hbase.util.ByteRange;
    -123import 
    org.apache.hadoop.hbase.util.Bytes;
    -124import 
    org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
    -125import 
    org.apache.hadoop.hbase.util.Pair;
    -126import 
    org.apache.hadoop.hbase.util.SimpleMutableByteRange;
    -127import 
    org.apache.hadoop.hbase.wal.WALEdit;
    -128import 
    org.apache.hadoop.hbase.zookeeper.ZKWatcher;
    -129import 
    org.apache.yetus.audience.InterfaceAudience;
    -130import org.slf4j.Logger;
    -131import org.slf4j.LoggerFactory;
    -132
    -133import 
    org.apache.hbase.thirdparty.com.google.common.collect.ArrayListMultimap;
    -134import 
    org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
    -135import 
    org.apache.hbase.thirdparty.com.google.common.collect.ListMultimap;
    -136import 
    org.apache.hbase.thirdparty.com.google.common.collect.Lists;
    -137import 
    org.apache.hbase.thirdparty.com.google.common.collect.MapMaker;
    -138import 
    org.apache.hbase.thirdparty.com.google.common.collect.Maps;
    -139import 
    org.apache.hbase.thirdparty.com.google.common.collect.Sets;
    -140
-141/**
-142 * Provides basic authorization checks for data access and administrative
-143 * operations.
-144 *
-145 * <p>
-146 * {@code AccessController} performs authorization checks for HBase operations
-147 * based on:
-148 * </p>
-149 * <ul>
-150 *   <li>the identity of the user performing the operation</li>
-151 *   <li>the scope over which the operation is performed, in increasing
-152 *   specificity: global, table, column family, or qualifier</li>
-153 *   <li>the type of action being performed (as mapped to
-154 *   {@link Permission.Action} values)</li>
-155 * </ul>
-156 * <p>
-157 * If the authorization check fails, an {@link AccessDeniedException}
-158 * will be thrown for the operation.
-159 * </p>
-160 *
-161 * <p>
-162 * To perform authorization checks, {@code AccessController} relies on the
-163 * RpcServerEngine being loaded to provide
-164 * the user identities for remote requests.
-165 * </p>
-166 *
-167 * <p>
-168 * The access control lists used for authorization can be manipulated via the
-169 * exposed {@link AccessControlService} Interface implementation, and the associated
-170 * {@code grant}, {@code revoke}, and {@code user_permission} HBase shell
-171 * commands.
-172 * </p>
-173 */
-174@CoreCoprocessor
-175@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
-176public class AccessController implements MasterCoprocessor, RegionCoprocessor,
-177    RegionServerCoprocessor, AccessControlService.Interface,
-178    MasterObserver, RegionObserver, RegionServerObserver, EndpointObserver,
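
The javadoc reduces every check to a (user, scope, action) triple, with scopes consulted in increasing specificity. A toy model of that lookup, with entirely hypothetical names — AccessController's real logic is far richer:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class ToyAcl {
  enum Action { READ, WRITE, CREATE, ADMIN }

  // key: user + "@" + scope (global, table, column family, or qualifier)
  private final Map<String, Set<Action>> grants = new HashMap<>();

  void grant(String user, String scope, Action action) {
    grants.computeIfAbsent(user + "@" + scope, k -> new HashSet<>()).add(action);
  }

  // Walk from the most specific scope up to global, mirroring the javadoc's
  // specificity list; in HBase a miss ends in an AccessDeniedException.
  boolean authorized(String user, String[] scopesMostSpecificFirst, Action action) {
    for (String scope : scopesMostSpecificFirst) {
      Set<Action> actions = grants.get(user + "@" + scope);
      if (actions != null && actions.contains(action)) {
        return true;
      }
    }
    return false;
  }

  public static void main(String[] args) {
    ToyAcl acl = new ToyAcl();
    acl.grant("alice", "table:t1", Action.WRITE);
    String[] scopes = { "table:t1/cf:q", "table:t1/cf", "table:t1", "global" };
    System.out.println(acl.authorized("alice", scopes, Action.WRITE)); // true
  }
}
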

    [10/51] [partial] hbase-site git commit: Published site at f3d1c021de2264301f68eadb9ef126ff83d7ef53.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/883dde2f/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html
    index 54b1f96..ed95cbf 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html
    @@ -31,922 +31,906 @@
     023import java.io.ByteArrayInputStream;
     024import java.io.IOException;
     025import java.util.ArrayList;
    -026import java.util.Arrays;
    -027import java.util.Collections;
    -028import java.util.HashMap;
    -029import java.util.HashSet;
    -030import java.util.LinkedList;
    -031import java.util.List;
    -032import java.util.Map;
    -033import java.util.NavigableSet;
    -034import java.util.Set;
    -035import java.util.SortedSet;
    -036import java.util.TreeSet;
    -037import 
    java.util.concurrent.atomic.AtomicBoolean;
    -038
    -039import 
    org.apache.hadoop.conf.Configuration;
    -040import org.apache.hadoop.hbase.Cell;
    -041import 
    org.apache.hadoop.hbase.CellUtil;
    -042import 
    org.apache.hadoop.hbase.Coprocessor;
    -043import 
    org.apache.hadoop.hbase.DoNotRetryIOException;
    -044import 
    org.apache.hadoop.hbase.HColumnDescriptor;
    -045import 
    org.apache.hadoop.hbase.HConstants;
    -046import 
    org.apache.hadoop.hbase.HTableDescriptor;
    -047import 
    org.apache.hadoop.hbase.MetaTableAccessor;
    -048import 
    org.apache.hadoop.hbase.MetaTableAccessor.DefaultVisitorBase;
    -049import 
    org.apache.hadoop.hbase.ServerName;
    -050import 
    org.apache.hadoop.hbase.TableName;
    -051import 
    org.apache.hadoop.hbase.client.ClusterConnection;
    -052import 
    org.apache.hadoop.hbase.client.Delete;
    -053import 
    org.apache.hadoop.hbase.client.Get;
    -054import 
    org.apache.hadoop.hbase.client.Mutation;
    -055import 
    org.apache.hadoop.hbase.client.Put;
    -056import 
    org.apache.hadoop.hbase.client.RegionInfo;
    -057import 
    org.apache.hadoop.hbase.client.Result;
    -058import 
    org.apache.hadoop.hbase.client.Scan;
    -059import 
    org.apache.hadoop.hbase.client.Table;
    -060import 
    org.apache.hadoop.hbase.client.TableState;
    -061import 
    org.apache.hadoop.hbase.constraint.ConstraintException;
    -062import 
    org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
    -063import 
    org.apache.hadoop.hbase.exceptions.DeserializationException;
    -064import 
    org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
    -065import 
    org.apache.hadoop.hbase.master.MasterServices;
    -066import 
    org.apache.hadoop.hbase.master.ServerListener;
    -067import 
    org.apache.hadoop.hbase.master.TableStateManager;
    -068import 
    org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
    -069import 
    org.apache.hadoop.hbase.net.Address;
    -070import 
    org.apache.hadoop.hbase.procedure2.Procedure;
    -071import 
    org.apache.hadoop.hbase.protobuf.ProtobufMagic;
    -072import 
    org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    -073import 
    org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
    -074import 
    org.apache.hadoop.hbase.protobuf.generated.RSGroupProtos;
    -075import 
    org.apache.hadoop.hbase.quotas.QuotaTableUtil;
    -076import 
    org.apache.hadoop.hbase.quotas.QuotaUtil;
    -077import 
    org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
    -078import 
    org.apache.hadoop.hbase.security.access.AccessControlLists;
    -079import 
    org.apache.hadoop.hbase.util.Bytes;
    -080import 
    org.apache.hadoop.hbase.zookeeper.ZKUtil;
    -081import 
    org.apache.hadoop.hbase.zookeeper.ZKWatcher;
    -082import 
    org.apache.hadoop.hbase.zookeeper.ZNodePaths;
    -083import 
    org.apache.yetus.audience.InterfaceAudience;
    -084import 
    org.apache.zookeeper.KeeperException;
    -085import org.slf4j.Logger;
    -086import org.slf4j.LoggerFactory;
    -087
    -088import 
    org.apache.hbase.thirdparty.com.google.common.collect.Lists;
    -089import 
    org.apache.hbase.thirdparty.com.google.common.collect.Maps;
    -090import 
    org.apache.hbase.thirdparty.com.google.common.collect.Sets;
    -091import 
    org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
    -092import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
    -093
-094/**
-095 * This is an implementation of {@link RSGroupInfoManager} which makes
-096 * use of an HBase table as the persistence store for the group information.
-097 * It also makes use of zookeeper to store group information needed
-098 * for bootstrapping during offline mode.
-099 *
-100 * <h2>Concurrency</h2>
-101 * RSGroup state is kept locally in Maps. There is a rsgroup name to cached
-102 * RSGroupInfo Map at {@link #rsGroupMap} and a Map of tables to the name of the
-103 * rsgroup they belong to (in {@link #tableMap}). These Maps are persisted to the
-104 * hbase:rsgroup table (and cached in zk) on each modification.
-105 *
-106 * <p>Mutations on state are synchronized but reads can continue without having
-107 * to wait on an instance monitor, mutations do wholesale
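
One common way to get "synchronized mutations, monitor-free reads" with wholesale replacement, as this javadoc describes, is to publish immutable snapshots through a volatile field. A minimal sketch under that assumption — the class below is hypothetical, not the real RSGroupInfoManagerImpl:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class GroupState {
  private volatile Map<String, String> tableToGroup = Collections.emptyMap();

  // Reads never block: they see whichever immutable snapshot is current.
  String groupOf(String table) {
    return tableToGroup.get(table);
  }

  // Mutations are wholesale replacements done under the instance monitor; the real
  // class also persists to the hbase:rsgroup table and zookeeper before publishing.
  synchronized void assign(String table, String group) {
    Map<String, String> next = new HashMap<>(tableToGroup);
    next.put(table, group);
    tableToGroup = Collections.unmodifiableMap(next);
  }
}
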

    [10/51] [partial] hbase-site git commit: Published site at cf529f18a9959589fa635f78df4840472526ea2c.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7bcc960d/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncSequentialReadTest.html
    --
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncSequentialReadTest.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncSequentialReadTest.html
index 3f8844b..cdb9398 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncSequentialReadTest.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncSequentialReadTest.html
@@ -140,2712 +140,2713 @@
 132public class PerformanceEvaluation extends Configured implements Tool {
 133  static final String RANDOM_SEEK_SCAN = "randomSeekScan";
 134  static final String RANDOM_READ = "randomRead";
-135  private static final Logger LOG = LoggerFactory.getLogger(PerformanceEvaluation.class.getName());
-136  private static final ObjectMapper MAPPER = new ObjectMapper();
-137  static {
-138    MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
-139  }
-140
-141  public static final String TABLE_NAME = "TestTable";
-142  public static final String FAMILY_NAME_BASE = "info";
-143  public static final byte[] FAMILY_ZERO = Bytes.toBytes("info0");
-144  public static final byte[] COLUMN_ZERO = Bytes.toBytes("" + 0);
-145  public static final int DEFAULT_VALUE_LENGTH = 1000;
-146  public static final int ROW_LENGTH = 26;
-147
-148  private static final int ONE_GB = 1024 * 1024 * 1000;
-149  private static final int DEFAULT_ROWS_PER_GB = ONE_GB / DEFAULT_VALUE_LENGTH;
-150  // TODO : should we make this configurable
-151  private static final int TAG_LENGTH = 256;
-152  private static final DecimalFormat FMT = new DecimalFormat("0.##");
-153  private static final MathContext CXT = MathContext.DECIMAL64;
-154  private static final BigDecimal MS_PER_SEC = BigDecimal.valueOf(1000);
-155  private static final BigDecimal BYTES_PER_MB = BigDecimal.valueOf(1024 * 1024);
-156  private static final TestOptions DEFAULT_OPTS = new TestOptions();
-157
-158  private static Map<String, CmdDescriptor> COMMANDS = new TreeMap<>();
-159  private static final Path PERF_EVAL_DIR = new Path("performance_evaluation");
-160
-161  static {
-162    addCommandDescriptor(AsyncRandomReadTest.class, "asyncRandomRead",
-163      "Run async random read test");
-164    addCommandDescriptor(AsyncRandomWriteTest.class, "asyncRandomWrite",
-165      "Run async random write test");
-166    addCommandDescriptor(AsyncSequentialReadTest.class, "asyncSequentialRead",
-167      "Run async sequential read test");
-168    addCommandDescriptor(AsyncSequentialWriteTest.class, "asyncSequentialWrite",
-169      "Run async sequential write test");
-170    addCommandDescriptor(AsyncScanTest.class, "asyncScan",
-171      "Run async scan test (read every row)");
-172    addCommandDescriptor(RandomReadTest.class, RANDOM_READ,
-173      "Run random read test");
-174    addCommandDescriptor(RandomSeekScanTest.class, RANDOM_SEEK_SCAN,
-175      "Run random seek and scan 100 test");
-176    addCommandDescriptor(RandomScanWithRange10Test.class, "scanRange10",
-177      "Run random seek scan with both start and stop row (max 10 rows)");
-178    addCommandDescriptor(RandomScanWithRange100Test.class, "scanRange100",
-179      "Run random seek scan with both start and stop row (max 100 rows)");
-180    addCommandDescriptor(RandomScanWithRange1000Test.class, "scanRange1000",
-181      "Run random seek scan with both start and stop row (max 1000 rows)");
-182    addCommandDescriptor(RandomScanWithRange1Test.class, "scanRange1",
-183      "Run random seek scan with both start and stop row (max 1 rows)");
-184    addCommandDescriptor(RandomWriteTest.class, "randomWrite",
-185      "Run random write test");
-186    addCommandDescriptor(SequentialReadTest.class, "sequentialRead",
-187      "Run sequential read test");
-188    addCommandDescriptor(SequentialWriteTest.class, "sequentialWrite",
-189      "Run sequential write test");
-190    addCommandDescriptor(ScanTest.class, "scan",
-191      "Run scan test (read every row)");
-192    addCommandDescriptor(FilteredScanTest.class, "filterScan",
-193      "Run scan test using a filter to find a specific row based on its value " +
-194      "(make sure to use --rows=20)");
-195    addCommandDescriptor(IncrementTest.class, "increment",
-196      "Increment on each row; clients overlap on keyspace so some concurrent operations");
-197    addCommandDescriptor(AppendTest.class, "append",
-198      "Append on each row; clients overlap on keyspace so some concurrent operations");
-199    addCommandDescriptor(CheckAndMutateTest.class, "checkAndMutate",
-200      "CheckAndMutate on each row; clients overlap on keyspace so some concurrent operations");
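
COMMANDS above is a name-sorted TreeMap populated by addCommandDescriptor calls in a static block. A stripped-down version of that registration pattern, with CmdDescriptor's shape assumed from how it is invoked:

import java.util.Map;
import java.util.TreeMap;

public class CommandRegistry {
  static final class CmdDescriptor {
    final Class<?> cmdClass;
    final String name;
    final String description;
    CmdDescriptor(Class<?> cmdClass, String name, String description) {
      this.cmdClass = cmdClass;
      this.name = name;
      this.description = description;
    }
  }

  private static final Map<String, CmdDescriptor> COMMANDS = new TreeMap<>();

  static void addCommandDescriptor(Class<?> cmdClass, String name, String description) {
    COMMANDS.put(name, new CmdDescriptor(cmdClass, name, description));
  }

  public static void main(String[] args) {
    addCommandDescriptor(Runnable.class, "randomRead", "Run random read test");
    addCommandDescriptor(Runnable.class, "sequentialWrite", "Run sequential write test");
    // TreeMap iteration is sorted by command name, which keeps usage listings stable.
    COMMANDS.forEach((name, d) -> System.out.println(name + ": " + d.description));
  }
}
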
    

    [10/51] [partial] hbase-site git commit: Published site at 021f66d11d2cbb7308308093e29e69d6e7661ee9.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/92a26cfb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushQueueEntry.html
    --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushQueueEntry.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushQueueEntry.html
index 4c42811..0bc3ddb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushQueueEntry.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushQueueEntry.html
@@ -563,381 +563,390 @@
 555        // If this is first time we've been put off, then emit a log message.
 556        if (fqe.getRequeueCount() <= 0) {
 557          // Note: We don't impose blockingStoreFiles constraint on meta regions
-558          LOG.warn("Region " + region.getRegionInfo().getEncodedName() + " has too many " +
-559            "store files; delaying flush up to " + this.blockingWaitTime + "ms");
-560          if (!this.server.compactSplitThread.requestSplit(region)) {
-561            try {
-562              this.server.compactSplitThread.requestSystemCompaction(region,
-563                Thread.currentThread().getName());
-564            } catch (IOException e) {
-565              e = e instanceof RemoteException ?
-566                  ((RemoteException)e).unwrapRemoteException() : e;
-567              LOG.error("Cache flush failed for region " +
-568                Bytes.toStringBinary(region.getRegionInfo().getRegionName()), e);
-569            }
-570          }
-571        }
-572
-573        // Put back on the queue.  Have it come back out of the queue
-574        // after a delay of this.blockingWaitTime / 100 ms.
-575        this.flushQueue.add(fqe.requeue(this.blockingWaitTime / 100));
-576        // Tell a lie, it's not flushed but it's ok
-577        return true;
-578      }
-579    }
-580    return flushRegion(region, false, fqe.isForceFlushAllStores(), fqe.getTracker());
-581  }
-582
-583  /**
-584   * Flush a region.
-585   * @param region Region to flush.
-586   * @param emergencyFlush Set if we are being force flushed. If true the region
-587   * needs to be removed from the flush queue. If false, when we were called
-588   * from the main flusher run loop and we got the entry to flush by calling
-589   * poll on the flush queue (which removed it).
-590   * @param forceFlushAllStores whether we want to flush all store.
-591   * @return true if the region was successfully flushed, false otherwise. If
-592   * false, there will be accompanying log messages explaining why the region was
-593   * not flushed.
-594   */
-595  private boolean flushRegion(HRegion region, boolean emergencyFlush, boolean forceFlushAllStores,
-596      FlushLifeCycleTracker tracker) {
-597    synchronized (this.regionsInQueue) {
-598      FlushRegionEntry fqe = this.regionsInQueue.remove(region);
-599      // Use the start time of the FlushRegionEntry if available
-600      if (fqe != null && emergencyFlush) {
-601        // Need to remove from region from delay queue. When NOT an
-602        // emergencyFlush, then item was removed via a flushQueue.poll.
-603        flushQueue.remove(fqe);
-604      }
-605    }
-606
-607    tracker.beforeExecution();
-608    lock.readLock().lock();
-609    try {
-610      notifyFlushRequest(region, emergencyFlush);
-611      FlushResult flushResult = region.flushcache(forceFlushAllStores, false, tracker);
-612      boolean shouldCompact = flushResult.isCompactionNeeded();
-613      // We just want to check the size
-614      boolean shouldSplit = region.checkSplit() != null;
-615      if (shouldSplit) {
-616        this.server.compactSplitThread.requestSplit(region);
-617      } else if (shouldCompact) {
-618        server.compactSplitThread.requestSystemCompaction(region, Thread.currentThread().getName());
-619      }
-620    } catch (DroppedSnapshotException ex) {
-621      // Cache flush can fail in a few places. If it fails in a critical
-622      // section, we get a DroppedSnapshotException and a replay of wal
-623      // is required. Currently the only way to do this is a restart of
-624      // the server. Abort because hdfs is probably bad (HBASE-644 is a case
-625      // where hdfs was bad but passed the hdfs check).
-626      server.abort("Replay of WAL required. Forcing server shutdown", ex);
-627      return false;
-628    } catch (IOException ex) {
-629      ex = ex instanceof RemoteException ? ((RemoteException) ex).unwrapRemoteException() : ex;
-630      LOG.error(
-631        "Cache flush failed"
-632          + (region != null ? (" for region " +
-633              Bytes.toStringBinary(region.getRegionInfo().getRegionName()))
-634            : ""), ex);
-635      if (!server.checkFileSystem()) {
-636        return false;
-637  }
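
The requeue above puts the entry back with a delay of blockingWaitTime / 100 ms rather than spinning. The standard JDK building block for that is a DelayQueue of entries carrying a wake-up deadline; a minimal sketch, with names and the 90-second default purely illustrative:

import java.util.concurrent.DelayQueue;
import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;

public class FlushEntry implements Delayed {
  private long wakeUpAtMillis = System.currentTimeMillis();
  private int requeueCount = -1; // stays <= 0 until the first requeue, as checked above

  FlushEntry requeue(long delayMillis) {
    wakeUpAtMillis = System.currentTimeMillis() + delayMillis;
    requeueCount++;
    return this;
  }

  int getRequeueCount() { return requeueCount; }

  @Override
  public long getDelay(TimeUnit unit) {
    return unit.convert(wakeUpAtMillis - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
  }

  @Override
  public int compareTo(Delayed other) {
    return Long.compare(getDelay(TimeUnit.MILLISECONDS), other.getDelay(TimeUnit.MILLISECONDS));
  }

  public static void main(String[] args) throws InterruptedException {
    DelayQueue<FlushEntry> flushQueue = new DelayQueue<>();
    long blockingWaitTime = 90_000;
    flushQueue.add(new FlushEntry().requeue(blockingWaitTime / 100)); // back off ~900 ms
    FlushEntry e = flushQueue.take(); // blocks until the delay expires
    System.out.println("woke after requeue #" + e.getRequeueCount());
  }
}
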
    

    [10/51] [partial] hbase-site git commit: Published site at acd0d1e446c164d9c54bfb461b2d449c8d717c07.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f2065178/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange10Test.html
    --
    diff --git 
    a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange10Test.html
     
    b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange10Test.html
    index 2510283..418c60c 100644
    --- 
    a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange10Test.html
    +++ 
    b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange10Test.html
    @@ -77,77 +77,77 @@
     069import 
    org.apache.hadoop.hbase.client.RowMutations;
     070import 
    org.apache.hadoop.hbase.client.Scan;
     071import 
    org.apache.hadoop.hbase.client.Table;
    -072import 
    org.apache.hadoop.hbase.filter.BinaryComparator;
    -073import 
    org.apache.hadoop.hbase.filter.Filter;
    -074import 
    org.apache.hadoop.hbase.filter.FilterAllFilter;
    -075import 
    org.apache.hadoop.hbase.filter.FilterList;
    -076import 
    org.apache.hadoop.hbase.filter.PageFilter;
    -077import 
    org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
    -078import 
    org.apache.hadoop.hbase.filter.WhileMatchFilter;
    -079import 
    org.apache.hadoop.hbase.io.compress.Compression;
    -080import 
    org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
    -081import 
    org.apache.hadoop.hbase.io.hfile.RandomDistribution;
    -082import 
    org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
    -083import 
    org.apache.hadoop.hbase.regionserver.BloomType;
    -084import 
    org.apache.hadoop.hbase.regionserver.CompactingMemStore;
    -085import 
    org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
    -086import 
    org.apache.hadoop.hbase.trace.SpanReceiverHost;
    -087import 
    org.apache.hadoop.hbase.trace.TraceUtil;
    -088import 
    org.apache.hadoop.hbase.util.ByteArrayHashKey;
    -089import 
    org.apache.hadoop.hbase.util.Bytes;
    -090import 
    org.apache.hadoop.hbase.util.Hash;
    -091import 
    org.apache.hadoop.hbase.util.MurmurHash;
    -092import 
    org.apache.hadoop.hbase.util.Pair;
    -093import 
    org.apache.hadoop.hbase.util.YammerHistogramUtils;
    -094import 
    org.apache.hadoop.io.LongWritable;
    -095import org.apache.hadoop.io.Text;
    -096import org.apache.hadoop.mapreduce.Job;
    -097import 
    org.apache.hadoop.mapreduce.Mapper;
    -098import 
    org.apache.hadoop.mapreduce.lib.input.NLineInputFormat;
    -099import 
    org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
    -100import 
    org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
    -101import org.apache.hadoop.util.Tool;
    -102import 
    org.apache.hadoop.util.ToolRunner;
    -103import 
    org.apache.htrace.core.ProbabilitySampler;
    -104import org.apache.htrace.core.Sampler;
    -105import 
    org.apache.htrace.core.TraceScope;
    -106import 
    org.apache.yetus.audience.InterfaceAudience;
    -107import org.slf4j.Logger;
    -108import org.slf4j.LoggerFactory;
    -109import 
    org.apache.hbase.thirdparty.com.google.common.base.MoreObjects;
    -110import 
    org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
    -111
-112/**
-113 * Script used evaluating HBase performance and scalability.  Runs a HBase
-114 * client that steps through one of a set of hardcoded tests or 'experiments'
-115 * (e.g. a random reads test, a random writes test, etc.). Pass on the
-116 * command-line which test to run and how many clients are participating in
-117 * this experiment. Run {@code PerformanceEvaluation --help} to obtain usage.
-118 *
-119 * <p>This class sets up and runs the evaluation programs described in
-120 * Section 7, <i>Performance Evaluation</i>, of the <a
-121 * href="http://labs.google.com/papers/bigtable.html">Bigtable</a>
-122 * paper, pages 8-10.
-123 *
-124 * <p>By default, runs as a mapreduce job where each mapper runs a single test
-125 * client. Can also run as a non-mapreduce, multithreaded application by
-126 * specifying {@code --nomapred}. Each client does about 1GB of data, unless
-127 * specified otherwise.
-128 */
-129@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-130public class PerformanceEvaluation extends Configured implements Tool {
-131  static final String RANDOM_SEEK_SCAN = "randomSeekScan";
-132  static final String RANDOM_READ = "randomRead";
-133  private static final Logger LOG = LoggerFactory.getLogger(PerformanceEvaluation.class.getName());
-134  private static final ObjectMapper MAPPER = new ObjectMapper();
-135  static {
-136    MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
-137  }
-138
-139  public static final String TABLE_NAME = "TestTable";
-140  public static final byte[] FAMILY_NAME = Bytes.toBytes("info");
-141  public static final byte [] COLUMN_ZERO = Bytes.toBytes("" + 0);
-142  public static final byte [] QUALIFIER_NAME = COLUMN_ZERO;
+072import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+073import org.apache.hadoop.hbase.filter.BinaryComparator;
+074import
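
Since PerformanceEvaluation extends Configured and implements Tool (see the class declaration above), it can be driven programmatically as well as from the command line. A sketch under the standard Tool contract — the exact constructor and option spellings should be checked against the release in use:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.PerformanceEvaluation;
import org.apache.hadoop.util.ToolRunner;

public class RunPe {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // --nomapred: multithreaded client instead of a MapReduce job (see javadoc above).
    String[] peArgs = { "--nomapred", "--rows=100000", "randomRead", "4" };
    int rc = ToolRunner.run(conf, new PerformanceEvaluation(conf), peArgs);
    System.exit(rc);
  }
}
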
    

    [10/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface.html
    --
    diff --git 
    a/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface.html
     
    b/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface.html
    deleted file mode 100644
    index adbfc60..000
    --- 
    a/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface.html
    +++ /dev/null
    @@ -1,757 +0,0 @@
-TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface (Apache HBase 3.0.0-SNAPSHOT Test API)
-[standard javadoc page boilerplate: doctype, embedded scripts, and navigation links elided]
-org.apache.hadoop.hbase.replication.regionserver
-Class TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface
-
-java.lang.Object
-  org.apache.hadoop.hbase.replication.regionserver.TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface
-
-All Implemented Interfaces:
-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface
-
-Enclosing class:
-TestReplicator.FailureInjectingReplicationEndpointForTest
-
-static class TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface
-extends java.lang.Object
-implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface
-
-Field Summary
-
-private org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface delegate
-private boolean failNext
-
-Constructor Summary
-
-FailureInjectingBlockingInterface(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface delegate)
-
-Method Summary (All Methods / Instance Methods / Concrete Methods)
-
-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesResponse
-clearCompactionQueues(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
-    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest request)
-
-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheResponse
-clearRegionBlockCache(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
-    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheRequest request)
-
-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse
-closeRegion(org.apache.hbase.thirdparty.com.google.protobuf.RpcController controller,
-    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest request)
-
-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse
    

    [10/51] [partial] hbase-site git commit: Published site at 2912c953551bedbfbf30c32c156ed7bb187d54c3.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d220bc5e/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
    --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
index 8302e28..c370eb9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
@@ -2113,3031 +2113,3033 @@
 2105              errors.reportError(ERROR_CODE.ORPHAN_TABLE_STATE,
 2106                  tableName + " unable to delete dangling table state " + tableState);
 2107            }
-2108        } else {
-2109          errors.reportError(ERROR_CODE.ORPHAN_TABLE_STATE,
-2110              tableName + " has dangling table state " + tableState);
-2111        }
-2112      }
-2113    }
-2114    // check that all tables have states
-2115    for (TableName tableName : tablesInfo.keySet()) {
-2116      if (isTableIncluded(tableName) && !tableStates.containsKey(tableName)) {
-2117        if (fixMeta) {
-2118          MetaTableAccessor.updateTableState(connection, tableName, TableState.State.ENABLED);
-2119          TableState newState = MetaTableAccessor.getTableState(connection, tableName);
-2120          if (newState == null) {
-2121            errors.reportError(ERROR_CODE.NO_TABLE_STATE,
-2122                "Unable to change state for table " + tableName + " in meta ");
-2123          }
-2124        } else {
-2125          errors.reportError(ERROR_CODE.NO_TABLE_STATE,
-2126              tableName + " has no state in meta ");
-2127        }
-2128      }
-2129    }
-2130  }
-2131
-2132  private void preCheckPermission() throws IOException, AccessDeniedException {
-2133    if (shouldIgnorePreCheckPermission()) {
-2134      return;
-2135    }
-2136
-2137    Path hbaseDir = FSUtils.getRootDir(getConf());
-2138    FileSystem fs = hbaseDir.getFileSystem(getConf());
-2139    UserProvider userProvider = UserProvider.instantiate(getConf());
-2140    UserGroupInformation ugi = userProvider.getCurrent().getUGI();
-2141    FileStatus[] files = fs.listStatus(hbaseDir);
-2142    for (FileStatus file : files) {
-2143      try {
-2144        FSUtils.checkAccess(ugi, file, FsAction.WRITE);
-2145      } catch (AccessDeniedException ace) {
-2146        LOG.warn("Got AccessDeniedException when preCheckPermission ", ace);
-2147        errors.reportError(ERROR_CODE.WRONG_USAGE, "Current user " + ugi.getUserName()
-2148          + " does not have write perms to " + file.getPath()
-2149          + ". Please rerun hbck as hdfs user " + file.getOwner());
-2150        throw ace;
-2151      }
-2152    }
-2153  }
-2154
-2155  /**
-2156   * Deletes region from meta table
-2157   */
-2158  private void deleteMetaRegion(HbckInfo hi) throws IOException {
-2159    deleteMetaRegion(hi.metaEntry.getRegionName());
-2160  }
-2161
-2162  /**
-2163   * Deletes region from meta table
-2164   */
-2165  private void deleteMetaRegion(byte[] metaKey) throws IOException {
-2166    Delete d = new Delete(metaKey);
-2167    meta.delete(d);
-2168    LOG.info("Deleted " + Bytes.toString(metaKey) + " from META" );
-2169  }
-2170
-2171  /**
-2172   * Reset the split parent region info in meta table
-2173   */
-2174  private void resetSplitParent(HbckInfo hi) throws IOException {
-2175    RowMutations mutations = new RowMutations(hi.metaEntry.getRegionName());
-2176    Delete d = new Delete(hi.metaEntry.getRegionName());
-2177    d.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITA_QUALIFIER);
-2178    d.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITB_QUALIFIER);
-2179    mutations.add(d);
-2180
-2181    RegionInfo hri = RegionInfoBuilder.newBuilder(hi.metaEntry)
-2182        .setOffline(false)
-2183        .setSplit(false)
-2184        .build();
-2185    Put p = MetaTableAccessor.makePutFromRegionInfo(hri, EnvironmentEdgeManager.currentTime());
-2186    mutations.add(p);
-2187
-2188    meta.mutateRow(mutations);
-2189    LOG.info("Reset split parent " + hi.metaEntry.getRegionNameAsString() + " in META" );
-2190  }
-2191
-2192  /**
-2193   * This backwards-compatibility wrapper for permanently offlining a region
-2194   * that should not be alive.  If the region server does not support the
-2195   * "offline" method, it will use the closest unassign method instead.  This
-2196   * will basically work until one attempts to disable or delete the affected
-2197   * table.  The problem has to do with in-memory only master state, so
-2198   * restarting the HMaster or failing over to another should fix this.
-2199   */
-2200  private void offline(byte[] regionName) throws IOException {
-2201    String regionString = Bytes.toStringBinary(regionName);
-2202    if
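
resetSplitParent above is a textbook use of RowMutations: a Delete of the SPLITA/SPLITB qualifiers and a Put of the rebuilt RegionInfo are applied to the catalog row as one atomic mutateRow. The general shape, with placeholder family and qualifier names:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class AtomicRowUpdate {
  static void resetColumns(Table table, byte[] row) throws IOException {
    RowMutations mutations = new RowMutations(row);

    Delete d = new Delete(row);
    d.addColumn(Bytes.toBytes("info"), Bytes.toBytes("splitA")); // drop stale cells
    d.addColumn(Bytes.toBytes("info"), Bytes.toBytes("splitB"));
    mutations.add(d);

    Put p = new Put(row);
    p.addColumn(Bytes.toBytes("info"), Bytes.toBytes("state"), Bytes.toBytes("online"));
    mutations.add(p);

    table.mutateRow(mutations); // the delete and the put succeed or fail together
  }
}
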

    [10/51] [partial] hbase-site git commit: Published site at 2a2258656b2fcd92b967131b6c1f037363553bc4.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e0fb1fde/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.Bucket.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.Bucket.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.Bucket.html
    index b035b7c..29f1b92 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.Bucket.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.Bucket.html
@@ -37,19 +37,19 @@
 029import java.util.Set;
 030import java.util.concurrent.atomic.LongAdder;
 031
-032import org.apache.hbase.thirdparty.com.google.common.collect.MinMaxPriorityQueue;
-033import org.apache.commons.collections4.map.LinkedMap;
-034import org.apache.yetus.audience.InterfaceAudience;
-035import org.slf4j.Logger;
-036import org.slf4j.LoggerFactory;
-037import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
-038import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-039import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry;
-040import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
-041
-042import org.apache.hbase.thirdparty.com.google.common.base.MoreObjects;
-043import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-044import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;
+032import org.apache.yetus.audience.InterfaceAudience;
+033import org.slf4j.Logger;
+034import org.slf4j.LoggerFactory;
+035import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
+036import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+037import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry;
+038import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+039
+040import org.apache.hbase.thirdparty.com.google.common.base.MoreObjects;
+041import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+042import org.apache.hbase.thirdparty.com.google.common.collect.MinMaxPriorityQueue;
+043import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;
+044import org.apache.hbase.thirdparty.org.apache.commons.collections4.map.LinkedMap;
 045
 046/**
 047 * This class is used to allocate a block with specified size and free the block
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e0fb1fde/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.BucketSizeInfo.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.BucketSizeInfo.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.BucketSizeInfo.html
    index b035b7c..29f1b92 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.BucketSizeInfo.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.BucketSizeInfo.html
@@ -37,19 +37,19 @@
 029import java.util.Set;
 030import java.util.concurrent.atomic.LongAdder;
 031
-032import org.apache.hbase.thirdparty.com.google.common.collect.MinMaxPriorityQueue;
-033import org.apache.commons.collections4.map.LinkedMap;
-034import org.apache.yetus.audience.InterfaceAudience;
-035import org.slf4j.Logger;
-036import org.slf4j.LoggerFactory;
-037import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
-038import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-039import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry;
-040import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
-041
-042import org.apache.hbase.thirdparty.com.google.common.base.MoreObjects;
-043import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-044import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;
+032import org.apache.yetus.audience.InterfaceAudience;
+033import org.slf4j.Logger;
+034import org.slf4j.LoggerFactory;
+035import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
+036import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+037import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry;
+038import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+039
+040import org.apache.hbase.thirdparty.com.google.common.base.MoreObjects;
+041import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+042import org.apache.hbase.thirdparty.com.google.common.collect.MinMaxPriorityQueue;
+043import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;
+044import org.apache.hbase.thirdparty.org.apache.commons.collections4.map.LinkedMap;
 045
 046/**
 047 * This class is used to allocate a block with specified size and free the block
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e0fb1fde/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html
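The class comment in the hunks above describes BucketAllocator's job: hand out a block of a requested size and take it back on free. As a rough illustration of the size-class idea only, here is a toy allocator (a sketch, not HBase's BucketAllocator, which additionally tracks per-bucket statistics and completely-free buckets):

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.Map;

/** Toy size-class allocator: hands out fixed-size slots and recycles freed ones. */
public class ToyBucketAllocator {
  private final int[] sizeClasses = {4096, 8192, 16384};        // illustrative bucket sizes
  private final Map<Integer, Deque<Long>> freeLists = new HashMap<>();
  private long nextOffset = 0;

  public synchronized long allocate(int blockSize) {
    int bucket = pickBucket(blockSize);
    Deque<Long> free = freeLists.computeIfAbsent(bucket, b -> new ArrayDeque<>());
    if (!free.isEmpty()) {
      return free.pop();                                        // reuse a freed slot
    }
    long offset = nextOffset;
    nextOffset += bucket;                                       // carve a new slot
    return offset;
  }

  public synchronized void free(int blockSize, long offset) {
    // return the slot to the free list of its size class
    freeLists.computeIfAbsent(pickBucket(blockSize), b -> new ArrayDeque<>()).push(offset);
  }

  private int pickBucket(int blockSize) {
    for (int size : sizeClasses) {
      if (blockSize <= size) {
        return size;                                            // smallest class that fits
      }
    }
    throw new IllegalArgumentException("block too large: " + blockSize);
  }
}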
    

    [10/51] [partial] hbase-site git commit: Published site at e468b4022f76688851b3e0c34722f01a56bd624f.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16541468/devapidocs/org/apache/hadoop/hbase/client/AsyncTable.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncTable.html 
    b/devapidocs/org/apache/hadoop/hbase/client/AsyncTable.html
    index a7f86c2..589dbb3 100644
    --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncTable.html
    +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncTable.html
@@ -106,7 +106,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public interface AsyncTable<C extends ScanResultConsumerBase>
+public interface AsyncTable<C extends ScanResultConsumerBase>
 The interface for asynchronous version of Table. Obtain an instance from a
 AsyncConnection.
 
@@ -409,7 +409,7 @@ public interface
 
 getName
-TableName getName()
+TableName getName()
 Gets the fully qualified table name instance of this table.
 
 
@@ -419,7 +419,7 @@ public interface
 
 getConfiguration
-org.apache.hadoop.conf.Configuration getConfiguration()
+org.apache.hadoop.conf.Configuration getConfiguration()
 Returns the Configuration object used by this instance.
 
 The reference returned is not a copy, so any change made to it will affect this instance.
@@ -431,7 +431,7 @@ public interface
 
 getRpcTimeout
-long getRpcTimeout(TimeUnit unit)
+long getRpcTimeout(TimeUnit unit)
 Get timeout of each rpc request in this Table instance. It will be overridden by a more
 specific rpc timeout config such as readRpcTimeout or writeRpcTimeout.
 
@@ -451,7 +451,7 @@ public interface
 
 getReadRpcTimeout
-long getReadRpcTimeout(TimeUnit unit)
+long getReadRpcTimeout(TimeUnit unit)
 Get timeout of each rpc read request in this Table instance.
 
 Parameters:
@@ -467,7 +467,7 @@ public interface
 
 getWriteRpcTimeout
-long getWriteRpcTimeout(TimeUnit unit)
+long getWriteRpcTimeout(TimeUnit unit)
 Get timeout of each rpc write request in this Table instance.
 
 Parameters:
@@ -483,7 +483,7 @@ public interface
 
 getOperationTimeout
-long getOperationTimeout(TimeUnit unit)
+long getOperationTimeout(TimeUnit unit)
 Get timeout of each operation in Table instance.
 
 Parameters:
@@ -499,7 +499,7 @@ public interface
 
 getScanTimeout
-long getScanTimeout(TimeUnit unit)
+long getScanTimeout(TimeUnit unit)
 Get the timeout of a single operation in a scan. It works like operation timeout for other
 operations.
 
@@ -516,7 +516,7 @@ public interface
 
 exists
-default CompletableFuture<Boolean> exists(Get get)
+default CompletableFuture<Boolean> exists(Get get)
 Test for the existence of columns in the table, as specified by the Get.
 
 This will return true if the Get matches one or more keys, false if not.
@@ -535,7 +535,7 @@ public interface
 
 get
-CompletableFuture<Result> get(Get get)
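For the AsyncTable entry points documented above, a brief usage sketch may help. It assumes a reachable cluster and an existing table named "t1" (both illustrative), and uses ConnectionFactory.createAsyncConnection from the same client API:

import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.AsyncTable;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.util.Bytes;

public class AsyncGetExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // createAsyncConnection itself returns a CompletableFuture<AsyncConnection>.
    try (AsyncConnection conn = ConnectionFactory.createAsyncConnection(conf).get()) {
      AsyncTable<?> table = conn.getTable(TableName.valueOf("t1")); // "t1" is illustrative
      Get get = new Get(Bytes.toBytes("row1"));
      // exists() completes without transferring cell data; chain work instead of blocking.
      CompletableFuture<Boolean> found = table.exists(get);
      found.thenAccept(b -> System.out.println("row1 present: " + b)).get();
    }
  }
}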
    

    [10/51] [partial] hbase-site git commit: Published site at 64061f896fe21512504e3886a400759e88b519da.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f1ebf5b6/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
    b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
    index dc4b7bd..41b2105 100644
    --- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
    +++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -495,15 +495,15 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 static Filter
-ColumnPrefixFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+SingleColumnValueExcludeFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-ColumnCountGetFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+ValueFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-RowFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+FamilyFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
@@ -513,69 +513,69 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 static Filter
-FirstKeyOnlyFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+ColumnPrefixFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-TimestampsFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+PageFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-ValueFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+RowFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-KeyOnlyFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+ColumnRangeFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-FamilyFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+ColumnCountGetFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-QualifierFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+MultipleColumnPrefixFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-ColumnRangeFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+ColumnPaginationFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
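Each createFilterFromArguments(ArrayList<byte[]>) factory in this table backs HBase's string filter language. A small sketch of the user-facing side via ParseFilter, which tokenizes an expression and dispatches to these factories (the filter expression itself is illustrative):

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.ParseFilter;

public class FilterFromString {
  public static void main(String[] args) throws Exception {
    // ParseFilter parses the expression and calls each filter's static
    // createFilterFromArguments(ArrayList<byte[]>) factory under the hood.
    Filter filter = new ParseFilter().parseFilterString("PageFilter(10) AND PrefixFilter('row')");
    Scan scan = new Scan();
    scan.setFilter(filter);   // attach to a scan as usual
  }
}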
    

    [10/51] [partial] hbase-site git commit: Published site at 4cb40e6d846ce1f28ffb40d388c9efb753197813.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4dc2a2e8/devapidocs/org/apache/hadoop/hbase/coprocessor/example/DelegatingInternalScanner.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/coprocessor/example/DelegatingInternalScanner.html
     
    b/devapidocs/org/apache/hadoop/hbase/coprocessor/example/DelegatingInternalScanner.html
    index 8e95a5a..d656137 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/coprocessor/example/DelegatingInternalScanner.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/coprocessor/example/DelegatingInternalScanner.html
@@ -113,7 +113,8 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class DelegatingInternalScanner
+@InterfaceAudience.Private
+public class DelegatingInternalScanner
 extends Object
 implements InternalScanner
 A simple delegation for doing filtering on InternalScanner.
@@ -219,7 +220,7 @@ implements
 
 scanner
-protected final InternalScanner scanner
+protected final InternalScanner scanner
 
 
 
@@ -236,7 +237,7 @@ implements
 
 DelegatingInternalScanner
-public DelegatingInternalScanner(InternalScanner scanner)
+public DelegatingInternalScanner(InternalScanner scanner)
 
 
 
@@ -253,7 +254,7 @@ implements
 
 next
-public boolean next(List<Cell> result,
+public boolean next(List<Cell> result,
                     ScannerContext scannerContext)
              throws IOException
 Description copied from interface: InternalScanner
@@ -276,7 +277,7 @@ implements
 
 close
-public void close()
+public void close()
            throws IOException
 Description copied from interface: InternalScanner
 Closes the scanner and releases any resources it has allocated
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4dc2a2e8/devapidocs/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.html
     
    b/devapidocs/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.html
    index 0bcd0a5..1cef255 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.html
@@ -113,7 +113,8 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class ExampleMasterObserverWithMetrics
+@InterfaceAudience.Private
+public class ExampleMasterObserverWithMetrics
 extends Object
 implements MasterCoprocessor, MasterObserver
 An example coprocessor that collects some metrics to demonstrate the usage of exporting custom
@@ -309,7 +310,7 @@ implements
 
 LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG
 
 
@@ -318,7 +319,7 @@ implements
 
 createTableTimer
-private Timer createTableTimer
+private Timer createTableTimer
 This is the Timer metric object to keep track of the current count across invocations
 
 
@@ -328,7 +329,7 @@ implements
 
 createTableStartTime
-private long createTableStartTime
+private long createTableStartTime
 
 
@@ -337,7 +338,7 @@ implements
 
 disableTableCounter
-private Counter disableTableCounter
+private Counter disableTableCounter
 This is a Counter object to keep track of disableTable operations
 
 
@@ -355,7 +356,7 @@ implements
 
 ExampleMasterObserverWithMetrics
-public ExampleMasterObserverWithMetrics()
+public ExampleMasterObserverWithMetrics()
 
 
@@ -372,7 +373,7 @@ implements
 
 getMasterObserver
-public Optional<MasterObserver> getMasterObserver()
+public Optional<MasterObserver> getMasterObserver()
 
 Specified by:
 getMasterObserver in interface MasterCoprocessor
@@ -385,7 +386,7 @@ implements
 
 getTotalMemory
-private long getTotalMemory()
+private long getTotalMemory()
 Returns the total memory of the process. We will use this to define a gauge metric
 
 
@@ -395,7 +396,7 @@ implements
 
 getMaxMemory
-private long getMaxMemory()
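The DelegatingInternalScanner page above is the delegation pattern in miniature: forward next() and close() to a wrapped scanner and filter the batch in between. A self-contained toy version of the same pattern (MiniScanner is a stand-in interface, not HBase's InternalScanner):

import java.io.Closeable;
import java.io.IOException;
import java.util.List;

/** Minimal stand-in for an internal scanner: yields one batch of cells per call. */
interface MiniScanner extends Closeable {
  boolean next(List<String> batch) throws IOException;
}

/** Delegation pattern from the page above: forward everything, filter the batch. */
class FilteringScanner implements MiniScanner {
  private final MiniScanner delegate;

  FilteringScanner(MiniScanner delegate) {
    this.delegate = delegate;
  }

  @Override
  public boolean next(List<String> batch) throws IOException {
    boolean more = delegate.next(batch);           // pull from the wrapped scanner
    batch.removeIf(cell -> cell.startsWith("#"));  // drop cells the filter rejects
    return more;                                   // propagate "has more rows"
  }

  @Override
  public void close() throws IOException {
    delegate.close();                              // release the delegate's resources
  }
}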
    

    [10/51] [partial] hbase-site git commit: Published site at 8ab7b20f48951d77945181024f5e15842bc253c4.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6eb695c8/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.SafePointZigZagLatch.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.SafePointZigZagLatch.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.SafePointZigZagLatch.html
    index 9971079..03c8b000 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.SafePointZigZagLatch.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.SafePointZigZagLatch.html
@@ -49,1067 +49,1082 @@
 041import org.apache.hadoop.fs.Path;
 042import org.apache.hadoop.hbase.HConstants;
 043import org.apache.hadoop.hbase.client.RegionInfo;
-044import org.apache.hadoop.hbase.trace.TraceUtil;
-045import org.apache.hadoop.hbase.util.Bytes;
-046import org.apache.hadoop.hbase.util.ClassSize;
-047import org.apache.hadoop.hbase.util.FSUtils;
-048import org.apache.hadoop.hbase.util.HasThread;
-049import org.apache.hadoop.hbase.util.Threads;
-050import org.apache.hadoop.hbase.wal.FSHLogProvider;
-051import org.apache.hadoop.hbase.wal.WALEdit;
-052import org.apache.hadoop.hbase.wal.WALKeyImpl;
-053import org.apache.hadoop.hbase.wal.WALProvider.Writer;
-054import org.apache.hadoop.hdfs.DFSOutputStream;
-055import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
-056import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-057import org.apache.htrace.core.TraceScope;
-058import org.apache.yetus.audience.InterfaceAudience;
-059import org.slf4j.Logger;
-060import org.slf4j.LoggerFactory;
-061import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-062
-063/**
-064 * The default implementation of FSWAL.
-065 */
-066@InterfaceAudience.Private
-067public class FSHLog extends AbstractFSWAL<Writer> {
-068  // IMPLEMENTATION NOTES:
-069  //
-070  // At the core is a ring buffer. Our ring buffer is the LMAX Disruptor. It tries to
-071  // minimize synchronizations and volatile writes when multiple contending threads as is the case
-072  // here appending and syncing on a single WAL. The Disruptor is configured to handle multiple
-073  // producers but it has one consumer only (the producers in HBase are IPC Handlers calling append
-074  // and then sync). The single consumer/writer pulls the appends and syncs off the ring buffer.
-075  // When a handler calls sync, it is given back a future. The producer 'blocks' on the future so
-076  // it does not return until the sync completes. The future is passed over the ring buffer from
-077  // the producer/handler to the consumer thread where it does its best to batch up the producer
-078  // syncs so one WAL sync actually spans multiple producer sync invocations. How well the
-079  // batching works depends on the write rate; i.e. we tend to batch more in times of
-080  // high writes/syncs.
-081  //
-082  // Calls to append now also wait until the append has been done on the consumer side of the
-083  // disruptor. We used to not wait but it makes the implementation easier to grok if we have
-084  // the region edit/sequence id after the append returns.
-085  //
-086  // TODO: Handlers need to coordinate appending AND syncing. Can we have the threads contend
-087  // once only? Probably hard given syncs take way longer than an append.
-088  //
-089  // The consumer threads pass the syncs off to multiple syncing threads in a round robin fashion
-090  // to ensure we keep up back-to-back FS sync calls (FS sync calls are the long poll writing the
-091  // WAL). The consumer thread passes the futures to the sync threads for it to complete
-092  // the futures when done.
-093  //
-094  // The 'sequence' in the below is the sequence of the append/sync on the ringbuffer. It
-095  // acts as a sort-of transaction id. It is always incrementing.
-096  //
-097  // The RingBufferEventHandler class hosts the ring buffer consuming code. The threads that
-098  // do the actual FS sync are implementations of SyncRunner. SafePointZigZagLatch is a
-099  // synchronization class used to halt the consumer at a safe point -- just after all outstanding
-100  // syncs and appends have completed -- so the log roller can swap the WAL out under it.
-101  //
-102  // We use ring buffer sequence as txid of FSWALEntry and SyncFuture.
-103  private static final Logger LOG = LoggerFactory.getLogger(FSHLog.class);
-104
-105  /**
-106   * The nexus at which all incoming handlers meet. Does appends and sync with an ordering. Appends
-107   * and syncs are each put on the ring which means handlers need to smash up against the ring twice
-108   * (can we make it once only? ... maybe not since time to append is so different from time to sync
-109   * and sometimes we don't want to sync or we want to async the sync). The ring is where we make
-110   * sure of our
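The implementation notes in this hunk describe the core trick: producers enqueue sync requests and block on futures while a single consumer batches many requests into one filesystem sync. A stripped-down sketch of just that future-batching idea (a toy, not the Disruptor-based FSHLog):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.LinkedBlockingQueue;

/** One consumer batches many producers' sync requests into a single flush. */
public class BatchedSyncer {
  private final BlockingQueue<CompletableFuture<Void>> pending = new LinkedBlockingQueue<>();

  /** Producer side: handlers block on the returned future until their sync lands. */
  public CompletableFuture<Void> requestSync() {
    CompletableFuture<Void> f = new CompletableFuture<>();
    pending.add(f);
    return f;
  }

  /** Consumer side: one expensive flush covers every request queued so far. */
  public void consumeLoop() throws InterruptedException {
    while (!Thread.currentThread().isInterrupted()) {
      List<CompletableFuture<Void>> batch = new ArrayList<>();
      batch.add(pending.take());        // wait for at least one request
      pending.drainTo(batch);           // then sweep up everything queued behind it
      flushToDisk();                    // one "fsync" spans the whole batch
      batch.forEach(f -> f.complete(null));
    }
  }

  private void flushToDisk() {
    // stand-in for the long filesystem sync call
  }
}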

    [10/51] [partial] hbase-site git commit: Published site at 00095a2ef9442e3fd86c04876c9d91f2f8b23ad8.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bd675fa3/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
    index 3bc66bb..97aa79c 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
@@ -1435,459 +1435,460 @@
 1427   */
 1428  private void execProcedure(final RootProcedureState procStack,
 1429      final Procedure<TEnvironment> procedure) {
-1430    Preconditions.checkArgument(procedure.getState() == ProcedureState.RUNNABLE);
-1431
-1432    // Procedures can suspend themselves. They skip out by throwing a ProcedureSuspendedException.
-1433    // The exception is caught below and then we hurry to the exit without disturbing state. The
-1434    // idea is that the processing of this procedure will be unsuspended later by an external event
-1435    // such as the report of a region open. TODO: Currently, it's possible for two worker threads
-1436    // to be working on the same procedure concurrently (locking in procedures is NOT about
-1437    // concurrency but about tying an entity to a procedure; i.e. a region to a particular
-1438    // procedure instance). This can make for issues if both threads are changing state.
-1439    // See env.getProcedureScheduler().wakeEvent(regionNode.getProcedureEvent());
-1440    // in RegionTransitionProcedure#reportTransition for example of Procedure putting
-1441    // itself back on the scheduler making it possible for two threads running against
-1442    // the one Procedure. Might be ok if they are both doing different, idempotent sections.
-1443    boolean suspended = false;
-1444
-1445    // Whether to 're-' -execute; run through the loop again.
-1446    boolean reExecute = false;
-1447
-1448    Procedure<TEnvironment>[] subprocs = null;
-1449    do {
-1450      reExecute = false;
-1451      try {
-1452        subprocs = procedure.doExecute(getEnvironment());
-1453        if (subprocs != null && subprocs.length == 0) {
-1454          subprocs = null;
-1455        }
-1456      } catch (ProcedureSuspendedException e) {
-1457        if (LOG.isTraceEnabled()) {
-1458          LOG.trace("Suspend " + procedure);
-1459        }
-1460        suspended = true;
-1461      } catch (ProcedureYieldException e) {
-1462        if (LOG.isTraceEnabled()) {
-1463          LOG.trace("Yield " + procedure + ": " + e.getMessage(), e);
-1464        }
-1465        scheduler.yield(procedure);
-1466        return;
-1467      } catch (InterruptedException e) {
-1468        if (LOG.isTraceEnabled()) {
-1469          LOG.trace("Yield interrupt " + procedure + ": " + e.getMessage(), e);
-1470        }
-1471        handleInterruptedException(procedure, e);
-1472        scheduler.yield(procedure);
-1473        return;
-1474      } catch (Throwable e) {
-1475        // Catch NullPointerExceptions or similar errors...
-1476        String msg = "CODE-BUG: Uncaught runtime exception: " + procedure;
-1477        LOG.error(msg, e);
-1478        procedure.setFailure(new RemoteProcedureException(msg, e));
-1479      }
-1480
-1481      if (!procedure.isFailed()) {
-1482        if (subprocs != null) {
-1483          if (subprocs.length == 1 && subprocs[0] == procedure) {
-1484            // Procedure returned itself. Quick-shortcut for a state machine-like procedure;
-1485            // i.e. we go around this loop again rather than go back out on the scheduler queue.
-1486            subprocs = null;
-1487            reExecute = true;
-1488            if (LOG.isTraceEnabled()) {
-1489              LOG.trace("Short-circuit to next step on pid=" + procedure.getProcId());
-1490            }
-1491          } else {
-1492            // Yield the current procedure, and make the subprocedure runnable
-1493            // subprocs may come back 'null'.
-1494            subprocs = initializeChildren(procStack, procedure, subprocs);
-1495            LOG.info("Initialized subprocedures=" +
-1496              (subprocs == null? null:
-1497                Stream.of(subprocs).map(e -> "{" + e.toString() + "}").
-1498                collect(Collectors.toList()).toString()));
-1499          }
-1500        } else if (procedure.getState() == ProcedureState.WAITING_TIMEOUT) {
-1501          if (LOG.isTraceEnabled()) {
-1502            LOG.trace("Added to timeoutExecutor " + procedure);
-1503          }
-1504          timeoutExecutor.add(procedure);
-1505        } else if (!suspended) {
-1506          // No subtask, so we are done
-1507
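The execProcedure body above is a suspend/yield/re-execute loop: a step either completes, asks to run again in place (the state-machine shortcut), or suspends until an external event wakes it. A toy rendition of just that control flow (names and types are illustrative, not the ProcedureExecutor API):

import java.util.ArrayDeque;
import java.util.Deque;

/** Toy executor loop mirroring the suspend / re-execute control flow above. */
public class MiniProcedureLoop {
  interface Step { Outcome run() throws Exception; }
  enum Outcome { DONE, AGAIN, SUSPEND }

  private final Deque<Step> runnables = new ArrayDeque<>();

  public void submit(Step step) {
    runnables.add(step);
  }

  public void drain() {
    while (!runnables.isEmpty()) {
      Step step = runnables.poll();
      boolean reExecute;
      do {
        reExecute = false;
        try {
          switch (step.run()) {
            case AGAIN:   reExecute = true; break;  // state-machine shortcut: loop in place
            case SUSPEND: break;                    // woken later by an external event
            case DONE:    break;
          }
        } catch (Exception e) {
          // a real executor would mark the step failed and start rollback
          break;
        }
      } while (reExecute);
    }
  }
}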
    

    [10/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/replication/TableReplicationStorageBase.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/replication/TableReplicationStorageBase.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/replication/TableReplicationStorageBase.html
    index ab343c1..9bafd71 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/replication/TableReplicationStorageBase.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/replication/TableReplicationStorageBase.html
@@ -90,32 +90,32 @@
 082        conf.getInt(HConstants.HBASE_META_BLOCK_SIZE, HConstants.DEFAULT_HBASE_META_BLOCK_SIZE);
 083    return TableDescriptorBuilder
 084        .newBuilder(REPLICATION_TABLE)
-085        .addColumnFamily(
+085        .setColumnFamily(
 086          ColumnFamilyDescriptorBuilder.newBuilder(FAMILY_PEER).setMaxVersions(metaMaxVersion)
 087              .setInMemory(true).setBlocksize(metaBlockSize)
 088              .setScope(HConstants.REPLICATION_SCOPE_LOCAL).setBloomFilterType(BloomType.NONE)
 089              .build())
-090        .addColumnFamily(
+090        .setColumnFamily(
 091          ColumnFamilyDescriptorBuilder.newBuilder(FAMILY_RS_STATE).setMaxVersions(metaMaxVersion)
 092              .setInMemory(true).setBlocksize(metaBlockSize)
 093              .setScope(HConstants.REPLICATION_SCOPE_LOCAL).setBloomFilterType(BloomType.NONE)
 094              .build())
-095        .addColumnFamily(
+095        .setColumnFamily(
 096          ColumnFamilyDescriptorBuilder.newBuilder(FAMILY_QUEUE).setMaxVersions(metaMaxVersion)
 097              .setInMemory(true).setBlocksize(metaBlockSize)
 098              .setScope(HConstants.REPLICATION_SCOPE_LOCAL).setBloomFilterType(BloomType.NONE)
 099              .build())
-100        .addColumnFamily(
+100        .setColumnFamily(
 101          ColumnFamilyDescriptorBuilder.newBuilder(FAMILY_WAL)
 102              .setMaxVersions(HConstants.ALL_VERSIONS).setInMemory(true)
 103              .setBlocksize(metaBlockSize).setScope(HConstants.REPLICATION_SCOPE_LOCAL)
 104              .setBloomFilterType(BloomType.NONE).build())
-105        .addColumnFamily(
+105        .setColumnFamily(
 106          ColumnFamilyDescriptorBuilder.newBuilder(FAMILY_REGIONS).setMaxVersions(metaMaxVersion)
 107              .setInMemory(true).setBlocksize(metaBlockSize)
 108              .setScope(HConstants.REPLICATION_SCOPE_LOCAL).setBloomFilterType(BloomType.NONE)
 109              .build())
-110        .addColumnFamily(
+110        .setColumnFamily(
 111          ColumnFamilyDescriptorBuilder.newBuilder(FAMILY_HFILE_REFS)
 112              .setMaxVersions(metaMaxVersion).setInMemory(true).setBlocksize(metaBlockSize)
 113              .setScope(HConstants.REPLICATION_SCOPE_LOCAL).setBloomFilterType(BloomType.NONE)
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
    index eeb8705..ea9558d 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
@@ -1136,7 +1136,7 @@
 1128        setScope(HConstants.REPLICATION_SCOPE_LOCAL).build();
 1129    TableDescriptor td =
 1130        TableDescriptorBuilder.newBuilder(AccessControlLists.ACL_TABLE_NAME).
-1131        addColumnFamily(cfd).build();
+1131          setColumnFamily(cfd).build();
 1132    admin.createTable(td);
 1133  }
 1134
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
    index eeb8705..ea9558d 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
@@ -1136,7 +1136,7 @@
 1128        setScope(HConstants.REPLICATION_SCOPE_LOCAL).build();
 1129    TableDescriptor td =
 1130        TableDescriptorBuilder.newBuilder(AccessControlLists.ACL_TABLE_NAME).
-1131        addColumnFamily(cfd).build();
+1131          setColumnFamily(cfd).build();
 1132    admin.createTable(td);
 1133  }
 1134
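The hunks above are the same mechanical rename: TableDescriptorBuilder.addColumnFamily(...) became setColumnFamily(...). A minimal sketch of creating a table with the renamed builder method (the "demo" table and "info" family names are illustrative):

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class CreateTableExample {
  // Builds a descriptor with the renamed setColumnFamily(...) and creates the table.
  static void createTable(Admin admin) throws IOException {
    ColumnFamilyDescriptor cf = ColumnFamilyDescriptorBuilder
        .newBuilder(Bytes.toBytes("info"))      // illustrative family name
        .setMaxVersions(3)
        .build();
    TableDescriptor td = TableDescriptorBuilder
        .newBuilder(TableName.valueOf("demo"))  // illustrative table name
        .setColumnFamily(cf)
        .build();
    admin.createTable(td);
  }
}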
    
    

    [10/51] [partial] hbase-site git commit: Published site at 31da4d0bce69b3a47066a5df675756087ce4dc60.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a754d895/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html 
    b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
    index 286bad8..4c4a4d7 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private final class HStore.StoreFlusherImpl
+private final class HStore.StoreFlusherImpl
 extends Object
 implements StoreFlushContext
 
@@ -279,7 +279,7 @@ implements
 
 tracker
-private final FlushLifeCycleTracker tracker
+private final FlushLifeCycleTracker tracker
 
@@ -288,7 +288,7 @@ implements
 
 cacheFlushSeqNum
-private final long cacheFlushSeqNum
+private final long cacheFlushSeqNum
 
@@ -297,7 +297,7 @@ implements
 
 snapshot
-private MemStoreSnapshot snapshot
+private MemStoreSnapshot snapshot
 
@@ -306,7 +306,7 @@ implements
 
 tempFiles
-private List<org.apache.hadoop.fs.Path> tempFiles
+private List<org.apache.hadoop.fs.Path> tempFiles
 
@@ -315,7 +315,7 @@ implements
 
 committedFiles
-private List<org.apache.hadoop.fs.Path> committedFiles
+private List<org.apache.hadoop.fs.Path> committedFiles
 
@@ -324,7 +324,7 @@ implements
 
 cacheFlushCount
-private long cacheFlushCount
+private long cacheFlushCount
 
@@ -333,7 +333,7 @@ implements
 
 cacheFlushSize
-private long cacheFlushSize
+private long cacheFlushSize
 
@@ -342,7 +342,7 @@ implements
 
 outputFileSize
-private long outputFileSize
+private long outputFileSize
 
@@ -359,7 +359,7 @@ implements
 
 StoreFlusherImpl
-private StoreFlusherImpl(long cacheFlushSeqNum,
+private StoreFlusherImpl(long cacheFlushSeqNum,
                          FlushLifeCycleTracker tracker)
 
@@ -377,7 +377,7 @@ implements
 
 prepare
-public MemStoreSize prepare()
+public MemStoreSize prepare()
 This is not thread safe. The caller should have a lock on the region or the store.
 If necessary, the lock can be added with the patch provided in HBASE-10087
 
@@ -394,7 +394,7 @@ implements
 
 flushCache
-public void flushCache(MonitoredTask status)
+public void flushCache(MonitoredTask status)
                 throws IOException
 Description copied from interface: StoreFlushContext
 Flush the cache (create the new store file)
@@ -415,7 +415,7 @@ implements
 
 commit
-public boolean commit(MonitoredTask status)
+public boolean commit(MonitoredTask status)
               throws IOException
 Description copied from interface: StoreFlushContext
 Commit the flush - add the store file to the store and clear the
@@ -439,7 +439,7 @@ implements
 
 getOutputFileSize
-public long getOutputFileSize()
+public long getOutputFileSize()
 
 Specified by:
 getOutputFileSize in interface StoreFlushContext
@@ -454,7 +454,7 @@ implements
 
 getCommittedFiles
-public List<org.apache.hadoop.fs.Path> getCommittedFiles()
+public List<org.apache.hadoop.fs.Path> getCommittedFiles()
 Description copied from interface: StoreFlushContext
 Returns the newly committed files from the flush. Called only if commit returns true
 
@@ -471,7 +471,7 @@ implements
 
 replayFlush
-public void replayFlush(List<String> fileNames,
+public void replayFlush(List<String> fileNames,
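The StoreFlushContext methods documented above form a three-phase flush: prepare() snapshots under lock, flushCache() writes without the lock, commit() publishes the files. A toy sketch of that contract (all names illustrative, not the HBase types):

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/** Toy three-phase flush: snapshot under lock, write without lock, then commit. */
class ToyFlushContext {
  private List<String> snapshot;             // frozen view of the in-memory store
  private final List<String> tempFiles = new ArrayList<>();

  /** Phase 1: caller holds the store lock; freeze the current contents. */
  void prepare(List<String> memstore) {
    snapshot = new ArrayList<>(memstore);
    memstore.clear();
  }

  /** Phase 2: no lock needed; persist the frozen snapshot to temporary files. */
  void flushCache() throws IOException {
    tempFiles.add(writeTempFile(snapshot));
  }

  /** Phase 3: atomically expose the written files to readers. */
  List<String> commit() {
    return new ArrayList<>(tempFiles);       // the "committed files" of the flush
  }

  private String writeTempFile(List<String> cells) throws IOException {
    return "/tmp/flush-" + cells.hashCode(); // stand-in for real file IO
  }
}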
     

    [10/51] [partial] hbase-site git commit: Published site at 6b77786dfc46d25ac5bb5f1c8a4a9eb47b52a604.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/81cde4ce/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html 
    b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
    index de426ec..c0b3f3f 100644
    --- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
    +++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
@@ -168,39 +168,27 @@
 
 void
-CombinedBlockCache.cacheBlock(BlockCacheKey cacheKey, Cacheable buf)
-
-void
 BlockCache.cacheBlock(BlockCacheKey cacheKey, Cacheable buf)
 Add block to cache (defaults to not in-memory).
 
 void
 LruBlockCache.cacheBlock(BlockCacheKey cacheKey, Cacheable buf)
 Cache the block with the specified name and buffer.
 
-void
-MemcachedBlockCache.cacheBlock(BlockCacheKey cacheKey, Cacheable buf)
-
 void
-CombinedBlockCache.cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory)
+CombinedBlockCache.cacheBlock(BlockCacheKey cacheKey, Cacheable buf)
 
 void
-InclusiveCombinedBlockCache.cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory)
+MemcachedBlockCache.cacheBlock(BlockCacheKey cacheKey, Cacheable buf)
 
 void
@@ -220,6 +208,18 @@
 
 void
+CombinedBlockCache.cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory)
+
+void
+InclusiveCombinedBlockCache.cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory)
+
+void
 MemcachedBlockCache.cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory)
@@ -232,21 +232,21 @@
 
 boolean
-CombinedBlockCache.evictBlock(BlockCacheKey cacheKey)
+BlockCache.evictBlock(BlockCacheKey cacheKey)
+Evict block from cache.
 
 boolean
-InclusiveCombinedBlockCache.evictBlock(BlockCacheKey cacheKey)
+LruBlockCache.evictBlock(BlockCacheKey cacheKey)
 
 boolean
-BlockCache.evictBlock(BlockCacheKey cacheKey)
-Evict block from cache.
+CombinedBlockCache.evictBlock(BlockCacheKey cacheKey)
 
 boolean
-LruBlockCache.evictBlock(BlockCacheKey cacheKey)
+InclusiveCombinedBlockCache.evictBlock(BlockCacheKey cacheKey)
 
 boolean
@@ -254,35 +254,35 @@
 
 Cacheable
-CombinedBlockCache.getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat, boolean updateCacheMetrics)
+BlockCache.getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat, boolean updateCacheMetrics)
+Fetch block from cache.
 
 Cacheable
-InclusiveCombinedBlockCache.getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat, boolean updateCacheMetrics)
+LruBlockCache.getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat, boolean updateCacheMetrics)
+Get the buffer of the block with the specified name.
 
 Cacheable
-BlockCache.getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat, boolean updateCacheMetrics)
-Fetch block from cache.
+CombinedBlockCache.getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat, boolean updateCacheMetrics)
 
 Cacheable
-LruBlockCache.getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat, boolean updateCacheMetrics)
-Get the buffer of the block with the specified name.
+InclusiveCombinedBlockCache.getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat, boolean updateCacheMetrics)
 
 Cacheable
@@ -308,11 +308,6 @@
 CombinedBlockCache.getRefCount(BlockCacheKey cacheKey)
 
-void
-CombinedBlockCache.returnBlock(BlockCacheKey cacheKey, Cacheable block)
-
 default void
 BlockCache.returnBlock(BlockCacheKey cacheKey, Cacheable block)
@@ -320,6 +315,11 @@
  is over.
 
+void
+CombinedBlockCache.returnBlock(BlockCacheKey cacheKey, Cacheable block)
+
@@ -497,14 +497,14 @@
 
 void
-BucketCache.BucketEntryGroup.add(Map.Entry<BlockCacheKey, BucketCache.BucketEntry> block)
-
-void
 CachedEntryQueue.add(Map.Entry<BlockCacheKey, BucketCache.BucketEntry> entry)
 Attempt to add the specified entry to this queue.
 
+void
+BucketCache.BucketEntryGroup.add(Map.Entry<BlockCacheKey, BucketCache.BucketEntry> block)
+
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/81cde4ce/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html 
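The table above is the BlockCache contract in three calls: cacheBlock, getBlock, evictBlock. A toy LRU variant of that contract built on LinkedHashMap's access order (a sketch, not LruBlockCache itself):

import java.util.LinkedHashMap;
import java.util.Map;

/** Toy LRU block cache with the cacheBlock / getBlock / evictBlock contract above. */
public class ToyLruBlockCache<K, V> {
  private final int capacity;
  private final LinkedHashMap<K, V> blocks;

  public ToyLruBlockCache(int capacity) {
    this.capacity = capacity;
    // accessOrder=true makes iteration order follow recency, giving LRU eviction.
    this.blocks = new LinkedHashMap<K, V>(16, 0.75f, true) {
      @Override
      protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
        return size() > ToyLruBlockCache.this.capacity;
      }
    };
  }

  public synchronized void cacheBlock(K key, V buf) {
    blocks.put(key, buf);
  }

  public synchronized V getBlock(K key) {
    return blocks.get(key);                 // also refreshes recency
  }

  public synchronized boolean evictBlock(K key) {
    return blocks.remove(key) != null;
  }
}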
    

    [10/51] [partial] hbase-site git commit: Published site at 1384da71375427b522b09f06862bb5d629cef52f.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d347bde8/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
    b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
    index 41b2105..dc4b7bd 100644
    --- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
    +++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -495,15 +495,15 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 static Filter
-SingleColumnValueExcludeFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+ColumnPrefixFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-ValueFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+ColumnCountGetFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-FamilyFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+RowFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
@@ -513,69 +513,69 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 static Filter
-ColumnPrefixFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+FirstKeyOnlyFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-PageFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+TimestampsFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-RowFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+ValueFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-ColumnRangeFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+KeyOnlyFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-ColumnCountGetFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+FamilyFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-MultipleColumnPrefixFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+QualifierFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
-ColumnPaginationFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
+ColumnRangeFilter.createFilterFromArguments(ArrayList<byte[]> filterArguments)
 
 
 static Filter
    

    [10/51] [partial] hbase-site git commit: Published site at b7b86839250bf9b295ebc1948826f43a88736d6c.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6b94a2f2/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.html
    --
    diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.html 
    b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.html
    index df5fa53..8fffb89 100644
    --- a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.html
    +++ b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.html
@@ -42,1927 +42,2060 @@
 034import java.util.TreeMap;
 035import java.util.regex.Matcher;
 036import java.util.regex.Pattern;
-037import org.apache.hadoop.conf.Configuration;
-038import org.apache.hadoop.hbase.Cell.Type;
-039import org.apache.hadoop.hbase.client.Connection;
-040import org.apache.hadoop.hbase.client.ConnectionFactory;
-041import org.apache.hadoop.hbase.client.Consistency;
-042import org.apache.hadoop.hbase.client.Delete;
-043import org.apache.hadoop.hbase.client.Get;
-044import org.apache.hadoop.hbase.client.Mutation;
-045import org.apache.hadoop.hbase.client.Put;
-046import org.apache.hadoop.hbase.client.RegionInfo;
-047import org.apache.hadoop.hbase.client.RegionInfoBuilder;
-048import org.apache.hadoop.hbase.client.RegionLocator;
-049import org.apache.hadoop.hbase.client.RegionReplicaUtil;
-050import org.apache.hadoop.hbase.client.RegionServerCallable;
-051import org.apache.hadoop.hbase.client.Result;
-052import org.apache.hadoop.hbase.client.ResultScanner;
-053import org.apache.hadoop.hbase.client.Scan;
-054import org.apache.hadoop.hbase.client.Table;
-055import org.apache.hadoop.hbase.client.TableState;
-056import org.apache.hadoop.hbase.exceptions.DeserializationException;
-057import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-058import org.apache.hadoop.hbase.master.RegionState;
-059import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-060import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-061import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
-062import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-063import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
-064import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;
-065import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse;
-066import org.apache.hadoop.hbase.util.Bytes;
-067import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-068import org.apache.hadoop.hbase.util.ExceptionUtil;
-069import org.apache.hadoop.hbase.util.Pair;
-070import org.apache.hadoop.hbase.util.PairOfSameType;
-071import org.apache.yetus.audience.InterfaceAudience;
-072import org.slf4j.Logger;
-073import org.slf4j.LoggerFactory;
-074
-075import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-076
-077/**
-078 * <p>
-079 * Read/write operations on region and assignment information store in <code>hbase:meta</code>.
-080 * </p>
+037import java.util.stream.Collectors;
+038import java.util.stream.Stream;
+039import org.apache.hadoop.conf.Configuration;
+040import org.apache.hadoop.hbase.Cell.Type;
+041import org.apache.hadoop.hbase.client.Connection;
+042import org.apache.hadoop.hbase.client.ConnectionFactory;
+043import org.apache.hadoop.hbase.client.Consistency;
+044import org.apache.hadoop.hbase.client.Delete;
+045import org.apache.hadoop.hbase.client.Get;
+046import org.apache.hadoop.hbase.client.Mutation;
+047import org.apache.hadoop.hbase.client.Put;
+048import org.apache.hadoop.hbase.client.RegionInfo;
+049import org.apache.hadoop.hbase.client.RegionInfoBuilder;
+050import org.apache.hadoop.hbase.client.RegionLocator;
+051import org.apache.hadoop.hbase.client.RegionReplicaUtil;
+052import org.apache.hadoop.hbase.client.RegionServerCallable;
+053import org.apache.hadoop.hbase.client.Result;
+054import org.apache.hadoop.hbase.client.ResultScanner;
+055import org.apache.hadoop.hbase.client.Scan;
+056import org.apache.hadoop.hbase.client.Table;
+057import org.apache.hadoop.hbase.client.TableState;
+058import org.apache.hadoop.hbase.exceptions.DeserializationException;
+059import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
+060import org.apache.hadoop.hbase.master.RegionState;
+061import org.apache.hadoop.hbase.master.RegionState.State;
+062import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+063import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
+064import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
+065import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
+066import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
+067import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;
+068import
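[Editor's note] The new imports above (java.util.stream.Collectors, java.util.stream.Stream) signal that this revision of MetaTableAccessor rewrites loop-based collection building as stream pipelines. A self-contained sketch of that kind of rewrite (the names here are illustrative, not taken from MetaTableAccessor itself):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    public class StreamRefactorSketch {
      // Before: accumulate results with an explicit loop.
      static List<String> lowercaseLoop(String[] inputs) {
        List<String> out = new ArrayList<>();
        for (String in : inputs) {
          out.add(in.toLowerCase());
        }
        return out;
      }

      // After: the same result expressed as a stream pipeline.
      static List<String> lowercaseStream(String[] inputs) {
        return Stream.of(inputs)
            .map(String::toLowerCase)
            .collect(Collectors.toList());
      }
    }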
    

    [10/51] [partial] hbase-site git commit: Published site at 1d25b60831b8cc8f7ad5fd366f1867de5c20d2f3.

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb05e3e3/apidocs/org/apache/hadoop/hbase/client/Result.html
    --
diff --git a/apidocs/org/apache/hadoop/hbase/client/Result.html b/apidocs/org/apache/hadoop/hbase/client/Result.html
    index 8ed792e..77b5567 100644
    --- a/apidocs/org/apache/hadoop/hbase/client/Result.html
    +++ b/apidocs/org/apache/hadoop/hbase/client/Result.html
@@ -97,7 +97,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">java.lang.Object</a>
+<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">java.lang.Object</a>
 
 
 org.apache.hadoop.hbase.client.Result
@@ -115,13 +115,13 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
 public class Result
-extends <a href="http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
+extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements CellScannable, CellScanner
 Single row result of a Get or Scan query.
 
  This class is NOT THREAD SAFE.
 
- Convenience methods are available that return various <a href="http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>
+ Convenience methods are available that return various <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>
  structures and values directly.
 
  To get a complete mapping of all cells in the Result, which can include
@@ -309,43 +309,43 @@ implements 
 static Result
 create(Cell[] cells,
-  <a href="http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a> exists,
+  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a> exists,
   boolean stale)
 
 
 static Result
 create(Cell[] cells,
-  <a href="http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a> exists,
+  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a> exists,
   boolean stale,
   boolean mayHaveMoreCellsInRow)
 
 
 static Result
-create(<a href="http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a><Cell> cells)
+create(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a><Cell> cells)
 Instantiate a Result with the specified List of KeyValues.
 
 
 
 static Result
-create(<a href="http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a><Cell> cells,
-  <a href="http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a> exists)
+create(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a><Cell> cells,
+  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a> exists)
 
 
 static Result
-create(<a href="http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a><Cell> cells,
-  <a href="http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a> exists,
+create(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a><Cell> cells,
+  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a> exists,
   boolean stale)
 
 
 static Result
-create(<a href="http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a><Cell> cells,
-  <a href="http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a> exists,
+create(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a><Cell> cells,
+  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a> exists,
   boolean stale,
   boolean mayHaveMoreCellsInRow)
 
 
 static Result
-createCompleteResult(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in
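[Editor's note] The create(List<Cell>) overloads above build a Result directly from cells, which is handy in tests. A hedged sketch, assuming the HBase 2 CellBuilder API (CellBuilderFactory/CellBuilderType) and made-up row/family/qualifier names:

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellBuilderFactory;
    import org.apache.hadoop.hbase.CellBuilderType;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ResultCreateExample {
      public static Result makeResult() {
        Cell cell = CellBuilderFactory.create(CellBuilderType.DEEP_COPY)
            .setRow(Bytes.toBytes("row1"))
            .setFamily(Bytes.toBytes("cf"))
            .setQualifier(Bytes.toBytes("q"))
            .setTimestamp(System.currentTimeMillis())
            .setType(Cell.Type.Put)
            .setValue(Bytes.toBytes("value"))
            .build();
        List<Cell> cells = new ArrayList<>();
        cells.add(cell);
        // Cells must be sorted and all belong to the same row.
        return Result.create(cells);
      }
    }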

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.html
    --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.html
    index 4d03740..2f29cd8 100644
    --- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.html
    +++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HTable.html
@@ -170,1134 +170,1131 @@
 162  final RpcRetryingCallerFactory rpcCallerFactory,
 163  final RpcControllerFactory rpcControllerFactory,
 164  final ExecutorService pool) {
-165if (connection == null || connection.isClosed()) {
-166  throw new IllegalArgumentException("Connection is null or closed.");
-167}
-168this.connection = connection;
-169this.configuration = connection.getConfiguration();
-170this.connConfiguration = connection.getConnectionConfiguration();
-171if (pool == null) {
-172  this.pool = getDefaultExecutor(this.configuration);
-173  this.cleanupPoolOnClose = true;
-174} else {
-175  this.pool = pool;
-176  this.cleanupPoolOnClose = false;
-177}
-178if (rpcCallerFactory == null) {
-179  this.rpcCallerFactory = connection.getNewRpcRetryingCallerFactory(configuration);
-180} else {
-181  this.rpcCallerFactory = rpcCallerFactory;
-182}
-183
-184if (rpcControllerFactory == null) {
-185  this.rpcControllerFactory = RpcControllerFactory.instantiate(configuration);
-186} else {
-187  this.rpcControllerFactory = rpcControllerFactory;
-188}
-189
-190this.tableName = builder.tableName;
-191this.operationTimeoutMs = builder.operationTimeout;
-192this.rpcTimeoutMs = builder.rpcTimeout;
-193this.readRpcTimeoutMs = builder.readRpcTimeout;
-194this.writeRpcTimeoutMs = builder.writeRpcTimeout;
-195this.scannerCaching = connConfiguration.getScannerCaching();
-196this.scannerMaxResultSize = connConfiguration.getScannerMaxResultSize();
-197
-198// puts need to track errors globally due to how the APIs currently work.
-199multiAp = this.connection.getAsyncProcess();
-200this.locator = new HRegionLocator(tableName, connection);
-201  }
-202
-203  /**
-204   * @return maxKeyValueSize from configuration.
-205   */
-206  public static int getMaxKeyValueSize(Configuration conf) {
-207return conf.getInt(ConnectionConfiguration.MAX_KEYVALUE_SIZE_KEY, -1);
-208  }
-209
-210  @Override
-211  public Configuration getConfiguration() {
-212return configuration;
-213  }
-214
-215  @Override
-216  public TableName getName() {
-217return tableName;
-218  }
-219
-220  /**
-221   * <em>INTERNAL</em> Used by unit tests and tools to do low-level
-222   * manipulations.
-223   * @return A Connection instance.
-224   */
-225  @VisibleForTesting
-226  protected Connection getConnection() {
-227return this.connection;
-228  }
-229
-230  @Override
-231  @Deprecated
-232  public HTableDescriptor getTableDescriptor() throws IOException {
-233HTableDescriptor htd = HBaseAdmin.getHTableDescriptor(tableName, connection, rpcCallerFactory,
-234  rpcControllerFactory, operationTimeoutMs, readRpcTimeoutMs);
-235if (htd != null) {
-236  return new ImmutableHTableDescriptor(htd);
-237}
-238return null;
-239  }
-240
-241  @Override
-242  public TableDescriptor getDescriptor() throws IOException {
-243return HBaseAdmin.getTableDescriptor(tableName, connection, rpcCallerFactory,
-244  rpcControllerFactory, operationTimeoutMs, readRpcTimeoutMs);
-245  }
-246
-247  /**
-248   * Get the corresponding start keys and regions for an arbitrary range of
-249   * keys.
-250   * <p>
-251   * @param startKey Starting row in range, inclusive
-252   * @param endKey Ending row in range
-253   * @param includeEndKey true if endRow is inclusive, false if exclusive
-254   * @return A pair of list of start keys and list of HRegionLocations that
-255   * contain the specified range
-256   * @throws IOException if a remote or network exception occurs
-257   */
-258  private Pair<List<byte[]>, List<HRegionLocation>> getKeysAndRegionsInRange(
-259  final byte[] startKey, final byte[] endKey, final boolean includeEndKey)
-260  throws IOException {
-261return getKeysAndRegionsInRange(startKey, endKey, includeEndKey, false);
-262  }
-263
-264  /**
-265   * Get the corresponding start keys and regions for an arbitrary range of
-266   * keys.
-267   * <p>
-268   * @param startKey Starting row in range, inclusive
-269   * @param endKey Ending row in range
-270   * @param includeEndKey true if endRow is inclusive, false if exclusive
-271   * @param reload true to reload information or false to use cached information
-272   * @return A pair of list of start keys and list of HRegionLocations that
-273   * contain the specified
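[Editor's note] The removed constructor body above shows the resource-ownership idiom HTable uses: a caller-supplied pool is borrowed, a self-created pool is owned and must be shut down on close. A self-contained sketch of that idiom (class and method names are made up):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class PoolOwnershipSketch implements AutoCloseable {
      private final ExecutorService pool;
      private final boolean cleanupPoolOnClose;

      PoolOwnershipSketch(ExecutorService pool) {
        if (pool == null) {
          // No caller-supplied pool: create one and own its lifecycle.
          this.pool = Executors.newCachedThreadPool();
          this.cleanupPoolOnClose = true;
        } else {
          // Borrowed pool: never shut down a resource we do not own.
          this.pool = pool;
          this.cleanupPoolOnClose = false;
        }
      }

      @Override
      public void close() {
        if (cleanupPoolOnClose) {
          pool.shutdown();
        }
      }
    }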
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/991224b9/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
    --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
index 802b925..a3e80ab 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
@@ -73,229 +73,229 @@
 065import java.util.concurrent.TimeoutException;
 066import java.util.concurrent.atomic.AtomicBoolean;
 067import java.util.concurrent.atomic.AtomicInteger;
-068import java.util.concurrent.atomic.AtomicLong;
-069import java.util.concurrent.atomic.LongAdder;
-070import java.util.concurrent.locks.Lock;
-071import java.util.concurrent.locks.ReadWriteLock;
-072import java.util.concurrent.locks.ReentrantReadWriteLock;
-073import java.util.function.Function;
-074import org.apache.hadoop.conf.Configuration;
-075import org.apache.hadoop.fs.FileStatus;
-076import org.apache.hadoop.fs.FileSystem;
-077import org.apache.hadoop.fs.LocatedFileStatus;
-078import org.apache.hadoop.fs.Path;
-079import org.apache.hadoop.hbase.Cell;
-080import org.apache.hadoop.hbase.CellBuilderType;
-081import org.apache.hadoop.hbase.CellComparator;
-082import org.apache.hadoop.hbase.CellComparatorImpl;
-083import org.apache.hadoop.hbase.CellScanner;
-084import org.apache.hadoop.hbase.CellUtil;
-085import org.apache.hadoop.hbase.CompareOperator;
-086import org.apache.hadoop.hbase.CompoundConfiguration;
-087import org.apache.hadoop.hbase.DoNotRetryIOException;
-088import org.apache.hadoop.hbase.DroppedSnapshotException;
-089import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
-090import org.apache.hadoop.hbase.HConstants;
-091import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
-092import org.apache.hadoop.hbase.HDFSBlocksDistribution;
-093import org.apache.hadoop.hbase.HRegionInfo;
-094import org.apache.hadoop.hbase.KeyValue;
-095import org.apache.hadoop.hbase.KeyValueUtil;
-096import org.apache.hadoop.hbase.NamespaceDescriptor;
-097import org.apache.hadoop.hbase.NotServingRegionException;
-098import org.apache.hadoop.hbase.PrivateCellUtil;
-099import org.apache.hadoop.hbase.RegionTooBusyException;
-100import org.apache.hadoop.hbase.TableName;
-101import org.apache.hadoop.hbase.Tag;
-102import org.apache.hadoop.hbase.TagUtil;
-103import org.apache.hadoop.hbase.UnknownScannerException;
-104import org.apache.hadoop.hbase.client.Append;
-105import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-106import org.apache.hadoop.hbase.client.CompactionState;
-107import org.apache.hadoop.hbase.client.Delete;
-108import org.apache.hadoop.hbase.client.Durability;
-109import org.apache.hadoop.hbase.client.Get;
-110import org.apache.hadoop.hbase.client.Increment;
-111import org.apache.hadoop.hbase.client.IsolationLevel;
-112import org.apache.hadoop.hbase.client.Mutation;
-113import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;
-114import org.apache.hadoop.hbase.client.Put;
-115import org.apache.hadoop.hbase.client.RegionInfo;
-116import org.apache.hadoop.hbase.client.RegionReplicaUtil;
-117import org.apache.hadoop.hbase.client.Result;
-118import org.apache.hadoop.hbase.client.RowMutations;
-119import org.apache.hadoop.hbase.client.Scan;
-120import org.apache.hadoop.hbase.client.TableDescriptor;
-121import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-122import org.apache.hadoop.hbase.conf.ConfigurationManager;
-123import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
-124import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-125import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
-126import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-127import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-128import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-129import org.apache.hadoop.hbase.filter.ByteArrayComparable;
-130import org.apache.hadoop.hbase.filter.FilterWrapper;
-131import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
-132import org.apache.hadoop.hbase.io.HFileLink;
-133import org.apache.hadoop.hbase.io.HeapSize;
-134import org.apache.hadoop.hbase.io.TimeRange;
-135import org.apache.hadoop.hbase.io.hfile.HFile;
-136import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
-137import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-138import org.apache.hadoop.hbase.ipc.RpcCall;
-139import org.apache.hadoop.hbase.ipc.RpcServer;
-140import org.apache.hadoop.hbase.monitoring.MonitoredTask;
-141import org.apache.hadoop.hbase.monitoring.TaskMonitor;
-142import
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/193b4259/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html
    --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html
index bd13b53..802b925 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html
@@ -900,7600 +900,7598 @@
 892if (this.getRegionInfo().getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {
 893  status.setStatus("Writing region info on filesystem");
 894  fs.checkRegionInfoOnFilesystem();
-895} else {
-896  if (LOG.isDebugEnabled()) {
-897LOG.debug("Skipping creation of .regioninfo file for " + this.getRegionInfo());
-898  }
-899}
-900
-901// Initialize all the HStores
-902status.setStatus("Initializing all the Stores");
-903long maxSeqId = initializeStores(reporter, status);
-904this.mvcc.advanceTo(maxSeqId);
-905if (ServerRegionReplicaUtil.shouldReplayRecoveredEdits(this)) {
-906  Collection<HStore> stores = this.stores.values();
-907  try {
-908// update the stores that we are replaying
-909stores.forEach(HStore::startReplayingFromWAL);
-910// Recover any edits if available.
-911maxSeqId = Math.max(maxSeqId,
-912  replayRecoveredEditsIfAny(this.fs.getRegionDir(), maxSeqIdInStores, reporter, status));
-913// Make sure mvcc is up to max.
-914this.mvcc.advanceTo(maxSeqId);
-915  } finally {
-916// update the stores that we are done replaying
-917stores.forEach(HStore::stopReplayingFromWAL);
-918  }
-919}
-920this.lastReplayedOpenRegionSeqId = maxSeqId;
+895}
+896
+897// Initialize all the HStores
+898status.setStatus("Initializing all the Stores");
+899long maxSeqId = initializeStores(reporter, status);
+900this.mvcc.advanceTo(maxSeqId);
+901if (ServerRegionReplicaUtil.shouldReplayRecoveredEdits(this)) {
+902  Collection<HStore> stores = this.stores.values();
+903  try {
+904// update the stores that we are replaying
+905stores.forEach(HStore::startReplayingFromWAL);
+906// Recover any edits if available.
+907maxSeqId = Math.max(maxSeqId,
+908  replayRecoveredEditsIfAny(this.fs.getRegionDir(), maxSeqIdInStores, reporter, status));
+909// Make sure mvcc is up to max.
+910this.mvcc.advanceTo(maxSeqId);
+911  } finally {
+912// update the stores that we are done replaying
+913stores.forEach(HStore::stopReplayingFromWAL);
+914  }
+915}
+916this.lastReplayedOpenRegionSeqId = maxSeqId;
+917
+918this.writestate.setReadOnly(ServerRegionReplicaUtil.isReadOnly(this));
+919this.writestate.flushRequested = false;
+920this.writestate.compacting.set(0);
 921
-922this.writestate.setReadOnly(ServerRegionReplicaUtil.isReadOnly(this));
-923this.writestate.flushRequested = false;
-924this.writestate.compacting.set(0);
-925
-926if (this.writestate.writesEnabled) {
-927  // Remove temporary data left over from old regions
-928  status.setStatus("Cleaning up temporary data from old regions");
-929  fs.cleanupTempDir();
-930}
-931
-932if (this.writestate.writesEnabled) {
-933  status.setStatus("Cleaning up detritus from prior splits");
-934  // Get rid of any splits or merges that were lost in-progress.  Clean out
-935  // these directories here on open.  We may be opening a region that was
-936  // being split but we crashed in the middle of it all.
-937  fs.cleanupAnySplitDetritus();
-938  fs.cleanupMergesDir();
-939}
-940
-941// Initialize split policy
-942this.splitPolicy = RegionSplitPolicy.create(this, conf);
-943
-944// Initialize flush policy
-945this.flushPolicy = FlushPolicyFactory.create(this, conf);
-946
-947long lastFlushTime = EnvironmentEdgeManager.currentTime();
-948for (HStore store: stores.values()) {
-949  this.lastStoreFlushTimeMap.put(store, lastFlushTime);
-950}
-951
-952// Use maximum of log sequenceid or that which was found in stores
-953// (particularly if no recovered edits, seqid will be -1).
-954long nextSeqid = maxSeqId;
-955if (this.writestate.writesEnabled) {
-956  nextSeqid = WALSplitter.writeRegionSequenceIdFile(this.fs.getFileSystem(),
-957  this.fs.getRegionDir(), nextSeqid, 1);
-958} else {
-959  nextSeqid++;
-960}
-961
-962LOG.info("Onlined " + this.getRegionInfo().getShortNameToLog() +
-963  "; next sequenceid=" + nextSeqid);
+922if (this.writestate.writesEnabled) {
+923  // Remove
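[Editor's note] The replay block above is a tidy example of a toggle-in-try/finally idiom: every store is switched into WAL-replay mode, the recovered edits are applied, and the finally clause guarantees the stores leave replay mode even if replay throws. A minimal self-contained sketch of the same idiom (the Store interface here is made up):

    import java.util.Collection;

    public class ReplayToggleSketch {
      interface Store {
        void startReplayingFromWAL();
        void stopReplayingFromWAL();
      }

      static long replayAll(Collection<Store> stores, long maxSeqId) {
        try {
          // Switch every store into replay mode before applying edits.
          stores.forEach(Store::startReplayingFromWAL);
          // ... apply recovered edits here; this step may throw ...
          return maxSeqId;
        } finally {
          // Always leave replay mode, even if replay failed part-way.
          stores.forEach(Store::stopReplayingFromWAL);
        }
      }
    }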

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/94208cfe/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
    --
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
index 9c13a58..4d04e3e 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
@@ -133,11 +133,11 @@
 
 
 ProcedureExecutor<MasterProcedureEnv>
-MasterServices.getMasterProcedureExecutor()
+HMaster.getMasterProcedureExecutor()
 
 
 ProcedureExecutor<MasterProcedureEnv>
-HMaster.getMasterProcedureExecutor()
+MasterServices.getMasterProcedureExecutor()
 
 
 private RemoteProcedureDispatcher.RemoteProcedure<MasterProcedureEnv,?>
@@ -194,15 +194,15 @@
 
 
 protected Procedure.LockState
-GCRegionProcedure.acquireLock(MasterProcedureEnv env)
+RegionTransitionProcedure.acquireLock(MasterProcedureEnv env)
 
 
 protected Procedure.LockState
-MergeTableRegionsProcedure.acquireLock(MasterProcedureEnv env)
+GCRegionProcedure.acquireLock(MasterProcedureEnv env)
 
 
 protected Procedure.LockState
-RegionTransitionProcedure.acquireLock(MasterProcedureEnv env)
+MergeTableRegionsProcedure.acquireLock(MasterProcedureEnv env)
 
 
 protected boolean
@@ -295,7 +295,7 @@
 
 
 protected void
-AssignProcedure.finishTransition(MasterProcedureEnv env,
+UnassignProcedure.finishTransition(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode)
 
 
@@ -305,7 +305,7 @@
 
 
 protected void
-UnassignProcedure.finishTransition(MasterProcedureEnv env,
+AssignProcedure.finishTransition(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode)
 
 
@@ -314,7 +314,7 @@
 
 
 protected ProcedureMetrics
-AssignProcedure.getProcedureMetrics(MasterProcedureEnv env)
+UnassignProcedure.getProcedureMetrics(MasterProcedureEnv env)
 
 
 protected ProcedureMetrics
@@ -326,7 +326,7 @@
 
 
 protected ProcedureMetrics
-UnassignProcedure.getProcedureMetrics(MasterProcedureEnv env)
+AssignProcedure.getProcedureMetrics(MasterProcedureEnv env)
 
 
 (package private) static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse
@@ -357,7 +357,7 @@
 
 
 ServerName
-AssignProcedure.getServer(MasterProcedureEnv env)
+UnassignProcedure.getServer(MasterProcedureEnv env)
 
 abstract ServerName
 
@@ -367,7 +367,7 @@
 
 
 ServerName
-UnassignProcedure.getServer(MasterProcedureEnv env)
+AssignProcedure.getServer(MasterProcedureEnv env)
 
 
 private ServerName
@@ -384,19 +384,19 @@
 
 
 protected boolean
-MergeTableRegionsProcedure.hasLock(MasterProcedureEnv env)
+RegionTransitionProcedure.hasLock(MasterProcedureEnv env)
 
 
 protected boolean
-RegionTransitionProcedure.hasLock(MasterProcedureEnv env)
+MergeTableRegionsProcedure.hasLock(MasterProcedureEnv env)
 
 
 protected boolean
-MergeTableRegionsProcedure.holdLock(MasterProcedureEnv env)
+RegionTransitionProcedure.holdLock(MasterProcedureEnv env)
 
 
 protected boolean
-RegionTransitionProcedure.holdLock(MasterProcedureEnv env)
+MergeTableRegionsProcedure.holdLock(MasterProcedureEnv env)
 
 
 private boolean
@@ -510,15 +510,15 @@
 
 
 protected void
-MergeTableRegionsProcedure.releaseLock(MasterProcedureEnv env)
+RegionTransitionProcedure.releaseLock(MasterProcedureEnv env)
 
 
 protected void
-RegionTransitionProcedure.releaseLock(MasterProcedureEnv env)
+MergeTableRegionsProcedure.releaseLock(MasterProcedureEnv env)
 
 
 RemoteProcedureDispatcher.RemoteOperation
-AssignProcedure.remoteCallBuild(MasterProcedureEnv env,
+UnassignProcedure.remoteCallBuild(MasterProcedureEnv env,
 ServerName serverName)
 
 
@@ -528,12 +528,12 @@
 
 
 RemoteProcedureDispatcher.RemoteOperation
-UnassignProcedure.remoteCallBuild(MasterProcedureEnv env,
+AssignProcedure.remoteCallBuild(MasterProcedureEnv env,
 ServerName serverName)
 
 
 protected boolean
-AssignProcedure.remoteCallFailed(MasterProcedureEnv env,
+UnassignProcedure.remoteCallFailed(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode,
 IOException exception)
 
@@ -545,7 +545,7 @@
 
 
 protected boolean
-UnassignProcedure.remoteCallFailed(MasterProcedureEnv env,
+AssignProcedure.remoteCallFailed(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode,
 IOException exception)
 
@@ -566,10 +566,10 @@
 
 
 protected void
-AssignProcedure.reportTransition(MasterProcedureEnv env,
+UnassignProcedure.reportTransition(MasterProcedureEnv env,
     

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cd17dc5/devapidocs/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html
    --
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html
    index 07bcaa6..3624639 100644
    --- a/devapidocs/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html
    +++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":9,"i2":10,"i3":9,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":9,"i12":9,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":9,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":9,"i30":10,"i31":9,"i32":9,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10};
-var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
+var methods = {"i0":10,"i1":9,"i2":10,"i3":9,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":9,"i12":9,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":42,"i21":10,"i22":10,"i23":10,"i24":9,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":9,"i31":10,"i32":9,"i33":9,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10};
+var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class FixedFileTrailer
+public class FixedFileTrailer
 extends Object
 The HFile has a fixed trailer which contains offsets to other
  variable parts of the file. Also includes basic metadata on this file. The
@@ -277,7 +277,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 Method Summary
 
-All Methods  Static Methods  Instance Methods  Concrete Methods
+All Methods  Static Methods  Instance Methods  Concrete Methods  Deprecated Methods
 
 Modifier and Type
 Method and Description
@@ -374,54 +374,62 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 getFirstDataBlockOffset()
 
 
+private String
+getHBase1CompatibleName(String comparator)
+Deprecated.
+Since hbase-2.0.0. Will be removed in hbase-3.0.0.
+
+
+
+
 long
 getLastDataBlockOffset()
 
-
+
 long
 getLoadOnOpenDataOffset()
 
-
+
 int
 getMajorVersion()
 Returns the major version of this HFile format
 
 
-
+
 private static int
 getMaxTrailerSize()
 
-
+
 int
 getMetaIndexCount()
 
-
+
 int
 getMinorVersion()
 Returns the minor version of this HFile format
 
 
-
+
 int
 getNumDataIndexLevels()
 
-
+
 long
 getTotalUncompressedBytes()
 
-
+
 int
 getTrailerSize()
 
-
+
 (package private) static int
 getTrailerSize(int version)
 
-
+
 long
 getUncompressedDataIndexSize()
 
-
+
 (package private) static int
 materializeVersion(int majorVersion,
   int minorVersion)
@@ -429,78 +437,82 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
  minor and major version numbers.
 
 
-
+
 static FixedFileTrailer
 readFromStream(org.apache.hadoop.fs.FSDataInputStream istream,
   long fileSize)
 Reads a file trailer from the given file.
 
 
-
+
 (package private) void
 serialize(DataOutputStream outputStream)
 Write the trailer to a data stream.
 
 
-
+
 (package private) void
 serializeAsPB(DataOutputStream output)
-Write trailer data as protobuf
+Write trailer data as protobuf.
 
 
-
+
 void
 setComparatorClass(Class<? extends CellComparator> klass)
 
-
+
 void
 setCompressionCodec(Compression.Algorithm compressionCodec)
 
-
+
 void
 setDataIndexCount(int dataIndexCount)
 
-
+
 void
 setEncryptionKey(byte[] keyBytes)
 
-
+
 void
 setEntryCount(long newEntryCount)
 
-
+
 void
 setFileInfoOffset(long fileInfoOffset)
 
-
+
 void
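[Editor's note] FixedFileTrailer illustrates a common file-format technique: a fixed-size trailer at the very end of the file whose offsets locate every variable-length section, so a reader can seek to (fileSize - trailerSize) and bootstrap from there. A minimal sketch of that read pattern using plain java.io (not HBase's actual implementation):

    import java.io.IOException;
    import java.io.RandomAccessFile;

    public class TrailerReadSketch {
      // Read the last trailerSize bytes of a file by seeking relative to its end.
      static byte[] readTrailer(String path, int trailerSize) throws IOException {
        try (RandomAccessFile f = new RandomAccessFile(path, "r")) {
          long fileSize = f.length();
          if (fileSize < trailerSize) {
            throw new IOException("file too small to hold a trailer");
          }
          byte[] buf = new byte[trailerSize];
          f.seek(fileSize - trailerSize);
          f.readFully(buf);
          return buf;
        }
      }
    }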
     

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
    --
diff --git a/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html b/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
index 9638c2e..ee7e1b8 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
@@ -246,7 +246,7 @@
 
 
 protected void
-AssignProcedure.finishTransition(MasterProcedureEnv env,
+UnassignProcedure.finishTransition(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode)
 
 
@@ -256,7 +256,7 @@
 
 
 protected void
-UnassignProcedure.finishTransition(MasterProcedureEnv env,
+AssignProcedure.finishTransition(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode)
 
 
@@ -307,7 +307,7 @@
 
 
 protected boolean
-AssignProcedure.remoteCallFailed(MasterProcedureEnv env,
+UnassignProcedure.remoteCallFailed(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode,
 IOException exception)
 
@@ -319,7 +319,7 @@
 
 
 protected boolean
-UnassignProcedure.remoteCallFailed(MasterProcedureEnv env,
+AssignProcedure.remoteCallFailed(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode,
 IOException exception)
 
@@ -344,10 +344,10 @@
 
 
 protected void
-AssignProcedure.reportTransition(MasterProcedureEnv env,
+UnassignProcedure.reportTransition(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode,
 org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode code,
-long openSeqNum)
+long seqId)
 
 
 protected abstract void
@@ -358,10 +358,10 @@
 
 
 protected void
-UnassignProcedure.reportTransition(MasterProcedureEnv env,
+AssignProcedure.reportTransition(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode,
 org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode code,
-long seqId)
+long openSeqNum)
 
 
 private boolean
@@ -372,7 +372,7 @@
 
 
 protected boolean
-AssignProcedure.startTransition(MasterProcedureEnv env,
+UnassignProcedure.startTransition(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode)
 
 
@@ -382,7 +382,7 @@
 
 
 protected boolean
-UnassignProcedure.startTransition(MasterProcedureEnv env,
+AssignProcedure.startTransition(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode)
 
 
@@ -399,7 +399,7 @@
 
 
 protected boolean
-AssignProcedure.updateTransition(MasterProcedureEnv env,
+UnassignProcedure.updateTransition(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode)
 
 
@@ -411,7 +411,7 @@
 
 
 protected boolean
-UnassignProcedure.updateTransition(MasterProcedureEnv env,
+AssignProcedure.updateTransition(MasterProcedureEnv env,
 RegionStates.RegionStateNode regionNode)
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.Action.html
    --
diff --git a/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.Action.html b/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.Action.html
index b9977f2..0251f89 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.Action.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.Action.html
@@ -137,14 +137,6 @@
 
 
 
-protected BaseLoadBalancer.Cluster.Action
-FavoredStochasticBalancer.FavoredNodeLocalityPicker.generate(BaseLoadBalancer.Cluster cluster)
-
-
-(package private) BaseLoadBalancer.Cluster.Action
-FavoredStochasticBalancer.FavoredNodeLoadPicker.generate(BaseLoadBalancer.Cluster cluster)
-
-
 (package private) abstract BaseLoadBalancer.Cluster.Action
 StochasticLoadBalancer.CandidateGenerator.generate(BaseLoadBalancer.Cluster cluster)
 
@@ -170,6 +162,14 @@
 
 
 protected BaseLoadBalancer.Cluster.Action
+FavoredStochasticBalancer.FavoredNodeLocalityPicker.generate(BaseLoadBalancer.Cluster cluster)
+
+(package private) BaseLoadBalancer.Cluster.Action
+FavoredStochasticBalancer.FavoredNodeLoadPicker.generate(BaseLoadBalancer.Cluster cluster)
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f272b0e8/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
    --
diff --git a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index 49f85aa..6e37f0b 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -162,11 +162,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 ImmutableBytesWritable
-TableRecordReader.createKey()
+TableSnapshotInputFormat.TableSnapshotRecordReader.createKey()
 
 
 ImmutableBytesWritable
-TableSnapshotInputFormat.TableSnapshotRecordReader.createKey()
+TableRecordReader.createKey()
 
 
 ImmutableBytesWritable
@@ -183,11 +183,9 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader<ImmutableBytesWritable,Result>
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
 org.apache.hadoop.mapred.JobConf job,
-   org.apache.hadoop.mapred.Reporter reporter)
-Builds a TableRecordReader.
-
+   org.apache.hadoop.mapred.Reporter reporter)
 
 
 org.apache.hadoop.mapred.RecordReader<ImmutableBytesWritable,Result>
@@ -197,9 +195,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader<ImmutableBytesWritable,Result>
-TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
 org.apache.hadoop.mapred.JobConf job,
-   org.apache.hadoop.mapred.Reporter reporter)
+   org.apache.hadoop.mapred.Reporter reporter)
+Builds a TableRecordReader.
+
 
 
 
@@ -218,12 +218,10 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-   Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+   Result values,
 org.apache.hadoop.mapred.OutputCollector<ImmutableBytesWritable,Result> output,
-   org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
+   org.apache.hadoop.mapred.Reporter reporter)
 
 
 void
@@ -236,19 +234,21 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-   Result values,
+IdentityTableMap.map(ImmutableBytesWritable key,
+   Result value,
 org.apache.hadoop.mapred.OutputCollector<ImmutableBytesWritable,Result> output,
-   org.apache.hadoop.mapred.Reporter reporter)
+   org.apache.hadoop.mapred.Reporter reporter)
+Pass the key, value to reduce
+
 
 
 boolean
-TableRecordReader.next(ImmutableBytesWritable key,
+TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
 Result value)
 
 
 boolean
-TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritable key,
+TableRecordReader.next(ImmutableBytesWritable key,
 Result value)
 
 
@@ -281,12 +281,10 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-   Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+   Result values,
 org.apache.hadoop.mapred.OutputCollector<ImmutableBytesWritable,Result> output,
-   org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
+   org.apache.hadoop.mapred.Reporter reporter)
 
 
 void
@@ -299,10 +297,12 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-   Result values,
+IdentityTableMap.map(ImmutableBytesWritable key,
+   Result value,
 org.apache.hadoop.mapred.OutputCollector<ImmutableBytesWritable,Result> output,
-   org.apache.hadoop.mapred.Reporter reporter)
+   org.apache.hadoop.mapred.Reporter reporter)
+Pass the key, value to reduce
+
 
 
 void
@@ -349,7 +349,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 private ImmutableBytesWritable
-MultithreadedTableMapper.SubMapRecordReader.key
+TableRecordReaderImpl.key
 
 
 private ImmutableBytesWritable
@@ -357,7 +357,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 private ImmutableBytesWritable
-TableRecordReaderImpl.key
+MultithreadedTableMapper.SubMapRecordReader.key
 
 
 (package private) ImmutableBytesWritable
@@ -427,33 +427,33 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
 ImmutableBytesWritable
-MultithreadedTableMapper.SubMapRecordReader.getCurrentKey()
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c83a37c8/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.html
    --
diff --git a/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.html b/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.html
index cfc5ad9..9bd0287 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/balancer/class-use/BaseLoadBalancer.Cluster.html
@@ -157,14 +157,6 @@
 
 
 
-protected BaseLoadBalancer.Cluster.Action
-FavoredStochasticBalancer.FavoredNodeLocalityPicker.generate(BaseLoadBalancer.Cluster cluster)
-
-
-(package private) BaseLoadBalancer.Cluster.Action
-FavoredStochasticBalancer.FavoredNodeLoadPicker.generate(BaseLoadBalancer.Cluster cluster)
-
-
 (package private) abstract BaseLoadBalancer.Cluster.Action
 StochasticLoadBalancer.CandidateGenerator.generate(BaseLoadBalancer.Cluster cluster)
 
@@ -189,6 +181,14 @@
 StochasticLoadBalancer.RegionReplicaRackCandidateGenerator.generate(BaseLoadBalancer.Cluster cluster)
 
 
+protected BaseLoadBalancer.Cluster.Action
+FavoredStochasticBalancer.FavoredNodeLocalityPicker.generate(BaseLoadBalancer.Cluster cluster)
+
+(package private) BaseLoadBalancer.Cluster.Action
+FavoredStochasticBalancer.FavoredNodeLoadPicker.generate(BaseLoadBalancer.Cluster cluster)
+
+
 private int
 FavoredStochasticBalancer.FavoredNodeLocalityPicker.getDifferentFavoredNode(BaseLoadBalancer.Cluster cluster,
 List<ServerName> favoredNodes,
@@ -247,12 +247,12 @@
 
 
 
 private int
-FavoredStochasticBalancer.FavoredNodeLoadPicker.pickLeastLoadedServer(BaseLoadBalancer.Cluster cluster,
+StochasticLoadBalancer.LoadCandidateGenerator.pickLeastLoadedServer(BaseLoadBalancer.Cluster cluster,
  int thisServer)
 
 
 private int
-StochasticLoadBalancer.LoadCandidateGenerator.pickLeastLoadedServer(BaseLoadBalancer.Cluster cluster,
+FavoredStochasticBalancer.FavoredNodeLoadPicker.pickLeastLoadedServer(BaseLoadBalancer.Cluster cluster,
  int thisServer)
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c83a37c8/devapidocs/org/apache/hadoop/hbase/master/class-use/CatalogJanitor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/class-use/CatalogJanitor.html b/devapidocs/org/apache/hadoop/hbase/master/class-use/CatalogJanitor.html
index d29d09b..ed61e0f 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/CatalogJanitor.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/CatalogJanitor.html
@@ -117,11 +117,11 @@
 
 
 CatalogJanitor
-MasterServices.getCatalogJanitor()
+HMaster.getCatalogJanitor()
 
 
 CatalogJanitor
-HMaster.getCatalogJanitor()
+MasterServices.getCatalogJanitor()
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c83a37c8/devapidocs/org/apache/hadoop/hbase/master/class-use/ClusterSchema.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/class-use/ClusterSchema.html b/devapidocs/org/apache/hadoop/hbase/master/class-use/ClusterSchema.html
index fb802a0..fcc4f20 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/ClusterSchema.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/ClusterSchema.html
@@ -132,11 +132,11 @@
 
 
 ClusterSchema
-MasterServices.getClusterSchema()
+HMaster.getClusterSchema()
 
 
 ClusterSchema
-HMaster.getClusterSchema()
+MasterServices.getClusterSchema()
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c83a37c8/devapidocs/org/apache/hadoop/hbase/master/class-use/HMaster.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/class-use/HMaster.html b/devapidocs/org/apache/hadoop/hbase/master/class-use/HMaster.html
index ce449a7..b76d2e9 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/HMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/HMaster.html
@@ -212,11 +212,11 @@
 
 
 private HMaster
-MasterMobCompactionThread.master
+HMaster.InitializationMonitor.master
 
 
 private HMaster
-MetricsMasterWrapperImpl.master
+ClusterStatusPublisher.master
 
 
 private HMaster
@@ -224,15 +224,15 @@
 
 
 private HMaster
-ExpiredMobFileCleanerChore.master
+MetricsMasterWrapperImpl.master
 
 
 private HMaster
-HMaster.InitializationMonitor.master
+MasterRpcServices.master
 
 
 private HMaster
-MasterRpcServices.master
+MasterMobCompactionThread.master
 
 
 private HMaster
@@ -240,7 +240,7 @@
 
 
 private HMaster
-ClusterStatusPublisher.master

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/master/class-use/SplitLogManager.Task.html
    --
diff --git a/devapidocs/org/apache/hadoop/hbase/master/class-use/SplitLogManager.Task.html b/devapidocs/org/apache/hadoop/hbase/master/class-use/SplitLogManager.Task.html
index c217815..0f72177 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/SplitLogManager.Task.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/SplitLogManager.Task.html
@@ -147,18 +147,18 @@
 
 
 boolean
-ZKSplitLogManagerCoordination.resubmitTask(String path,
-SplitLogManager.Task task,
-SplitLogManager.ResubmitDirective directive)
-
-
-boolean
 SplitLogManagerCoordination.resubmitTask(String taskName,
 SplitLogManager.Task task,
 SplitLogManager.ResubmitDirective force)
 Resubmit the task in case if found unassigned or failed
 
 
+
+boolean
+ZKSplitLogManagerCoordination.resubmitTask(String path,
+SplitLogManager.Task task,
+SplitLogManager.ResubmitDirective directive)
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/master/class-use/TableStateManager.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/class-use/TableStateManager.html b/devapidocs/org/apache/hadoop/hbase/master/class-use/TableStateManager.html
index ba0cca5..3117787 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/TableStateManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/TableStateManager.html
@@ -121,11 +121,11 @@
 
 
 TableStateManager
-HMaster.getTableStateManager()
+MasterServices.getTableStateManager()
 
 
 TableStateManager
-MasterServices.getTableStateManager()
+HMaster.getTableStateManager()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/master/locking/class-use/LockManager.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/locking/class-use/LockManager.html b/devapidocs/org/apache/hadoop/hbase/master/locking/class-use/LockManager.html
index a444123..b5b7703 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/locking/class-use/LockManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/locking/class-use/LockManager.html
@@ -117,11 +117,11 @@
 
 
 LockManager
-HMaster.getLockManager()
+MasterServices.getLockManager()
 
 
 LockManager
-MasterServices.getLockManager()
+HMaster.getLockManager()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/master/normalizer/class-use/NormalizationPlan.PlanType.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/normalizer/class-use/NormalizationPlan.PlanType.html b/devapidocs/org/apache/hadoop/hbase/master/normalizer/class-use/NormalizationPlan.PlanType.html
index d589aaa..f476c74 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/normalizer/class-use/NormalizationPlan.PlanType.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/normalizer/class-use/NormalizationPlan.PlanType.html
@@ -104,15 +104,15 @@
 
 
 NormalizationPlan.PlanType
-MergeNormalizationPlan.getType()
+NormalizationPlan.getType()
 
 
 NormalizationPlan.PlanType
-NormalizationPlan.getType()
+SplitNormalizationPlan.getType()
 
 
 NormalizationPlan.PlanType
-SplitNormalizationPlan.getType()
+MergeNormalizationPlan.getType()
 
 
 NormalizationPlan.PlanType

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/master/normalizer/class-use/RegionNormalizer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/normalizer/class-use/RegionNormalizer.html b/devapidocs/org/apache/hadoop/hbase/master/normalizer/class-use/RegionNormalizer.html
index ad4e9b4..d8fb2f6 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/normalizer/class-use/RegionNormalizer.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/normalizer/class-use/RegionNormalizer.html
@@ -125,11 +125,11 @@
 
 
 RegionNormalizer
-HMaster.getRegionNormalizer()
+MasterServices.getRegionNormalizer()
 
 
 RegionNormalizer
-MasterServices.getRegionNormalizer()
+HMaster.getRegionNormalizer()
     
     
     
    
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6674e3ab/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
    --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
index 7515d7b..3c4825d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
    @@ -762,7 +762,7 @@
 754boolean wasUp = this.clusterStatusTracker.isClusterUp();
 755if (!wasUp) this.clusterStatusTracker.setClusterUp();
 756
-757LOG.info("Server active/primary master=" + this.serverName +
+757LOG.info("Active/primary master=" + this.serverName +
 758", sessionid=0x" +
 759Long.toHexString(this.zooKeeper.getRecoverableZooKeeper().getSessionId()) +
 760", setting cluster-up flag (Was=" + wasUp + ")");
    @@ -1161,7 +1161,7 @@
 1153   startProcedureExecutor();
 1154
 1155   // Start log cleaner thread
-1156   int cleanerInterval = conf.getInt("hbase.master.cleaner.interval", 60 * 1000);
+1156   int cleanerInterval = conf.getInt("hbase.master.cleaner.interval", 600 * 1000);
 1157   this.logCleaner =
 1158  new LogCleaner(cleanerInterval,
 1159 this, conf, getMasterWalManager().getFileSystem(),
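[editorial note: the hunk above moves the default for hbase.master.cleaner.interval from 60 s to 600 s. A minimal sketch of reading or overriding the same key from client-side configuration -- the class and variable names are illustrative assumptions:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class CleanerIntervalExample {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Same lookup as the hunk above: 600 * 1000 ms (10 minutes) is the new default.
        int cleanerInterval = conf.getInt("hbase.master.cleaner.interval", 600 * 1000);
        System.out.println("log/archive cleaner runs every " + cleanerInterval + " ms");
        // Deployments that relied on the old one-minute cadence can restore it:
        conf.setInt("hbase.master.cleaner.interval", 60 * 1000);
      }
    }
]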
    @@ -1227,2368 +1227,2369 @@
 1219procedureExecutor = new ProcedureExecutor(conf, procEnv, procedureStore, procedureScheduler);
 1220configurationManager.registerObserver(procEnv);
 1221
-1222final int numThreads = conf.getInt(MasterProcedureConstants.MASTER_PROCEDURE_THREADS,
-1223Math.max(Runtime.getRuntime().availableProcessors(),
-1224  MasterProcedureConstants.DEFAULT_MIN_MASTER_PROCEDURE_THREADS));
-1225final boolean abortOnCorruption = conf.getBoolean(
-1226MasterProcedureConstants.EXECUTOR_ABORT_ON_CORRUPTION,
-1227MasterProcedureConstants.DEFAULT_EXECUTOR_ABORT_ON_CORRUPTION);
-1228procedureStore.start(numThreads);
-1229procedureExecutor.start(numThreads, abortOnCorruption);
-1230procEnv.getRemoteDispatcher().start();
-1231  }
-1232
-1233  private void stopProcedureExecutor() {
-1234if (procedureExecutor != null) {
-1235  configurationManager.deregisterObserver(procedureExecutor.getEnvironment());
-1236  procedureExecutor.getEnvironment().getRemoteDispatcher().stop();
-1237  procedureExecutor.stop();
-1238  procedureExecutor.join();
-1239  procedureExecutor = null;
-1240}
-1241
-1242if (procedureStore != null) {
-1243  procedureStore.stop(isAborted());
-1244  procedureStore = null;
-1245}
-1246  }
-1247
-1248  private void stopChores() {
-1249if (this.expiredMobFileCleanerChore != null) {
-1250  this.expiredMobFileCleanerChore.cancel(true);
-1251}
-1252if (this.mobCompactChore != null) {
-1253  this.mobCompactChore.cancel(true);
-1254}
-1255if (this.balancerChore != null) {
-1256  this.balancerChore.cancel(true);
-1257}
-1258if (this.normalizerChore != null) {
-1259  this.normalizerChore.cancel(true);
-1260}
-1261if (this.clusterStatusChore != null) {
-1262  this.clusterStatusChore.cancel(true);
-1263}
-1264if (this.catalogJanitorChore != null) {
-1265  this.catalogJanitorChore.cancel(true);
-1266}
-1267if (this.clusterStatusPublisherChore != null){
-1268  clusterStatusPublisherChore.cancel(true);
-1269}
-1270if (this.mobCompactThread != null) {
-1271  this.mobCompactThread.close();
-1272}
-1273
-1274if (this.quotaObserverChore != null) {
-1275  quotaObserverChore.cancel();
-1276}
-1277if (this.snapshotQuotaChore != null) {
-1278  snapshotQuotaChore.cancel();
-1279}
-1280  }
-1281
-1282  /**
-1283   * @return Get remote side's InetAddress
-1284   */
-1285  InetAddress getRemoteInetAddress(final int port,
-1286  final long serverStartCode) throws UnknownHostException {
-1287// Do it out here in its own little method so can fake an address when
-1288// mocking up in tests.
-1289InetAddress ia = RpcServer.getRemoteIp();
-1290
-1291// The call could be from the local regionserver,
-1292// in which case, there is no remote address.
-1293if (ia == null && serverStartCode == startcode) {
-1294  InetSocketAddress isa = rpcServices.getSocketAddress();
-1295  if (isa != null && isa.getPort() == port) {
-1296ia = isa.getAddress();
-1297  }
-1298}
-1299return ia;
-1300  }
-1301
-1302  /**
-1303   * @return Maximum time we should run balancer for
-1304   */
-1305  private int getMaxBalancingTime() {
-1306int maxBalancingTime = getConfiguration().getInt(HConstants.HBASE_BALANCER_MAX_BALANCING, -1);
-1307if
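[editorial note: getMaxBalancingTime() above reads HConstants.HBASE_BALANCER_MAX_BALANCING with a -1 "not configured" sentinel; the hunk is cut off before the fallback. A hedged sketch of the surrounding convention -- the fall-back to the balancer period is an assumption for illustration, not a claim about the truncated code:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;

    public class MaxBalancingTimeExample {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // -1 is the sentinel used by the hunk above.
        int maxBalancingTime = conf.getInt(HConstants.HBASE_BALANCER_MAX_BALANCING, -1);
        if (maxBalancingTime == -1) {
          // Assumption: when unset, bound the run by the balancer period instead.
          maxBalancingTime = conf.getInt("hbase.balancer.period", 300000);
        }
        System.out.println("balancer may run for at most " + maxBalancingTime + " ms");
      }
    }
]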

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1f2eeb22/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
    --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
index 4a87b9d..7515d7b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
    @@ -25,3542 +25,3570 @@
     017 */
     018package org.apache.hadoop.hbase.master;
     019
    -020import com.google.protobuf.Descriptors;
    -021import com.google.protobuf.Service;
    -022import java.io.IOException;
    -023import java.io.InterruptedIOException;
    -024import java.lang.reflect.Constructor;
    -025import 
    java.lang.reflect.InvocationTargetException;
    -026import java.net.InetAddress;
    -027import java.net.InetSocketAddress;
    -028import java.net.UnknownHostException;
    -029import java.util.ArrayList;
    -030import java.util.Arrays;
    -031import java.util.Collection;
    -032import java.util.Collections;
    -033import java.util.Comparator;
    -034import java.util.EnumSet;
    -035import java.util.HashMap;
    -036import java.util.Iterator;
    -037import java.util.List;
    -038import java.util.Map;
    -039import java.util.Map.Entry;
    -040import java.util.Objects;
    -041import java.util.Set;
    -042import 
    java.util.concurrent.ExecutionException;
    -043import java.util.concurrent.Future;
    -044import java.util.concurrent.TimeUnit;
    -045import 
    java.util.concurrent.TimeoutException;
    -046import 
    java.util.concurrent.atomic.AtomicInteger;
    -047import 
    java.util.concurrent.atomic.AtomicReference;
    -048import java.util.function.Function;
    -049import java.util.regex.Pattern;
    -050import java.util.stream.Collectors;
    -051import javax.servlet.ServletException;
    -052import javax.servlet.http.HttpServlet;
    -053import 
    javax.servlet.http.HttpServletRequest;
    -054import 
    javax.servlet.http.HttpServletResponse;
    -055import 
    org.apache.commons.lang3.StringUtils;
    -056import 
    org.apache.hadoop.conf.Configuration;
    -057import org.apache.hadoop.fs.Path;
    -058import 
    org.apache.hadoop.hbase.ClusterId;
    -059import 
    org.apache.hadoop.hbase.ClusterMetrics;
    -060import 
    org.apache.hadoop.hbase.ClusterMetrics.Option;
    -061import 
    org.apache.hadoop.hbase.ClusterMetricsBuilder;
    -062import 
    org.apache.hadoop.hbase.CoordinatedStateException;
    -063import 
    org.apache.hadoop.hbase.DoNotRetryIOException;
    -064import 
    org.apache.hadoop.hbase.HBaseIOException;
    -065import 
    org.apache.hadoop.hbase.HBaseInterfaceAudience;
    -066import 
    org.apache.hadoop.hbase.HConstants;
    -067import 
    org.apache.hadoop.hbase.InvalidFamilyOperationException;
    -068import 
    org.apache.hadoop.hbase.MasterNotRunningException;
    -069import 
    org.apache.hadoop.hbase.MetaTableAccessor;
    -070import 
    org.apache.hadoop.hbase.NamespaceDescriptor;
    -071import 
    org.apache.hadoop.hbase.PleaseHoldException;
    -072import 
    org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
    -073import 
    org.apache.hadoop.hbase.ServerLoad;
    -074import 
    org.apache.hadoop.hbase.ServerMetricsBuilder;
    -075import 
    org.apache.hadoop.hbase.ServerName;
    -076import 
    org.apache.hadoop.hbase.TableDescriptors;
    -077import 
    org.apache.hadoop.hbase.TableName;
    -078import 
    org.apache.hadoop.hbase.TableNotDisabledException;
    -079import 
    org.apache.hadoop.hbase.TableNotFoundException;
    -080import 
    org.apache.hadoop.hbase.UnknownRegionException;
    -081import 
    org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
    -082import 
    org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    -083import 
    org.apache.hadoop.hbase.client.MasterSwitchType;
    -084import 
    org.apache.hadoop.hbase.client.RegionInfo;
    -085import 
    org.apache.hadoop.hbase.client.Result;
    -086import 
    org.apache.hadoop.hbase.client.TableDescriptor;
    -087import 
    org.apache.hadoop.hbase.client.TableDescriptorBuilder;
    -088import 
    org.apache.hadoop.hbase.client.TableState;
    -089import 
    org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
    -090import 
    org.apache.hadoop.hbase.exceptions.DeserializationException;
    -091import 
    org.apache.hadoop.hbase.exceptions.MergeRegionException;
    -092import 
    org.apache.hadoop.hbase.executor.ExecutorType;
    -093import 
    org.apache.hadoop.hbase.favored.FavoredNodesManager;
    -094import 
    org.apache.hadoop.hbase.favored.FavoredNodesPromoter;
    -095import 
    org.apache.hadoop.hbase.http.InfoServer;
    -096import 
    org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
    -097import 
    org.apache.hadoop.hbase.ipc.RpcServer;
    -098import 
    org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
    -099import 
    org.apache.hadoop.hbase.log.HBaseMarkers;
    -100import 
    org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode;
    -101import 
    org.apache.hadoop.hbase.master.assignment.AssignmentManager;
    -102import 
    org.apache.hadoop.hbase.master.assignment.MergeTableRegionsProcedure;
    -103import 
    org.apache.hadoop.hbase.master.assignment.RegionStates;
    -104import 
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/cc6597ec/testdevapidocs/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.html
    --
diff --git a/testdevapidocs/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.html b/testdevapidocs/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.html
index d00d3ac..3d3eefb 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.html
    @@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
     
     
     
    -public class TestBlockEvictionFromClient
    +public class TestBlockEvictionFromClient
     extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
     title="class or interface in java.lang">Object
     
     
    @@ -169,82 +169,86 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     Field and Description
     
     
    +static HBaseClassTestRule
    +CLASS_RULE
    +
    +
     private static http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CountDownLatch.html?is-external=true;
     title="class or interface in 
    java.util.concurrent">CountDownLatch
     compactionLatch
     
    -
    +
     private static byte[]
     data
     
    -
    +
     private static byte[]
     data2
     
    -
    +
     private static http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CountDownLatch.html?is-external=true;
     title="class or interface in 
    java.util.concurrent">CountDownLatch
     exceptionLatch
     
    -
    +
     private static byte[][]
     FAMILIES_1
     
    -
    +
     private static byte[]
     FAMILY
     
    -
    +
     private static http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CountDownLatch.html?is-external=true;
     title="class or interface in 
    java.util.concurrent">CountDownLatch
     getLatch
     
    -
    +
     private static http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CountDownLatch.html?is-external=true;
     title="class or interface in 
    java.util.concurrent">CountDownLatch
     latch
     
    -
    +
     private static org.slf4j.Logger
     LOG
     
    -
    +
     org.junit.rules.TestName
     name
     
    -
    +
     private static int
     NO_OF_THREADS
     
    -
    +
     private static byte[]
     QUALIFIER
     
    -
    +
     private static byte[]
     QUALIFIER2
     
    -
    +
     private static byte[]
     ROW
     
    -
    +
     private static byte[]
     ROW1
     
    -
    +
     private static byte[]
     ROW2
     
    -
    +
     private static byte[]
     ROW3
     
    -
    +
     (package private) static byte[][]
     ROWS
     
    -
    +
     protected static int
     SLAVES
     
    -
    +
     protected static HBaseTestingUtility
     TEST_UTIL
     
    @@ -415,13 +419,22 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     Field Detail
    +
    +
    +
    +
    +
    +CLASS_RULE
    +public static finalHBaseClassTestRule CLASS_RULE
    +
    +
     
     
     
     
     
     LOG
    -private static finalorg.slf4j.Logger LOG
    +private static finalorg.slf4j.Logger LOG
     
     
     
    @@ -430,7 +443,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     TEST_UTIL
    -protected static finalHBaseTestingUtility TEST_UTIL
    +protected static finalHBaseTestingUtility TEST_UTIL
     
     
     
    @@ -439,7 +452,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     ROWS
    -staticbyte[][] ROWS
    +staticbyte[][] ROWS
     
     
     
    @@ -448,7 +461,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     NO_OF_THREADS
    -private staticint NO_OF_THREADS
    +private staticint NO_OF_THREADS
     
     
     
    @@ -457,7 +470,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     ROW
    -private staticbyte[] ROW
    +private staticbyte[] ROW
     
     
     
    @@ -466,7 +479,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     ROW1
    -private staticbyte[] ROW1
    +private staticbyte[] ROW1
     
     
     
    @@ -475,7 +488,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     ROW2
    -private staticbyte[] ROW2
    +private staticbyte[] ROW2
     
     
     
    @@ -484,7 +497,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     ROW3
    -private staticbyte[] ROW3
    +private staticbyte[] ROW3
     
     
     
    @@ -493,7 +506,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     FAMILY
    -private staticbyte[] FAMILY
    +private staticbyte[] FAMILY
     
     
     
    @@ -502,7 +515,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     FAMILIES_1
    -private staticbyte[][] FAMILIES_1
    +private staticbyte[][] FAMILIES_1
     
     
     
    @@ -511,7 +524,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     QUALIFIER
    -private staticbyte[] QUALIFIER
    +private staticbyte[] QUALIFIER
     
     
     
    @@ -520,7 +533,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     QUALIFIER2
    -private staticbyte[] QUALIFIER2
    +private staticbyte[] QUALIFIER2
     
     
     
    @@ -529,7 +542,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     data
    -private staticbyte[] data
    +private staticbyte[] data
     
     
     
    @@ -538,7 +551,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     data2
    -private staticbyte[] data2
    +private staticbyte[] 
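[editorial note: the new CLASS_RULE field added to TestBlockEvictionFromClient above is the HBaseClassTestRule that HBase test classes gained around this time. A minimal sketch of the declaration pattern, assuming a JUnit 4 test on the HBase test classpath; the class name is an illustrative assumption:

    import org.apache.hadoop.hbase.HBaseClassTestRule;
    import org.junit.ClassRule;
    import org.junit.Test;

    public class ExampleHBaseTest {
      // Enforces per-class timeout/size rules; forClass() ties the rule to this test.
      @ClassRule
      public static final HBaseClassTestRule CLASS_RULE =
          HBaseClassTestRule.forClass(ExampleHBaseTest.class);

      @Test
      public void testSomething() {
        // test body
      }
    }
]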

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.html
    --
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.html
index 9baf566..460989c 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.html
    @@ -71,7 +71,7 @@
     063  private String targetRootDir;
     064  private boolean isOverwrite;
     065
-066  public RestoreTablesClient(Connection conn, RestoreRequest request) throws IOException {
+066  public RestoreTablesClient(Connection conn, RestoreRequest request) {
 067this.targetRootDir = request.getBackupRootDir();
 068this.backupId = request.getBackupId();
 069this.sTableArray = request.getFromTables();
@@ -82,206 +82,197 @@
 074this.isOverwrite = request.isOverwrite();
 075this.conn = conn;
 076this.conf = conn.getConfiguration();
-077
-078  }
-079
-080  /**
-081   * Validate target tables
-082   * @param conn connection
-083   * @param mgr table state manager
-084   * @param tTableArray: target tables
-085   * @param isOverwrite overwrite existing table
-086   * @throws IOException exception
-087   */
-088  private void checkTargetTables(TableName[] tTableArray, boolean isOverwrite) throws IOException {
-089ArrayList<TableName> existTableList = new ArrayList<>();
-090ArrayList<TableName> disabledTableList = new ArrayList<>();
-091
-092// check if the tables already exist
-093try (Admin admin = conn.getAdmin()) {
-094  for (TableName tableName : tTableArray) {
-095if (admin.tableExists(tableName)) {
-096  existTableList.add(tableName);
-097  if (admin.isTableDisabled(tableName)) {
-098disabledTableList.add(tableName);
-099  }
-100} else {
-101  LOG.info("HBase table " + tableName
-102  + " does not exist. It will be created during restore process");
-103}
-104  }
-105}
-106
-107if (existTableList.size() > 0) {
-108  if (!isOverwrite) {
-109LOG.error("Existing table (" + existTableList
-110+ ") found in the restore target, please add "
-111+ "\"-o\" as overwrite option in the command if you mean"
-112+ " to restore to these existing tables");
-113throw new IOException("Existing table found in target while no \"-o\" "
-114+ "as overwrite option found");
-115  } else {
-116if (disabledTableList.size() > 0) {
-117  LOG.error("Found offline table in the restore target, "
-118  + "please enable them before restore with \"-overwrite\" option");
-119  LOG.info("Offline table list in restore target: " + disabledTableList);
-120  throw new IOException(
-121  "Found offline table in the target when restore with \"-overwrite\" option");
-122}
-123  }
-124}
-125  }
-126
-127  /**
-128   * Restore operation handle each backupImage in array
-129   * @param svc: master services
-130   * @param images: array BackupImage
-131   * @param sTable: table to be restored
-132   * @param tTable: table to be restored to
-133   * @param truncateIfExists: truncate table
-134   * @throws IOException exception
-135   */
-136
-137  private void restoreImages(BackupImage[] images, TableName sTable, TableName tTable,
-138  boolean truncateIfExists) throws IOException {
-139
-140// First image MUST be image of a FULL backup
-141BackupImage image = images[0];
-142String rootDir = image.getRootDir();
-143String backupId = image.getBackupId();
-144Path backupRoot = new Path(rootDir);
-145RestoreTool restoreTool = new RestoreTool(conf, backupRoot, backupId);
-146Path tableBackupPath = HBackupFileSystem.getTableBackupPath(sTable, backupRoot, backupId);
-147String lastIncrBackupId = images.length == 1 ? null : images[images.length - 1].getBackupId();
-148// We need hFS only for full restore (see the code)
-149BackupManifest manifest = HBackupFileSystem.getManifest(conf, backupRoot, backupId);
-150if (manifest.getType() == BackupType.FULL) {
-151  LOG.info("Restoring '" + sTable + "' to '" + tTable + "' from full" + " backup image "
-152  + tableBackupPath.toString());
-153  conf.set(JOB_NAME_CONF_KEY, "Full_Restore-" + backupId + "-" + tTable);
-154  restoreTool.fullRestoreTable(conn, tableBackupPath, sTable, tTable, truncateIfExists,
-155lastIncrBackupId);
-156  conf.unset(JOB_NAME_CONF_KEY);
-157} else { // incremental Backup
-158  throw new IOException("Unexpected backup type " + image.getType());
-159}
-160
-161if (images.length == 1) {
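[editorial note: checkTargetTables() above gates a restore on table existence and enabled/disabled state. A condensed sketch of the same Admin checks, assuming an open Connection; the class and method names here are illustrative assumptions:

    import java.io.IOException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;

    public class RestoreTargetCheck {
      // Mirrors the validation above: an existing table needs the overwrite flag,
      // and an existing-but-disabled table must be enabled first.
      static void checkTarget(Connection conn, TableName table, boolean overwrite)
          throws IOException {
        try (Admin admin = conn.getAdmin()) {
          if (!admin.tableExists(table)) {
            return; // will be created during the restore
          }
          if (!overwrite) {
            throw new IOException("Existing table found in target while no \"-o\" given");
          }
          if (admin.isTableDisabled(table)) {
            throw new IOException("Enable " + table + " before restoring with -overwrite");
          }
        }
      }
    }
]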
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/96e5e102/testdevapidocs/org/apache/hadoop/hbase/rest/model/TestNamespacesInstanceModel.html
    --
diff --git a/testdevapidocs/org/apache/hadoop/hbase/rest/model/TestNamespacesInstanceModel.html b/testdevapidocs/org/apache/hadoop/hbase/rest/model/TestNamespacesInstanceModel.html
index cc203e4..f1a1387 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/rest/model/TestNamespacesInstanceModel.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/rest/model/TestNamespacesInstanceModel.html
    @@ -100,12 +100,6 @@ var activeTableTab = "activeTableTab";
     http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
     title="class or interface in java.lang">java.lang.Object
     
     
    -junit.framework.Assert
    -
    -
    -junit.framework.TestCase
    -
    -
     org.apache.hadoop.hbase.rest.model.TestModelBaseorg.apache.hadoop.hbase.rest.model.NamespacesInstanceModel
     
     
    @@ -115,20 +109,12 @@ var activeTableTab = "activeTableTab";
     
     
     
    -
    -
    -
    -
     
     
     
    -
    -All Implemented Interfaces:
    -junit.framework.Test
    -
     
     
    -public class TestNamespacesInstanceModel
    +public class TestNamespacesInstanceModel
     extends TestModelBaseorg.apache.hadoop.hbase.rest.model.NamespacesInstanceModel
     
     
    @@ -235,18 +221,11 @@ extends fromJSON,
     fromPB,
     fromXML,
     testFromJSON,
     testToJSON,
     testToXML,
     toJSON,
     toPB,
     toXML
     
     
    -
    -
    -
    -Methods inherited from classjunit.framework.TestCase
    -assertEquals, assertEquals, assertEquals, assertEquals, assertEquals, 
    assertEquals, assertEquals, assertEquals, assertEquals, assertEquals, 
    assertEquals, assertEquals, assertEquals, assertEquals, assertEquals, 
    assertEquals, assertEquals, assertEquals, assertEquals, assertEquals, 
    assertFalse, assertFalse, assertNotNull, assertNotNull, assertNotSame, 
    assertNotSame, assertNull, assertNull, assertSame, assertSame, assertTrue, 
    assertTrue, countTestCases, createResult, fail, fail, failNotEquals, 
    failNotSame, failSame, format, getName, run, run, runBare, runTest, setName, 
    setUp, tearDown, toString
    -
    -
     
     
     
     Methods inherited from classjava.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
     title="class or interface in java.lang">Object
    -http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
     title="class or interface in java.lang">clone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
     title="class or interface in java.lang">equals, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
     title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
     title="class or interface in java.lang">getClass, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
     title="class or interface in java.lang">hashCode, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
     title="class or interface in java.lang">notify, http://docs.oracle.com/javase/8/docs/api/java/lang
     /Object.html?is-external=true#notifyAll--" title="class or interface in 
    java.lang">notifyAll, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--;
     title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-;
     title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-;
     title="class or interface in java.lang">wait
    +http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
     title="class or interface in java.lang">clone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
     title="class or interface in java.lang">equals, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
     title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
     title="class or interface in java.lang">getClass, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
     title="class or interface in java.lang">hashCode, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
     title="class or interface in java.lang">notify, http://docs.oracle.com/javase/8/docs/api/java/lang
     /Object.html?is-external=true#notifyAll--" title="class or interface in 
    java.lang">notifyAll, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--;
     title="class or interface in java.lang">toString, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--;
     title="class or interface in java.lang">wait, 
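[editorial note: the hunk above drops junit.framework.Assert/TestCase from TestModelBase's ancestry, i.e. a JUnit 3 to JUnit 4 migration. A minimal sketch of the migrated pattern using standard JUnit 4 API; the class and fixture names are illustrative assumptions:

    import static org.junit.Assert.assertEquals;

    import org.junit.Before;
    import org.junit.Test;

    // No TestCase superclass: lifecycle and assertions come from annotations
    // and static imports instead of inheritance.
    public class ExampleMigratedTest {
      private String fixture;

      @Before   // replaces TestCase.setUp()
      public void setUp() {
        fixture = "value";
      }

      @Test     // replaces the JUnit 3 test* naming convention
      public void testFixture() {
        assertEquals("value", fixture);
      }
    }
]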

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8118541f/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
    --
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
index 9ebdf9a..3c63532 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
    @@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab";
     
     
     
    -class HRegion.RegionScannerImpl
    +class HRegion.RegionScannerImpl
     extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
     title="class or interface in java.lang">Object
     implements RegionScanner, Shipper, RpcCallback
     RegionScannerImpl is used to combine scanners from multiple 
    Stores (aka column families).
    @@ -425,7 +425,7 @@ implements 
     
     storeHeap
    -KeyValueHeap storeHeap
    +KeyValueHeap storeHeap
     
     
     
    @@ -434,7 +434,7 @@ implements 
     
     joinedHeap
    -KeyValueHeap joinedHeap
    +KeyValueHeap joinedHeap
     Heap of key-values that are not essential for the provided 
    filters and are thus read
      on demand, if on-demand column family loading is enabled.
     
    @@ -445,7 +445,7 @@ implements 
     
     joinedContinuationRow
    -protectedCell joinedContinuationRow
    +protectedCell joinedContinuationRow
     If the joined heap data gathering is interrupted due to 
    scan limits, this will
      contain the row for which we are populating the values.
     
    @@ -456,7 +456,7 @@ implements 
     
     filterClosed
    -privateboolean filterClosed
    +privateboolean filterClosed
     
     
     
    @@ -465,7 +465,7 @@ implements 
     
     stopRow
    -protected finalbyte[] stopRow
    +protected finalbyte[] stopRow
     
     
     
    @@ -474,7 +474,7 @@ implements 
     
     includeStopRow
    -protected finalboolean includeStopRow
    +protected finalboolean includeStopRow
     
     
     
    @@ -483,7 +483,7 @@ implements 
     
     region
    -protected finalHRegion region
    +protected finalHRegion region
     
     
     
    @@ -492,7 +492,7 @@ implements 
     
     comparator
    -protected finalCellComparator comparator
    +protected finalCellComparator comparator
     
     
     
    @@ -501,7 +501,7 @@ implements 
     
     readPt
    -private finallong readPt
    +private finallong readPt
     
     
     
    @@ -510,7 +510,7 @@ implements 
     
     maxResultSize
    -private finallong maxResultSize
    +private finallong maxResultSize
     
     
     
    @@ -519,7 +519,7 @@ implements 
     
     defaultScannerContext
    -private finalScannerContext defaultScannerContext
    +private finalScannerContext defaultScannerContext
     
     
     
    @@ -528,7 +528,7 @@ implements 
     
     filter
    -private finalFilterWrapper filter
    +private finalFilterWrapper filter
     
     
     
    @@ -545,7 +545,7 @@ implements 
     
     RegionScannerImpl
    -RegionScannerImpl(Scanscan,
    +RegionScannerImpl(Scanscan,
       http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
     title="class or interface in java.util">ListKeyValueScanneradditionalScanners,
       HRegionregion)
    throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
     title="class or interface in java.io">IOException
    @@ -561,7 +561,7 @@ implements 
     
     RegionScannerImpl
    -RegionScannerImpl(Scanscan,
    +RegionScannerImpl(Scanscan,
       http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
     title="class or interface in java.util">ListKeyValueScanneradditionalScanners,
       HRegionregion,
       longnonceGroup,
    @@ -587,7 +587,7 @@ implements 
     
     getRegionInfo
    -publicRegionInfogetRegionInfo()
    +publicRegionInfogetRegionInfo()
     
     Specified by:
     getRegionInfoin
     interfaceRegionScanner
    @@ -602,7 +602,7 @@ implements 
     
     initializeScanners
    -protectedvoidinitializeScanners(Scanscan,
    +protectedvoidinitializeScanners(Scanscan,
       http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
     title="class or interface in java.util">ListKeyValueScanneradditionalScanners)
    throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
     title="class or interface in java.io">IOException
     
    @@ -617,7 +617,7 @@ implements 
     
     initializeKVHeap
    -protectedvoidinitializeKVHeap(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
     title="class or interface in java.util">ListKeyValueScannerscanners,
    +protectedvoidinitializeKVHeap(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
     title="class or interface in java.util">ListKeyValueScannerscanners,
     http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
     title="class or interface in java.util">ListKeyValueScannerjoinedScanners,
     HRegionregion)
      throws 

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/14db89d7/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.HangOnCloseThenRSCrashExecutor.html
    --
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.HangOnCloseThenRSCrashExecutor.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.HangOnCloseThenRSCrashExecutor.html
index f1db5ca..d8515d7 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.HangOnCloseThenRSCrashExecutor.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.HangOnCloseThenRSCrashExecutor.html
    @@ -32,813 +32,820 @@
     024import static org.junit.Assert.fail;
     025
     026import java.io.IOException;
    -027import java.net.SocketTimeoutException;
    -028import java.util.NavigableMap;
    -029import java.util.Random;
    -030import java.util.Set;
    -031import java.util.SortedSet;
    -032import 
    java.util.concurrent.ConcurrentSkipListMap;
    -033import 
    java.util.concurrent.ConcurrentSkipListSet;
    -034import 
    java.util.concurrent.ExecutionException;
    -035import java.util.concurrent.Executors;
    -036import java.util.concurrent.Future;
    -037import 
    java.util.concurrent.ScheduledExecutorService;
    -038import java.util.concurrent.TimeUnit;
    -039
    -040import 
    org.apache.hadoop.conf.Configuration;
    -041import 
    org.apache.hadoop.hbase.CategoryBasedTimeout;
    -042import 
    org.apache.hadoop.hbase.DoNotRetryIOException;
    -043import 
    org.apache.hadoop.hbase.HBaseTestingUtility;
    -044import 
    org.apache.hadoop.hbase.NotServingRegionException;
    -045import 
    org.apache.hadoop.hbase.ServerName;
    -046import 
    org.apache.hadoop.hbase.TableName;
    -047import 
    org.apache.hadoop.hbase.client.RegionInfo;
    -048import 
    org.apache.hadoop.hbase.client.RegionInfoBuilder;
    -049import 
    org.apache.hadoop.hbase.client.RetriesExhaustedException;
    -050import 
    org.apache.hadoop.hbase.exceptions.UnexpectedStateException;
    -051import 
    org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
    -052import 
    org.apache.hadoop.hbase.master.MasterServices;
    -053import 
    org.apache.hadoop.hbase.master.RegionState.State;
    -054import 
    org.apache.hadoop.hbase.master.procedure.MasterProcedureConstants;
    -055import 
    org.apache.hadoop.hbase.master.procedure.ProcedureSyncWait;
    -056import 
    org.apache.hadoop.hbase.master.procedure.RSProcedureDispatcher;
    -057import 
    org.apache.hadoop.hbase.procedure2.Procedure;
    -058import 
    org.apache.hadoop.hbase.procedure2.ProcedureMetrics;
    -059import 
    org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
    -060import 
    org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
    -061import 
    org.apache.hadoop.hbase.procedure2.util.StringUtils;
    -062import 
    org.apache.hadoop.hbase.regionserver.RegionServerAbortedException;
    -063import 
    org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
    -064import 
    org.apache.hadoop.hbase.testclassification.MasterTests;
    -065import 
    org.apache.hadoop.hbase.testclassification.MediumTests;
    -066import 
    org.apache.hadoop.hbase.util.Bytes;
    -067import 
    org.apache.hadoop.hbase.util.FSUtils;
    -068import 
    org.apache.hadoop.ipc.RemoteException;
    -069import org.junit.After;
    -070import org.junit.Before;
    -071import org.junit.Ignore;
    -072import org.junit.Rule;
    -073import org.junit.Test;
    -074import 
    org.junit.experimental.categories.Category;
    -075import 
    org.junit.rules.ExpectedException;
    -076import org.junit.rules.TestName;
    -077import org.junit.rules.TestRule;
    -078import org.slf4j.Logger;
    -079import org.slf4j.LoggerFactory;
    -080import 
    org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
    -081import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
    -082import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
    -083import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresRequest;
    -084import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresResponse;
    -085import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest;
    -086import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo;
    -087import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse;
    -088import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState;
    -089import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition;
    -090import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
    -091import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest;
    -092
    -093@Category({MasterTests.class, 
    MediumTests.class})
    -094public class TestAssignmentManager {
    -095  private 
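[editorial note: TestAssignmentManager above is tagged @Category({MasterTests.class, MediumTests.class}). A minimal sketch of that categorization idiom, assuming the HBase testclassification classes are on the classpath; the class name is an illustrative assumption:

    import org.apache.hadoop.hbase.testclassification.MasterTests;
    import org.apache.hadoop.hbase.testclassification.MediumTests;
    import org.junit.Test;
    import org.junit.experimental.categories.Category;

    // Category markers let the build run small/medium/large suites selectively.
    @Category({ MasterTests.class, MediumTests.class })
    public class ExampleCategorizedTest {
      @Test
      public void testNothingInParticular() {
        // medium tests may start mini-cluster components; this one stays trivial
      }
    }
]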

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0b638133/testdevapidocs/src-html/org/apache/hadoop/hbase/TestCellUtil.NonExtendedCell.html
    --
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/TestCellUtil.NonExtendedCell.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/TestCellUtil.NonExtendedCell.html
index 232ef56..bc3a6d0 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/TestCellUtil.NonExtendedCell.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/TestCellUtil.NonExtendedCell.html
    @@ -29,610 +29,626 @@
     021import static 
    org.junit.Assert.assertEquals;
     022import static 
    org.junit.Assert.assertFalse;
     023import static 
    org.junit.Assert.assertTrue;
    -024
    -025import java.io.ByteArrayOutputStream;
    -026import java.io.IOException;
    -027import java.math.BigDecimal;
    -028import java.nio.ByteBuffer;
    -029import java.util.ArrayList;
    -030import java.util.List;
    -031import java.util.NavigableMap;
    -032import java.util.TreeMap;
    -033import 
    org.apache.hadoop.hbase.testclassification.MiscTests;
    -034import 
    org.apache.hadoop.hbase.testclassification.SmallTests;
    -035import 
    org.apache.hadoop.hbase.util.Bytes;
    -036import org.junit.Assert;
    -037import org.junit.Test;
    -038import 
    org.junit.experimental.categories.Category;
    -039
    -040@Category({MiscTests.class, 
    SmallTests.class})
    -041public class TestCellUtil {
    -042  /**
    -043   * CellScannable used in test. Returns 
    a {@link TestCellScanner}
    -044   */
    -045  private static class TestCellScannable 
    implements CellScannable {
    -046private final int cellsCount;
    -047TestCellScannable(final int 
    cellsCount) {
    -048  this.cellsCount = cellsCount;
    -049}
    -050@Override
    -051public CellScanner cellScanner() {
    -052  return new 
    TestCellScanner(this.cellsCount);
    -053}
    -054  }
    -055
    -056  /**
    -057   * CellScanner used in test.
    -058   */
    -059  private static class TestCellScanner 
    implements CellScanner {
    -060private int count = 0;
    -061private Cell current = null;
    -062private final int cellsCount;
    -063
    -064TestCellScanner(final int cellsCount) 
    {
    -065  this.cellsCount = cellsCount;
    -066}
    -067
    -068@Override
    -069public Cell current() {
    -070  return this.current;
    -071}
    -072
    -073@Override
    -074public boolean advance() throws 
    IOException {
    -075  if (this.count  cellsCount) {
    -076this.current = new 
    TestCell(this.count);
    -077this.count++;
    -078return true;
    -079  }
    -080  return false;
    -081}
    -082  }
    -083
    -084  /**
    -085   * Cell used in test. Has row only.
    -086   */
    -087  private static class TestCell 
    implements Cell {
    -088private final byte [] row;
    -089
    -090TestCell(final int i) {
    -091  this.row = Bytes.toBytes(i);
    -092}
    -093
    -094@Override
    -095public byte[] getRowArray() {
    -096  return this.row;
    -097}
    -098
    -099@Override
    -100public int getRowOffset() {
    -101  return 0;
    -102}
    -103
    -104@Override
    -105public short getRowLength() {
    -106  return (short)this.row.length;
    -107}
    -108
    -109@Override
    -110public byte[] getFamilyArray() {
    -111  // TODO Auto-generated method 
    stub
    -112  return null;
    -113}
    -114
    -115@Override
    -116public int getFamilyOffset() {
    -117  // TODO Auto-generated method 
    stub
    -118  return 0;
    -119}
    -120
    -121@Override
    -122public byte getFamilyLength() {
    -123  // TODO Auto-generated method 
    stub
    -124  return 0;
    -125}
    -126
    -127@Override
    -128public byte[] getQualifierArray() {
    -129  // TODO Auto-generated method 
    stub
    -130  return null;
    -131}
    -132
    -133@Override
    -134public int getQualifierOffset() {
    -135  // TODO Auto-generated method 
    stub
    -136  return 0;
    -137}
    -138
    -139@Override
    -140public int getQualifierLength() {
    -141  // TODO Auto-generated method 
    stub
    -142  return 0;
    -143}
    -144
    -145@Override
    -146public long getTimestamp() {
    -147  // TODO Auto-generated method 
    stub
    -148  return 0;
    -149}
    -150
    -151@Override
    -152public byte getTypeByte() {
    -153  // TODO Auto-generated method 
    stub
    -154  return 0;
    -155}
    -156
    -157@Override
    -158public byte[] getValueArray() {
    -159  // TODO Auto-generated method 
    stub
    -160  return null;
    -161}
    -162
    -163@Override
    -164public int getValueOffset() {
    -165  // TODO Auto-generated method 
    stub
    -166  return 0;
    -167}
    -168
    -169@Override
    -170public int getValueLength() {
    -171  // TODO Auto-generated method 
    stub
    -172  return 0;
    -173}
    -174
    -175@Override
    -176public byte[] getTagsArray() {
    -177  // TODO Auto-generated method 
    stub
    -178  return null;
    -179}
    -180
    -181@Override
    -182public int getTagsOffset() {
    -183  // TODO Auto-generated method 
    stub
    -184  return 0;
    -185}
    -186
    -187@Override
    -188public long getSequenceId() {
    -189   
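[editorial note: TestCellScanner above implements the standard advance()/current() iteration contract. A hedged sketch of consuming any CellScanner with that contract; the wrapper class and countCells helper are illustrative assumptions:

    import java.io.IOException;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellScanner;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CellScannerWalk {
      // advance() moves to the next cell and reports whether one exists;
      // current() is only valid after a successful advance().
      static int countCells(CellScanner scanner) throws IOException {
        int count = 0;
        while (scanner.advance()) {
          Cell cell = scanner.current();
          System.out.println("row=" + Bytes.toStringBinary(CellUtil.cloneRow(cell)));
          count++;
        }
        return count;
      }
    }
]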

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f183e80f/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.html
    --
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.html
index 7534235..762219a 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.html
    @@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
     
     
     
    -public class TestHRegionReplayEvents
    +public class TestHRegionReplayEvents
     extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
     title="class or interface in java.lang">Object
     Tests of HRegion methods for replaying flush, compaction, 
    region open, etc events for secondary
      region replicas
    @@ -164,7 +164,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     name
     
     
    -private 
    org.apache.hadoop.hbase.HRegionInfo
    +private 
    org.apache.hadoop.hbase.client.RegionInfo
     primaryHri
     
     
    @@ -192,7 +192,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     rss
     
     
    -private 
    org.apache.hadoop.hbase.HRegionInfo
    +private 
    org.apache.hadoop.hbase.client.RegionInfo
     secondaryHri
     
     
    @@ -525,7 +525,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     LOG
    -private static finalorg.slf4j.Logger LOG
    +private static finalorg.slf4j.Logger LOG
     
     
     
    @@ -534,7 +534,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     name
    -publicorg.junit.rules.TestName name
    +publicorg.junit.rules.TestName name
     
     
     
    @@ -543,7 +543,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     TEST_UTIL
    -private staticHBaseTestingUtility TEST_UTIL
    +private staticHBaseTestingUtility TEST_UTIL
     
     
     
    @@ -552,7 +552,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     CONF
    -public staticorg.apache.hadoop.conf.Configuration CONF
    +public staticorg.apache.hadoop.conf.Configuration CONF
     
     
     
    @@ -561,7 +561,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     dir
    -privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
     title="class or interface in java.lang">String dir
    +privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
     title="class or interface in java.lang">String dir
     
     
     
    @@ -570,7 +570,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     families
    -privatebyte[][] families
    +privatebyte[][] families
     
     
     
    @@ -579,7 +579,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     tableName
    -protectedbyte[] tableName
    +protectedbyte[] tableName
     
     
     
    @@ -588,7 +588,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     method
    -protectedhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
     title="class or interface in java.lang">String method
    +protectedhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
     title="class or interface in java.lang">String method
     
     
     
    @@ -597,7 +597,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     row
    -protected finalbyte[] row
    +protected finalbyte[] row
     
     
     
    @@ -606,7 +606,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     row2
    -protected finalbyte[] row2
    +protected finalbyte[] row2
     
     
     
    @@ -615,7 +615,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     cq
    -protectedbyte[] cq
    +protectedbyte[] cq
     
     
     
    @@ -624,7 +624,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     rootDir
    -privateorg.apache.hadoop.fs.Path rootDir
    +privateorg.apache.hadoop.fs.Path rootDir
     
     
     
    @@ -633,7 +633,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     htd
    -privateorg.apache.hadoop.hbase.client.TableDescriptor htd
    +privateorg.apache.hadoop.hbase.client.TableDescriptor htd
     
     
     
    @@ -642,7 +642,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     time
    -privatelong time
    +privatelong time
     
     
     
    @@ -651,7 +651,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     rss
    -privateorg.apache.hadoop.hbase.regionserver.RegionServerServices rss
    +privateorg.apache.hadoop.hbase.regionserver.RegionServerServices rss
     
     
     
    @@ -660,7 +660,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     primaryHri
    -privateorg.apache.hadoop.hbase.HRegionInfo primaryHri
    +privateorg.apache.hadoop.hbase.client.RegionInfo primaryHri
     
     
     
    @@ -669,7 +669,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     secondaryHri
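[editorial note: the primaryHri/secondaryHri fields above move from the deprecated HRegionInfo to client.RegionInfo. A hedged sketch of building a RegionInfo and deriving its replica with the 2.0-era builder API; the table name is arbitrary and the replica helper call is an assumption for illustration:

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.client.RegionInfoBuilder;
    import org.apache.hadoop.hbase.client.RegionReplicaUtil;

    public class RegionInfoExample {
      public static void main(String[] args) {
        // Builder call that replaces `new HRegionInfo(...)`.
        RegionInfo primaryHri =
            RegionInfoBuilder.newBuilder(TableName.valueOf("testReplay")).build();
        // Secondary replica (replica id 1) derived from the primary.
        RegionInfo secondaryHri =
            RegionReplicaUtil.getRegionInfoForReplica(primaryHri, 1);
        System.out.println(primaryHri + " / " + secondaryHri);
      }
    }
]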
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/49431b18/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignProcedure.html
    --
diff --git a/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignProcedure.html b/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignProcedure.html
    index 623bd1b..5c6e7db 100644
    --- a/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignProcedure.html
    +++ b/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignProcedure.html
    @@ -327,7 +327,9 @@ extends 
     RemoteProcedureDispatcher.RemoteOperation
     remoteCallBuild(MasterProcedureEnvenv,
    -   ServerNameserverName)
    +   ServerNameserverName)
    +For building the remote operation.
    +
     
     
     protected boolean
    @@ -374,7 +376,7 @@ extends RegionTransitionProcedure
    -abort,
     acquireLock,
     addToRemoteDispatcher,
     execute,
     getRegionInfo,
     getRegionState,
     getTableName,
     getTransitionState,
     hasLock,
     holdLock,
     isMeta,
     isServerOnline,
     isServerOnline,
     releaseLock,
     remoteCallCompleted,
     remoteCallFailed,
     reportTransition,
     rollback,
     setRegionInfo,
     setTransitionState,
     shouldWaitClientAck,
     toStringState
    +abort,
     acquireLock,
     addToRemoteDispatcher,
     execute,
     getRegionInfo,
     getRegionState,
     getTableName,
     getTransitionState,
     hasLock,
     holdLock,
     isMeta,
     isServerOnline,
     isServerOnline,
     releaseLock,
     remoteCallFailed,
     remoteOperationCompleted,
     remoteOperationFailed,
     reportTransition,
     rollback,
     setRegionInfo, setTransitionState,
     shouldWaitClientAck,
     toStringState
     
     
     
    @@ -674,6 +676,8 @@ extends RemoteProcedureDispatcher.RemoteOperationremoteCallBuild(MasterProcedureEnvenv,
      ServerNameserverName)
    +Description copied from 
    interface:RemoteProcedureDispatcher.RemoteProcedure
    +For building the remote operation.
     
     Specified by:
     remoteCallBuildin
     interfaceRemoteProcedureDispatcher.RemoteProcedureMasterProcedureEnv,ServerName
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/49431b18/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.html
    --
diff --git a/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.html b/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.html
index 8d59a6a..8df5ed6 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.html
    @@ -18,7 +18,7 @@
     catch(err) {
     }
     //-->
    -var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":6,"i5":10,"i6":10,"i7":6,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":6,"i14":10,"i15":10,"i16":10,"i17":6,"i18":10,"i19":6,"i20":10,"i21":6,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":6,"i28":10,"i29":10,"i30":6};
    +var methods = 
    {"i0":10,"i1":10,"i2":10,"i3":10,"i4":6,"i5":10,"i6":10,"i7":6,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":6,"i14":10,"i15":10,"i16":10,"i17":6,"i18":6,"i19":10,"i20":10,"i21":10,"i22":6,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":6,"i29":10,"i30":10,"i31":6};
     var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
    Methods"],4:["t3","Abstract Methods"],8:["t4","Concrete Methods"]};
     var altColor = "altColor";
     var rowColor = "rowColor";
    @@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";
     
     
     @InterfaceAudience.Private
    -public abstract class RegionTransitionProcedure
    +public abstract class RegionTransitionProcedure
     extends ProcedureMasterProcedureEnv
     implements TableProcedureInterface, RemoteProcedureDispatcher.RemoteProcedureMasterProcedureEnv,ServerName
     Base class for the Assign and Unassign Procedure.
    @@ -363,80 +363,93 @@ implements 
     abstract RemoteProcedureDispatcher.RemoteOperation
     remoteCallBuild(MasterProcedureEnvenv,
    -   ServerNameserverName)
    +   ServerNameserverName)
    +For building the remote operation.
    +
     
     
    -void
    -remoteCallCompleted(MasterProcedureEnvenv,
    -   ServerNameserverName,
    -   RemoteProcedureDispatcher.RemoteOperationresponse)
    -
    -
     protected abstract boolean
     remoteCallFailed(MasterProcedureEnvenv,
     RegionStates.RegionStateNoderegionNode,
     http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
     title="class or interface in 
    java.io">IOExceptionexception)
     
    -
    +
     void
     remoteCallFailed(MasterProcedureEnvenv,
     ServerNameserverName,
    -http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
     title="class or interface in 
    java.io">IOExceptionexception)
    +http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
     title="class or interface in java.io">IOExceptionexception)
    +Called when the executeProcedure call is failed.
    +
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c7c40c62/devapidocs/org/apache/hadoop/hbase/package-use.html
    --
diff --git a/devapidocs/org/apache/hadoop/hbase/package-use.html b/devapidocs/org/apache/hadoop/hbase/package-use.html
    index 5a3a58b..d9cf5eb 100644
    --- a/devapidocs/org/apache/hadoop/hbase/package-use.html
    +++ b/devapidocs/org/apache/hadoop/hbase/package-use.html
    @@ -1063,11 +1063,16 @@ service.
     
     
     
    +ClusterMetrics
    +Metrics information on the HBase cluster.
    +
    +
    +
     ClusterMetrics.Option
     Kinds of ClusterMetrics
     
     
    -
    +
     ClusterStatus
     Deprecated.
     As of release 2.0.0, this 
    will be removed in HBase 3.0.0
    @@ -1075,32 +1080,32 @@ service.
     
     
     
    -
    +
     CompareOperator
     Generic set of comparison operators.
     
     
    -
    +
     DoNotRetryIOException
     Subclass if exception is not meant to be retried: e.g.
     
     
    -
    +
     ExtendedCell
     Extension to Cell with server side required 
    functions.
     
     
    -
    +
     HBaseIOException
     All hbase specific IOExceptions should be subclasses of 
    HBaseIOException
     
     
    -
    +
     HColumnDescriptor
     Deprecated.
     
     
    -
    +
     HRegionInfo
     Deprecated.
     As of release 2.0.0, this 
    will be removed in HBase 3.0.0.
    @@ -1108,13 +1113,13 @@ service.
     
     
     
    -
    +
     HRegionLocation
     Data structure to hold RegionInfo and the address for the 
    hosting
      HRegionServer.
     
     
    -
    +
     HTableDescriptor
     Deprecated.
     As of release 2.0.0, this 
    will be removed in HBase 3.0.0.
    @@ -1122,54 +1127,46 @@ service.
     
     
     
    -
    +
     KeepDeletedCells
     Ways to keep cells marked for delete around.
     
     
    -
    +
     KeyValue
     An HBase Key/Value.
     
     
    -
    +
     MasterNotRunningException
     Thrown if the master is not running
     
     
    -
    +
     MemoryCompactionPolicy
     Enum describing all possible memory compaction 
    policies
     
     
    -
    +
     NamespaceDescriptor
     Namespace POJO class.
     
     
    -
    +
     NamespaceNotFoundException
     Thrown when a namespace can not be located
     
     
    -
    +
     RawCell
     An extended version of cell that gives more power to 
    CPs
     
     
    -
    +
     RegionException
     Thrown when something happens related to region 
    handling.
     
     
    -
    -RegionLoad
    -Deprecated.
    -As of release 2.0.0, this 
    will be removed in HBase 3.0.0
    - Use RegionMetrics instead.
    -
    -
    -
     
     RegionLocations
     Container for holding a list of HRegionLocation's that correspond to 
    the
    @@ -1177,31 +1174,36 @@ service.
     
     
     
    +RegionMetrics
    +Encapsulates per-region load metrics.
    +
    +
    +
     ServerName
     Name of a particular incarnation of an HBase Server.
     
     
    -
    +
     TableExistsException
     Thrown when a table exists but should not
     
     
    -
    +
     TableName
     Immutable POJO class for representing a table name.
     
     
    -
    +
     TableNotFoundException
     Thrown when a table can not be located
     
     
    -
    +
     Tag
 Tags are part of cells and help to add metadata about them.
     
     
    -
    +
     ZooKeeperConnectionException
     Thrown if the client can't connect to zookeeper
     
    @@ -1441,11 +1443,8 @@ service.
     
     
     
    -ClusterStatus
    -Deprecated.
    -As of release 2.0.0, this 
    will be removed in HBase 3.0.0
    - Use ClusterMetrics 
    instead.
    -
    +ClusterMetrics
    +Metrics information on the HBase cluster.
     
     
     
    @@ -1956,16 +1955,13 @@ service.
     
     
     
    -ClusterMetrics.Option
    -Kinds of ClusterMetrics
    +ClusterMetrics
    +Metrics information on the HBase cluster.
     
     
     
    -ClusterStatus
    -Deprecated.
    -As of release 2.0.0, this 
    will be removed in HBase 3.0.0
    - Use ClusterMetrics 
    instead.
    -
    +ClusterMetrics.Option
    +Kinds of ClusterMetrics
     
     
     
    @@ -2137,11 +2133,8 @@ service.
     
     
     
    -ClusterStatus
    -Deprecated.
    -As of release 2.0.0, this 
    will be removed in HBase 3.0.0
    - Use ClusterMetrics 
    instead.
    -
    +ClusterMetrics
    +Metrics information on the HBase cluster.
     
     
     
    @@ -2155,11 +2148,8 @@ service.
     
     
     
    -RegionLoad
    -Deprecated.
    -As of release 2.0.0, this 
    will be removed in HBase 3.0.0
    - Use RegionMetrics instead.
    -
    +RegionMetrics
    +Encapsulates per-region load metrics.
     
     
     
    @@ -3205,11 +3195,8 @@ service.
     
     
     
    -ClusterStatus
    -Deprecated.
    -As of release 2.0.0, this 
    will be removed in HBase 3.0.0
    - Use ClusterMetrics 
    instead.
    -
    +ClusterMetrics
    +Metrics information on the HBase cluster.
     
     
     
    @@ -3628,11 +3615,8 @@ service.
     
     
     
    -ClusterStatus
    -Deprecated.
    -As of release 2.0.0, this 
    will be removed in HBase 3.0.0
    - Use ClusterMetrics 
    instead.
    -
    +ClusterMetrics
    +Metrics information on the HBase cluster.
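These use-pages now point at ClusterMetrics rather than the deprecated ClusterStatus. As a rough sketch of how a client obtains it under the HBase 2.x Admin API (method and option names believed correct for that API, but verify against your release):

import java.util.EnumSet;
import org.apache.hadoop.hbase.ClusterMetrics;
import org.apache.hadoop.hbase.ClusterMetrics.Option;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class ClusterMetricsExample {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
        Admin admin = conn.getAdmin()) {
      // Request only the pieces needed instead of the full status payload.
      ClusterMetrics metrics =
          admin.getClusterMetrics(EnumSet.of(Option.MASTER, Option.LIVE_SERVERS));
      System.out.println("master: " + metrics.getMasterName());
      System.out.println("live servers: " + metrics.getLiveServerMetrics().size());
    }
  }
}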
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c7c40c62/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html 
    b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
    index 90bcee3..cde2f87 100644
    --- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
    +++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
    @@ -204,9 +204,9 @@
 java.lang.Enum<E>

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bb398572/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithResult.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithResult.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithResult.html
    index b8e321a..439a50d 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithResult.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverOperationWithResult.html
    @@ -468,274 +468,216 @@
     460  }
     461
     462  /**
-463   * Used to gracefully handle fallback to deprecated methods when we
-464   * evolve coprocessor APIs.
-465   *
-466   * When a particular Coprocessor API is updated to change methods, hosts can support fallback
-467   * to the deprecated API by using this method to determine if an instance implements the new API.
-468   * In the event that said support is partial, then in the face of a runtime issue that prevents
-469   * proper operation {@link #legacyWarning(Class, String)} should be used to let operators know.
-470   *
-471   * For examples of this in action, see the implementation of
-472   * <ul>
-473   *   <li>{@link org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost}
-474   *   <li>{@link org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost}
-475   * </ul>
-476   *
-477   * @param clazz Coprocessor you wish to evaluate
-478   * @param methodName the name of the non-deprecated method version
-479   * @param parameterTypes the Class of the non-deprecated method's arguments in the order they are
-480   * declared.
-481   */
-482  @InterfaceAudience.Private
-483  protected static boolean useLegacyMethod(final Class<? extends Coprocessor> clazz,
-484  final String methodName, final Class<?>... parameterTypes) {
-485boolean useLegacy;
-486// Use reflection to see if they implement the non-deprecated version
-487try {
-488  clazz.getDeclaredMethod(methodName, parameterTypes);
-489  LOG.debug("Found an implementation of '" + methodName + "' that uses updated method " +
-490  "signature. Skipping legacy support for invocations in '" + clazz +"'.");
-491  useLegacy = false;
-492} catch (NoSuchMethodException exception) {
-493  useLegacy = true;
-494} catch (SecurityException exception) {
-495  LOG.warn("The Security Manager denied our attempt to detect if the coprocessor '" + clazz +
-496  "' requires legacy support; assuming it does. If you get later errors about legacy " +
-497  "coprocessor use, consider updating your security policy to allow access to the package" +
-498  " and declared members of your implementation.");
-499  LOG.debug("Details of Security Manager rejection.", exception);
-500  useLegacy = true;
+463   * Used to limit legacy handling to once per Coprocessor class per classloader.
+464   */
+465  private static final Set<Class<? extends Coprocessor>> legacyWarning =
+466  new ConcurrentSkipListSet<>(
+467  new Comparator<Class<? extends Coprocessor>>() {
+468@Override
+469public int compare(Class<? extends Coprocessor> c1, Class<? extends Coprocessor> c2) {
+470  if (c1.equals(c2)) {
+471return 0;
+472  }
+473  return c1.getName().compareTo(c2.getName());
+474}
+475  });
+476
+477  /**
+478   * Implementations defined function to get an observer of type {@code O} from a coprocessor of
+479   * type {@code C}. Concrete implementations of CoprocessorHost define one getter for each
+480   * observer they can handle. For e.g. RegionCoprocessorHost will use 3 getters, one for
+481   * each of RegionObserver, EndpointObserver and BulkLoadObserver.
+482   * These getters are used by {@code ObserverOperation} to get appropriate observer from the
+483   * coprocessor.
+484   */
+485  @FunctionalInterface
+486  public interface ObserverGetter<C, O> extends Function<C, Optional<O>> {}
+487
+488  private abstract class ObserverOperation<O> extends ObserverContextImpl<E> {
+489ObserverGetter<C, O> observerGetter;
+490
+491ObserverOperation(ObserverGetter<C, O> observerGetter) {
+492  this(observerGetter, null);
+493}
+494
+495ObserverOperation(ObserverGetter<C, O> observerGetter, User user) {
+496  this(observerGetter, user, false);
+497}
+498
+499ObserverOperation(ObserverGetter<C, O> observerGetter, boolean bypassable) {
+500  this(observerGetter, null, bypassable);
     501}
-502return useLegacy;
-503  }
-504
-505  /**
-506   * Used to limit legacy handling to once per Coprocessor class per classloader.
-507   */
-508  private static final Set<Class<? extends Coprocessor>> legacyWarning =
-509
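Two idioms are visible in this hunk: the old reflection probe for a non-deprecated method, and the new functional ObserverGetter. A compact standalone illustration of both, with the coprocessor and observer types invented for the example:

import java.util.Optional;
import java.util.function.Function;

public class ObserverSketch {
  interface Coprocessor {}
  interface RegionObserver { default String name() { return "observer"; } }

  // Functional getter: from a coprocessor, maybe extract one observer type.
  @FunctionalInterface
  interface ObserverGetter<C, O> extends Function<C, Optional<O>> {}

  // Reflection probe: does this class declare the non-deprecated method itself?
  static boolean declaresMethod(Class<?> clazz, String method, Class<?>... params) {
    try {
      clazz.getDeclaredMethod(method, params);
      return true;
    } catch (NoSuchMethodException e) {
      return false;
    }
  }

  static class MyCoprocessor implements Coprocessor, RegionObserver {}

  public static void main(String[] args) {
    ObserverGetter<Coprocessor, RegionObserver> getter =
        cp -> cp instanceof RegionObserver
            ? Optional.of((RegionObserver) cp)
            : Optional.empty();
    System.out.println(getter.apply(new MyCoprocessor()).map(RegionObserver::name));
    System.out.println(declaresMethod(String.class, "length"));
  }
}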

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/util/class-use/StreamUtils.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/io/util/class-use/StreamUtils.html 
    b/devapidocs/org/apache/hadoop/hbase/io/util/class-use/StreamUtils.html
    index 17c0799..03aabcc 100644
    --- a/devapidocs/org/apache/hadoop/hbase/io/util/class-use/StreamUtils.html
    +++ b/devapidocs/org/apache/hadoop/hbase/io/util/class-use/StreamUtils.html
    @@ -120,6 +120,6 @@
     
     
     
-Copyright © 2007–2017 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/util/package-summary.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/io/util/package-summary.html 
    b/devapidocs/org/apache/hadoop/hbase/io/util/package-summary.html
    index e306a6f..cae16d0 100644
    --- a/devapidocs/org/apache/hadoop/hbase/io/util/package-summary.html
    +++ b/devapidocs/org/apache/hadoop/hbase/io/util/package-summary.html
    @@ -185,6 +185,6 @@
     
     
     
-Copyright © 2007–2017 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/util/package-tree.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/io/util/package-tree.html 
    b/devapidocs/org/apache/hadoop/hbase/io/util/package-tree.html
    index 6fcbae2..ffed0ed 100644
    --- a/devapidocs/org/apache/hadoop/hbase/io/util/package-tree.html
    +++ b/devapidocs/org/apache/hadoop/hbase/io/util/package-tree.html
    @@ -146,6 +146,6 @@
     
     
     
-Copyright © 2007–2017 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/util/package-use.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/io/util/package-use.html 
    b/devapidocs/org/apache/hadoop/hbase/io/util/package-use.html
    index 7cbfd23..0d2e448 100644
    --- a/devapidocs/org/apache/hadoop/hbase/io/util/package-use.html
    +++ b/devapidocs/org/apache/hadoop/hbase/io/util/package-use.html
    @@ -232,6 +232,6 @@
     
     
     
-Copyright © 2007–2017 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/ipc/AbstractRpcClient.AbstractRpcChannel.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/ipc/AbstractRpcClient.AbstractRpcChannel.html
     
    b/devapidocs/org/apache/hadoop/hbase/ipc/AbstractRpcClient.AbstractRpcChannel.html
    index 27cc7cf..ab10d01 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/ipc/AbstractRpcClient.AbstractRpcChannel.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/ipc/AbstractRpcClient.AbstractRpcChannel.html
    @@ -367,6 +367,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     
-Copyright © 2007–2017 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/ipc/AbstractRpcClient.BlockingRpcChannelImplementation.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/ipc/AbstractRpcClient.BlockingRpcChannelImplementation.html
     
    b/devapidocs/org/apache/hadoop/hbase/ipc/AbstractRpcClient.BlockingRpcChannelImplementation.html
    index fe5a296..c84133e 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/ipc/AbstractRpcClient.BlockingRpcChannelImplementation.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/ipc/AbstractRpcClient.BlockingRpcChannelImplementation.html
    @@ -325,6 +325,6 @@ implements 
    org.apache.hbase.thirdparty.com.google.protobuf.BlockingRpcChannel
     
     
-Copyright © 2007–2017 The Apache Software Foundation. All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation. All rights reserved.
     
     
    
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/83bf6175/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.CountingThreadFactory.html
    --
    diff --git 
    a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.CountingThreadFactory.html
     
    b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.CountingThreadFactory.html
    index bbd91b8..4f76302 100644
    --- 
    a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.CountingThreadFactory.html
    +++ 
    b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.CountingThreadFactory.html
    @@ -56,1641 +56,1753 @@
     048import 
    java.util.concurrent.atomic.AtomicBoolean;
     049import 
    java.util.concurrent.atomic.AtomicInteger;
     050import 
    java.util.concurrent.atomic.AtomicLong;
    -051
    -052import 
    org.apache.hadoop.conf.Configuration;
    -053import 
    org.apache.hadoop.hbase.CallQueueTooBigException;
    -054import 
    org.apache.hadoop.hbase.CategoryBasedTimeout;
    -055import org.apache.hadoop.hbase.Cell;
    -056import 
    org.apache.hadoop.hbase.HConstants;
    -057import 
    org.apache.hadoop.hbase.HRegionInfo;
    -058import 
    org.apache.hadoop.hbase.HRegionLocation;
    -059import 
    org.apache.hadoop.hbase.RegionLocations;
    -060import 
    org.apache.hadoop.hbase.ServerName;
    -061import 
    org.apache.hadoop.hbase.TableName;
    -062import 
    org.apache.hadoop.hbase.client.AsyncProcessTask.ListRowAccess;
    -063import 
    org.apache.hadoop.hbase.client.AsyncProcessTask.SubmittedRows;
    -064import 
    org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicy;
    -065import 
    org.apache.hadoop.hbase.client.backoff.ServerStatistics;
    -066import 
    org.apache.hadoop.hbase.client.coprocessor.Batch;
    -067import 
    org.apache.hadoop.hbase.ipc.RpcControllerFactory;
    -068import 
    org.apache.hadoop.hbase.testclassification.ClientTests;
    -069import 
    org.apache.hadoop.hbase.testclassification.MediumTests;
    -070import 
    org.apache.hadoop.hbase.util.Bytes;
    -071import 
    org.apache.hadoop.hbase.util.Threads;
    -072import org.junit.Assert;
    -073import org.junit.BeforeClass;
    -074import org.junit.Ignore;
    -075import org.junit.Rule;
    -076import org.junit.Test;
    -077import 
    org.junit.experimental.categories.Category;
    -078import org.junit.rules.TestRule;
    -079import org.mockito.Mockito;
    -080import org.slf4j.Logger;
    -081import org.slf4j.LoggerFactory;
    -082
    -083@Category({ClientTests.class, 
    MediumTests.class})
    -084public class TestAsyncProcess {
    -085  @Rule public final TestRule timeout = 
    CategoryBasedTimeout.builder().withTimeout(this.getClass()).
    -086  
    withLookingForStuckThread(true).build();
    -087  private static final Logger LOG = 
    LoggerFactory.getLogger(TestAsyncProcess.class);
    -088  private static final TableName 
    DUMMY_TABLE =
    -089  TableName.valueOf("DUMMY_TABLE");
    -090  private static final byte[] 
    DUMMY_BYTES_1 = Bytes.toBytes("DUMMY_BYTES_1");
    -091  private static final byte[] 
    DUMMY_BYTES_2 = Bytes.toBytes("DUMMY_BYTES_2");
    -092  private static final byte[] 
    DUMMY_BYTES_3 = Bytes.toBytes("DUMMY_BYTES_3");
    -093  private static final byte[] FAILS = 
    Bytes.toBytes("FAILS");
    -094  private static final Configuration CONF 
    = new Configuration();
    -095  private static final 
    ConnectionConfiguration CONNECTION_CONFIG =
    -096  new 
    ConnectionConfiguration(CONF);
    -097  private static final ServerName sn = 
    ServerName.valueOf("s1,1,1");
    -098  private static final ServerName sn2 = 
    ServerName.valueOf("s2,2,2");
    -099  private static final ServerName sn3 = 
    ServerName.valueOf("s3,3,3");
    -100  private static final HRegionInfo hri1 
    =
    -101  new HRegionInfo(DUMMY_TABLE, 
    DUMMY_BYTES_1, DUMMY_BYTES_2, false, 1);
    -102  private static final HRegionInfo hri2 
    =
    -103  new HRegionInfo(DUMMY_TABLE, 
    DUMMY_BYTES_2, HConstants.EMPTY_END_ROW, false, 2);
    -104  private static final HRegionInfo hri3 
    =
    -105  new HRegionInfo(DUMMY_TABLE, 
    DUMMY_BYTES_3, HConstants.EMPTY_END_ROW, false, 3);
    -106  private static final HRegionLocation 
    loc1 = new HRegionLocation(hri1, sn);
    -107  private static final HRegionLocation 
    loc2 = new HRegionLocation(hri2, sn);
    -108  private static final HRegionLocation 
    loc3 = new HRegionLocation(hri3, sn2);
    -109
    -110  // Replica stuff
    -111  private static final RegionInfo hri1r1 
    = RegionReplicaUtil.getRegionInfoForReplica(hri1, 1);
    -112  private static final RegionInfo hri1r2 
    = RegionReplicaUtil.getRegionInfoForReplica(hri1, 2);
    -113  private static final RegionInfo hri2r1 
    = RegionReplicaUtil.getRegionInfoForReplica(hri2, 1);
    -114  private static final RegionLocations 
    hrls1 = new RegionLocations(new HRegionLocation(hri1, sn),
    -115  new HRegionLocation(hri1r1, sn2), 
    new HRegionLocation(hri1r2, sn3));
    -116  private static final RegionLocations 
    hrls2 = new RegionLocations(new HRegionLocation(hri2, sn2),
    -117  new HRegionLocation(hri2r1, 
    sn3));
    -118  private static final RegionLocations 
    hrls3 =
    -119  new RegionLocations(new 
    HRegionLocation(hri3, sn3), null);
    -120
    -121  

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/63d6f712/devapidocs/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.html 
    b/devapidocs/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.html
    index e673e87..b75289b 100644
    --- 
    a/devapidocs/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.html
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.html
    @@ -134,7 +134,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     Field and Description
     
     
-private org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache<RegionInfo,HDFSBlocksDistribution>
+private org.apache.hbase.thirdparty.com.google.common.cache.LoadingCache<RegionInfo,HDFSBlocksDistribution>
 cache
     
     
    @@ -150,7 +150,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     EMPTY_BLOCK_DISTRIBUTION
     
     
    -private 
    org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListeningExecutorService
    +private 
    org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListeningExecutorService
     executor
     
     
    @@ -158,7 +158,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     lastFullRefresh
     
     
-private org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader<RegionInfo,HDFSBlocksDistribution>
+private org.apache.hbase.thirdparty.com.google.common.cache.CacheLoader<RegionInfo,HDFSBlocksDistribution>
 loader
     
     
    @@ -206,11 +206,11 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     Method and Description
     
     
-private org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListenableFuture<HDFSBlocksDistribution>
+private org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListenableFuture<HDFSBlocksDistribution>
 asyncGetBlockDistribution(RegionInfo hri)
 
 
-private org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache<RegionInfo,HDFSBlocksDistribution>
+private org.apache.hbase.thirdparty.com.google.common.cache.LoadingCache<RegionInfo,HDFSBlocksDistribution>
 createCache()
 Create a cache for region to list of servers
     
    @@ -220,7 +220,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     getBlockDistribution(RegionInfohri)
     
     
-(package private) org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache<RegionInfo,HDFSBlocksDistribution>
+(package private) org.apache.hbase.thirdparty.com.google.common.cache.LoadingCache<RegionInfo,HDFSBlocksDistribution>
 getCache()
     
     
    @@ -367,7 +367,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     executor
-private final org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListeningExecutorService executor
+private final org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListeningExecutorService executor
     
     
     
    @@ -385,7 +385,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     loader
-private org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader<RegionInfo,HDFSBlocksDistribution> loader
+private org.apache.hbase.thirdparty.com.google.common.cache.CacheLoader<RegionInfo,HDFSBlocksDistribution> loader
     
     
     
    @@ -394,7 +394,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     cache
-private org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache<RegionInfo,HDFSBlocksDistribution> cache
+private org.apache.hbase.thirdparty.com.google.common.cache.LoadingCache<RegionInfo,HDFSBlocksDistribution> cache
     
     
     
    @@ -428,7 +428,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     createCache
-private org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache<RegionInfo,HDFSBlocksDistribution> createCache()
+private org.apache.hbase.thirdparty.com.google.common.cache.LoadingCache<RegionInfo,HDFSBlocksDistribution> createCache()
     Create a cache for region to list of servers
     
     Parameters:
    @@ -579,7 +579,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     asyncGetBlockDistribution
-private org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListenableFuture<HDFSBlocksDistribution> asyncGetBlockDistribution(RegionInfo hri)
+private org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListenableFuture<HDFSBlocksDistribution> asyncGetBlockDistribution(RegionInfo hri)
     
     
     
    @@ -597,7 +597,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
     
     
     getCache
-org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache<RegionInfo,HDFSBlocksDistribution> getCache()
+org.apache.hbase.thirdparty.com.google.common.cache.LoadingCache<RegionInfo,HDFSBlocksDistribution> getCache()
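The relocation from org.apache.hadoop.hbase.shaded to org.apache.hbase.thirdparty does not change the cache's behavior. For readers unfamiliar with the underlying Guava API, a minimal standalone sketch (plain com.google.common coordinates rather than the relocated ones):

import java.util.concurrent.TimeUnit;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;

public class LoadingCacheSketch {
  public static void main(String[] args) throws Exception {
    // Values are computed on first access and recomputed after the configured
    // interval, much like the region-to-block-distribution cache documented here.
    LoadingCache<String, Integer> cache = CacheBuilder.newBuilder()
        .refreshAfterWrite(5, TimeUnit.MINUTES)
        .build(new CacheLoader<String, Integer>() {
          @Override
          public Integer load(String key) {
            return key.length(); // stand-in for an expensive HDFS lookup
          }
        });
    System.out.println(cache.get("region-abc")); // prints 10
  }
}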
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/63d6f712/devapidocs/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.html
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d449e87f/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
     
    b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
    deleted file mode 100644
    index 41b74b8..000
    --- 
    a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
    +++ /dev/null
    @@ -1,1365 +0,0 @@
-BufferedDataBlockEncoder.OffheapDecodedCell (Apache HBase 3.0.0-SNAPSHOT API)
    -org.apache.hadoop.hbase.io.encoding
    -Class BufferedDataBlockEncoder.OffheapDecodedCell
    -
    -
    -
    -http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
     title="class or interface in java.lang">java.lang.Object
    -
    -
    -org.apache.hadoop.hbase.ByteBufferCell
    -
    -
    -org.apache.hadoop.hbase.io.encoding.BufferedDataBlockEncoder.OffheapDecodedCell
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -All Implemented Interfaces:
    -http://docs.oracle.com/javase/8/docs/api/java/lang/Cloneable.html?is-external=true;
     title="class or interface in java.lang">Cloneable, Cell, ExtendedCell, HeapSize, RawCell
    -
    -
    -Enclosing class:
    -BufferedDataBlockEncoder
    -
    -
    -
    -protected static class BufferedDataBlockEncoder.OffheapDecodedCell
    -extends ByteBufferCell
    -implements ExtendedCell
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -Nested Class Summary
    -
    -
    -
    -
    -Nested classes/interfaces inherited from 
    interfaceorg.apache.hadoop.hbase.Cell
    -Cell.DataType
    -
    -
    -
    -
    -
    -
    -
    -
    -Field Summary
    -
    -Fields
    -
    -Modifier and Type
    -Field and Description
    -
    -
    -private byte
    -familyLength
    -
    -
    -private int
    -familyOffset
    -
    -
    -private static long
    -FIXED_OVERHEAD
    -
    -
    -private http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
     title="class or interface in java.nio">ByteBuffer
    -keyBuffer
    -
    -
    -private int
    -qualifierLength
    -
    -
    -private int
    -qualifierOffset
    -
    -
    -private short
    -rowLength
    -
    -
    -private long
    -seqId
    -
    -
    -private http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
     title="class or interface in java.nio">ByteBuffer
    -tagsBuffer
    -
    -
    -private int
    -tagsLength
    -
    -
    -private int
    -tagsOffset
    -
    -
    -private long
    -timestamp
    -
    -
    -private byte
    -typeByte
    -
    -
    -private http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
     title="class or interface in java.nio">ByteBuffer
    -valueBuffer
    -
    -
    -private int
    -valueLength
    -
    -
    -private int
    -valueOffset
    -
    -
    -
    -
    -
    -
    -Fields inherited from interfaceorg.apache.hadoop.hbase.ExtendedCell
    -CELL_NOT_BASED_ON_CHUNK
    -
    -
    -
    -
    -
    -Fields inherited from interfaceorg.apache.hadoop.hbase.RawCell
    -MAX_TAGS_LENGTH
    -
    -
    -
    -
    -
    -
    -
    -
    -Constructor Summary
    -
    -Constructors
    -
    -Modifier
    -Constructor and Description
    -
    -
    -protected 
    -OffheapDecodedCell(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
     title="class or interface in java.nio">ByteBufferkeyBuffer,
    -  shortrowLength,
    -  intfamilyOffset,
    -  bytefamilyLength,
    -  intqualOffset,
    -  intqualLength,
    -  longtimeStamp,
    -  bytetypeByte,
    -  http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
     title="class or interface in java.nio">ByteBuffervalueBuffer,
    -  intvalueOffset,
    -  intvalueLen,
    -  longseqId,
    -  

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
    index 3400507..2baa140 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
    @@ -28,3034 +28,2926 @@
     020import static 
    org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
     021import static 
    org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
     022
    -023import 
    com.google.common.annotations.VisibleForTesting;
    -024
    -025import java.io.DataOutput;
    -026import java.io.DataOutputStream;
    -027import java.io.IOException;
    -028import java.io.OutputStream;
    -029import java.math.BigDecimal;
    -030import java.nio.ByteBuffer;
    -031import java.util.ArrayList;
    -032import java.util.Iterator;
    -033import java.util.List;
    -034import java.util.Optional;
    -035
    -036import 
    org.apache.hadoop.hbase.KeyValue.Type;
    -037import 
    org.apache.hadoop.hbase.filter.ByteArrayComparable;
    -038import 
    org.apache.hadoop.hbase.io.HeapSize;
    -039import 
    org.apache.hadoop.hbase.io.TagCompressionContext;
    -040import 
    org.apache.hadoop.hbase.io.util.Dictionary;
    -041import 
    org.apache.hadoop.hbase.io.util.StreamUtils;
    -042import 
    org.apache.hadoop.hbase.util.ByteBufferUtils;
    -043import 
    org.apache.hadoop.hbase.util.ByteRange;
    -044import 
    org.apache.hadoop.hbase.util.Bytes;
    -045import 
    org.apache.hadoop.hbase.util.ClassSize;
    -046import 
    org.apache.yetus.audience.InterfaceAudience;
    -047
    -048
    -049/**
    -050 * Utility methods helpful slinging 
    {@link Cell} instances. It has more powerful and
    -051 * rich set of APIs than those in {@link 
    CellUtil} for internal usage.
    -052 */
    -053@InterfaceAudience.Private
    -054public final class PrivateCellUtil {
    -055
    -056  /**
    -057   * Private constructor to keep this 
    class from being instantiated.
    -058   */
    -059  private PrivateCellUtil() {
    -060  }
    +023import java.io.DataOutput;
    +024import java.io.DataOutputStream;
    +025import java.io.IOException;
    +026import java.io.OutputStream;
    +027import java.math.BigDecimal;
    +028import java.nio.ByteBuffer;
    +029import java.util.ArrayList;
    +030import java.util.Iterator;
    +031import java.util.List;
    +032import java.util.Optional;
    +033import 
    org.apache.hadoop.hbase.KeyValue.Type;
    +034import 
    org.apache.hadoop.hbase.filter.ByteArrayComparable;
    +035import 
    org.apache.hadoop.hbase.io.HeapSize;
    +036import 
    org.apache.hadoop.hbase.io.TagCompressionContext;
    +037import 
    org.apache.hadoop.hbase.io.util.Dictionary;
    +038import 
    org.apache.hadoop.hbase.io.util.StreamUtils;
    +039import 
    org.apache.hadoop.hbase.util.ByteBufferUtils;
    +040import 
    org.apache.hadoop.hbase.util.ByteRange;
    +041import 
    org.apache.hadoop.hbase.util.Bytes;
    +042import 
    org.apache.hadoop.hbase.util.ClassSize;
    +043import 
    org.apache.yetus.audience.InterfaceAudience;
    +044
    +045import 
    org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
    +046
    +047/**
    +048 * Utility methods helpful slinging 
    {@link Cell} instances. It has more powerful and
    +049 * rich set of APIs than those in {@link 
    CellUtil} for internal usage.
    +050 */
    +051@InterfaceAudience.Private
    +052public final class PrivateCellUtil {
    +053
    +054  /**
    +055   * Private constructor to keep this 
    class from being instantiated.
    +056   */
    +057  private PrivateCellUtil() {
    +058  }
    +059
    +060  /*** ByteRange 
    ***/
     061
    -062  /*** ByteRange 
    ***/
    -063
    -064  public static ByteRange 
    fillRowRange(Cell cell, ByteRange range) {
    -065return range.set(cell.getRowArray(), 
    cell.getRowOffset(), cell.getRowLength());
    -066  }
    -067
    -068  public static ByteRange 
    fillFamilyRange(Cell cell, ByteRange range) {
    -069return 
    range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 
    cell.getFamilyLength());
    -070  }
    -071
    -072  public static ByteRange 
    fillQualifierRange(Cell cell, ByteRange range) {
    -073return 
    range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
    -074  cell.getQualifierLength());
    -075  }
    -076
    -077  public static ByteRange 
    fillValueRange(Cell cell, ByteRange range) {
    -078return 
    range.set(cell.getValueArray(), cell.getValueOffset(), 
    cell.getValueLength());
    -079  }
    -080
    -081  public static ByteRange 
    fillTagRange(Cell cell, ByteRange range) {
    -082return range.set(cell.getTagsArray(), 
    cell.getTagsOffset(), cell.getTagsLength());
    -083  }
    +062  public static ByteRange 
    fillRowRange(Cell cell, ByteRange range) {
    +063return range.set(cell.getRowArray(), 
    cell.getRowOffset(), cell.getRowLength());
    +064  }
    +065
    +066  public static ByteRange 
    fillFamilyRange(Cell cell, ByteRange range) {
    +067return 
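The hunk is cut off mid-method, but the fill*Range helpers all follow one idiom: point a reusable range at a slice of the cell's backing array without copying. A tiny standalone illustration, with SimpleRange an invented stand-in for HBase's ByteRange:

public class RangeSketch {
  // Invented stand-in for org.apache.hadoop.hbase.util.ByteRange.
  static class SimpleRange {
    byte[] bytes; int offset; int length;
    SimpleRange set(byte[] bytes, int offset, int length) {
      this.bytes = bytes; this.offset = offset; this.length = length;
      return this; // returning 'this' enables the one-line fill style above
    }
  }

  public static void main(String[] args) {
    byte[] backing = "rowkey:family:qualifier".getBytes();
    // Point the range at the 'family' slice without copying bytes.
    SimpleRange range = new SimpleRange().set(backing, 7, 6);
    System.out.println(new String(range.bytes, range.offset, range.length)); // family
  }
}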
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b618ac40/devapidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.html
    index 1f43c53..cc1277d 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.html
    @@ -34,501 +34,496 @@
     026import java.util.List;
     027import java.util.Map;
     028import java.util.Set;
    -029import java.util.TreeMap;
    -030import java.util.stream.Collectors;
    -031
    -032import 
    org.apache.commons.lang3.StringUtils;
    -033import 
    org.apache.hadoop.conf.Configuration;
    -034import 
    org.apache.hadoop.hbase.CompoundConfiguration;
    -035import 
    org.apache.hadoop.hbase.HBaseConfiguration;
    -036import 
    org.apache.hadoop.hbase.TableName;
    -037import 
    org.apache.hadoop.hbase.exceptions.DeserializationException;
    -038import 
    org.apache.hadoop.hbase.replication.ReplicationException;
    -039import 
    org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
    -040import 
    org.apache.hadoop.hbase.replication.ReplicationPeerConfigBuilder;
    -041import 
    org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
    -042import 
    org.apache.hadoop.hbase.util.Bytes;
    -043import 
    org.apache.hadoop.hbase.util.Strings;
    -044import 
    org.apache.yetus.audience.InterfaceAudience;
    -045import 
    org.apache.yetus.audience.InterfaceStability;
    -046import org.slf4j.Logger;
    -047import org.slf4j.LoggerFactory;
    -048
    -049import 
    org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
    -050import 
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
    -051import 
    org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
    -052import 
    org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
    -053import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
    -054import 
    org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
    -055
    -056/**
    -057 * Helper for TableCFs Operations.
    -058 */
    -059@InterfaceAudience.Private
    -060@InterfaceStability.Stable
    -061public final class 
    ReplicationPeerConfigUtil {
    -062
    -063  private static final Logger LOG = 
    LoggerFactory.getLogger(ReplicationPeerConfigUtil.class);
    -064
    -065  private ReplicationPeerConfigUtil() 
    {}
    -066
-067  public static String convertToString(Set<String> namespaces) {
-068if (namespaces == null) {
-069  return null;
-070}
-071return StringUtils.join(namespaces, ';');
-072  }
-073
-074  /** convert map to TableCFs Object */
-075  public static ReplicationProtos.TableCF[] convert(
-076  Map<TableName, ? extends Collection<String>> tableCfs) {
-077if (tableCfs == null) {
-078  return null;
-079}
-080List<ReplicationProtos.TableCF> tableCFList = new ArrayList<>(tableCfs.entrySet().size());
-081ReplicationProtos.TableCF.Builder tableCFBuilder =  ReplicationProtos.TableCF.newBuilder();
-082for (Map.Entry<TableName, ? extends Collection<String>> entry : tableCfs.entrySet()) {
-083  tableCFBuilder.clear();
-084  tableCFBuilder.setTableName(ProtobufUtil.toProtoTableName(entry.getKey()));
-085  Collection<String> v = entry.getValue();
-086  if (v != null && !v.isEmpty()) {
-087for (String value : entry.getValue()) {
-088  tableCFBuilder.addFamilies(ByteString.copyFromUtf8(value));
-089}
-090  }
-091  tableCFList.add(tableCFBuilder.build());
-092}
-093return tableCFList.toArray(new ReplicationProtos.TableCF[tableCFList.size()]);
-094  }
-095
-096  public static String convertToString(Map<TableName, ? extends Collection<String>> tableCfs) {
-097if (tableCfs == null) {
-098  return null;
-099}
-100return convert(convert(tableCfs));
-101  }
-102
-103  /**
-104   *  Convert string to TableCFs Object.
-105   *  This is only for read TableCFs information from TableCF node.
-106   *  Input String Format: ns1.table1:cf1,cf2;ns2.table2:cfA,cfB;ns3.table3.
-107   * */
-108  public static ReplicationProtos.TableCF[] convert(String tableCFsConfig) {
-109if (tableCFsConfig == null || tableCFsConfig.trim().length() == 0) {
-110  return null;
-111}
-112
-113ReplicationProtos.TableCF.Builder tableCFBuilder = ReplicationProtos.TableCF.newBuilder();
-114String[] tables = tableCFsConfig.split(";");
-115List<ReplicationProtos.TableCF> tableCFList = new ArrayList<>(tables.length);
-116
-117for (String tab : tables) {
-118  // 1 ignore empty table config
-119  tab = tab.trim();
-120  if (tab.length() == 0) {
-121continue;
-122  }
-123  // 2 split to "table" and "cf1,cf2"
-124  //   for each table: "table#cf1,cf2" or "table"
-125  String[] pair = 
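As the Javadoc above notes, the TableCFs string format is ns1.table1:cf1,cf2;ns2.table2:cfA,cfB;ns3.table3. Since the hunk breaks off mid-parse, here is a standalone sketch of the same parsing idea in plain Java (no HBase or protobuf types; purely illustrative):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class TableCFsParser {
  // Parses "ns1.table1:cf1,cf2;ns2.table2" into table -> column families.
  static Map<String, List<String>> parse(String config) {
    Map<String, List<String>> result = new LinkedHashMap<>();
    for (String tab : config.split(";")) {
      tab = tab.trim();
      if (tab.isEmpty()) {
        continue; // ignore empty table config, as the original comment notes
      }
      String[] pair = tab.split(":", 2);
      List<String> families = new ArrayList<>();
      if (pair.length == 2 && !pair[1].trim().isEmpty()) {
        for (String cf : pair[1].split(",")) {
          families.add(cf.trim());
        }
      }
      result.put(pair[0].trim(), families);
    }
    return result;
  }

  public static void main(String[] args) {
    System.out.println(parse("ns1.table1:cf1,cf2;ns2.table2:cfA,cfB;ns3.table3"));
  }
}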

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7c0589c0/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TableFuture.TableWaitForStateCallable.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TableFuture.TableWaitForStateCallable.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TableFuture.TableWaitForStateCallable.html
    index 6fecbc9..2accda0 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TableFuture.TableWaitForStateCallable.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TableFuture.TableWaitForStateCallable.html
    @@ -34,4140 +34,4141 @@
     026import 
    java.nio.charset.StandardCharsets;
     027import java.util.ArrayList;
     028import java.util.Arrays;
    -029import java.util.Collection;
    -030import java.util.EnumSet;
    -031import java.util.HashMap;
    -032import java.util.Iterator;
    -033import java.util.LinkedList;
    -034import java.util.List;
    -035import java.util.Map;
    -036import java.util.Set;
    -037import java.util.concurrent.Callable;
    -038import 
    java.util.concurrent.ExecutionException;
    -039import java.util.concurrent.Future;
    -040import java.util.concurrent.TimeUnit;
    -041import 
    java.util.concurrent.TimeoutException;
    -042import 
    java.util.concurrent.atomic.AtomicInteger;
    -043import 
    java.util.concurrent.atomic.AtomicReference;
    -044import java.util.regex.Pattern;
    -045import java.util.stream.Collectors;
    -046import java.util.stream.Stream;
    -047import 
    org.apache.hadoop.conf.Configuration;
    -048import 
    org.apache.hadoop.hbase.Abortable;
    -049import 
    org.apache.hadoop.hbase.CacheEvictionStats;
    -050import 
    org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
    -051import 
    org.apache.hadoop.hbase.ClusterMetrics.Option;
    -052import 
    org.apache.hadoop.hbase.ClusterStatus;
    -053import 
    org.apache.hadoop.hbase.DoNotRetryIOException;
    -054import 
    org.apache.hadoop.hbase.HBaseConfiguration;
    -055import 
    org.apache.hadoop.hbase.HConstants;
    -056import 
    org.apache.hadoop.hbase.HRegionInfo;
    -057import 
    org.apache.hadoop.hbase.HRegionLocation;
    -058import 
    org.apache.hadoop.hbase.HTableDescriptor;
    -059import 
    org.apache.hadoop.hbase.MasterNotRunningException;
    -060import 
    org.apache.hadoop.hbase.MetaTableAccessor;
    -061import 
    org.apache.hadoop.hbase.NamespaceDescriptor;
    -062import 
    org.apache.hadoop.hbase.NamespaceNotFoundException;
    -063import 
    org.apache.hadoop.hbase.NotServingRegionException;
    -064import 
    org.apache.hadoop.hbase.RegionLoad;
    -065import 
    org.apache.hadoop.hbase.RegionLocations;
    -066import 
    org.apache.hadoop.hbase.ServerName;
    -067import 
    org.apache.hadoop.hbase.TableExistsException;
    -068import 
    org.apache.hadoop.hbase.TableName;
    -069import 
    org.apache.hadoop.hbase.TableNotDisabledException;
    -070import 
    org.apache.hadoop.hbase.TableNotFoundException;
    -071import 
    org.apache.hadoop.hbase.UnknownRegionException;
    -072import 
    org.apache.hadoop.hbase.ZooKeeperConnectionException;
    -073import 
    org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
    -074import 
    org.apache.hadoop.hbase.client.replication.TableCFs;
    -075import 
    org.apache.hadoop.hbase.client.security.SecurityCapability;
    -076import 
    org.apache.hadoop.hbase.exceptions.TimeoutIOException;
    -077import 
    org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
    -078import 
    org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
    -079import 
    org.apache.hadoop.hbase.ipc.HBaseRpcController;
    -080import 
    org.apache.hadoop.hbase.ipc.RpcControllerFactory;
    -081import 
    org.apache.hadoop.hbase.quotas.QuotaFilter;
    -082import 
    org.apache.hadoop.hbase.quotas.QuotaRetriever;
    -083import 
    org.apache.hadoop.hbase.quotas.QuotaSettings;
    -084import 
    org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
    -085import 
    org.apache.hadoop.hbase.replication.ReplicationException;
    -086import 
    org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
    -087import 
    org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
    -088import 
    org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
    -089import 
    org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
    -090import 
    org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
    -091import 
    org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
    -092import 
    org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
    -093import 
    org.apache.hadoop.hbase.util.Addressing;
    -094import 
    org.apache.hadoop.hbase.util.Bytes;
    -095import 
    org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
    -096import 
    org.apache.hadoop.hbase.util.ForeignExceptionUtil;
    -097import 
    org.apache.hadoop.hbase.util.Pair;
    -098import 
    org.apache.hadoop.ipc.RemoteException;
    -099import 
    org.apache.hadoop.util.StringUtils;
    -100import 
    org.apache.yetus.audience.InterfaceAudience;
    -101import 
    org.apache.yetus.audience.InterfaceStability;
    -102import org.slf4j.Logger;
    -103import org.slf4j.LoggerFactory;
    -104
    -105import 
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4b2cc17/devapidocs/org/apache/hadoop/hbase/ClusterMetricsBuilder.ClusterMetricsImpl.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/ClusterMetricsBuilder.ClusterMetricsImpl.html
     
    b/devapidocs/org/apache/hadoop/hbase/ClusterMetricsBuilder.ClusterMetricsImpl.html
    new file mode 100644
    index 000..172ce3d
    --- /dev/null
    +++ 
    b/devapidocs/org/apache/hadoop/hbase/ClusterMetricsBuilder.ClusterMetricsImpl.html
    @@ -0,0 +1,666 @@
+ClusterMetricsBuilder.ClusterMetricsImpl (Apache HBase 3.0.0-SNAPSHOT API)
    +org.apache.hadoop.hbase
    +Class 
    ClusterMetricsBuilder.ClusterMetricsImpl
    +
    +
    +
+java.lang.Object
    +
    +
    +org.apache.hadoop.hbase.ClusterMetricsBuilder.ClusterMetricsImpl
    +
    +
    +
    +
    +
    +
    +
    +All Implemented Interfaces:
    +ClusterMetrics
    +
    +
    +Enclosing class:
    +ClusterMetricsBuilder
    +
    +
    +
    +private static class ClusterMetricsBuilder.ClusterMetricsImpl
+extends Object
    +implements ClusterMetrics
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +Nested Class Summary
    +
    +
    +
    +
    +Nested classes/interfaces inherited from 
    interfaceorg.apache.hadoop.hbase.ClusterMetrics
    +ClusterMetrics.Option
    +
    +
    +
    +
    +
    +
    +
    +
    +Field Summary
    +
    +Fields
    +
    +Modifier and Type
    +Field and Description
    +
    +
+private List<ServerName>
+backupMasterNames
+
+
+private Boolean
+balancerOn
+
+
+private String
+clusterId
+
+
+private List<ServerName>
+deadServerNames
+
+
+private String
+hbaseVersion
+
+
+private Map<ServerName,ServerMetrics>
+liveServerMetrics
+
+
+private List<String>
+masterCoprocessorNames
+
+
+private int
+masterInfoPort
+
+
+private ServerName
+masterName
+
+
+private List<RegionState>
+regionsInTransition
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +Constructor Summary
    +
    +Constructors
    +
    +Constructor and Description
    +
    +
+ClusterMetricsImpl(String hbaseVersion,
+  List<ServerName> deadServerNames,
+  Map<ServerName,ServerMetrics> liveServerMetrics,
+  ServerName masterName,
+  List

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/ScheduledChore.ChoreServicer.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/ScheduledChore.ChoreServicer.html 
    b/devapidocs/org/apache/hadoop/hbase/ScheduledChore.ChoreServicer.html
    index 580681e..bc8777a 100644
    --- a/devapidocs/org/apache/hadoop/hbase/ScheduledChore.ChoreServicer.html
    +++ b/devapidocs/org/apache/hadoop/hbase/ScheduledChore.ChoreServicer.html
    @@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
     
     
     
    -static interface ScheduledChore.ChoreServicer
    +static interface ScheduledChore.ChoreServicer
     
     
     
    @@ -177,7 +177,7 @@ var activeTableTab = "activeTableTab";
     
     
     cancelChore
    -voidcancelChore(ScheduledChorechore)
    +voidcancelChore(ScheduledChorechore)
     Cancel any ongoing schedules that this chore has with the 
    implementer of this interface.
     
     
    @@ -187,7 +187,7 @@ var activeTableTab = "activeTableTab";
     
     
     cancelChore
    -voidcancelChore(ScheduledChorechore,
    +voidcancelChore(ScheduledChorechore,
      booleanmayInterruptIfRunning)
     
     
    @@ -197,7 +197,7 @@ var activeTableTab = "activeTableTab";
     
     
     isChoreScheduled
    -booleanisChoreScheduled(ScheduledChorechore)
    +booleanisChoreScheduled(ScheduledChorechore)
     
     Returns:
     true when the chore is scheduled with the implementer of this 
    interface
    @@ -210,7 +210,7 @@ var activeTableTab = "activeTableTab";
     
     
     triggerNow
    -booleantriggerNow(ScheduledChorechore)
    +booleantriggerNow(ScheduledChorechore)
     This method tries to execute the chore immediately. If the 
    chore is executing at the time of
      this call, the chore will begin another execution as soon as the current 
    execution finishes
      
    @@ -227,7 +227,7 @@ var activeTableTab = "activeTableTab";
     
     
     onChoreMissedStartTime
    -voidonChoreMissedStartTime(ScheduledChorechore)
    +voidonChoreMissedStartTime(ScheduledChorechore)
     A callback that tells the implementer of this interface 
    that one of the scheduled chores is
      missing its start time. The implication of a chore missing its start time is 
    that the
      service's current means of scheduling may not be sufficient to handle the 
    number of ongoing
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/ScheduledChore.html
    --
    diff --git a/devapidocs/org/apache/hadoop/hbase/ScheduledChore.html 
    b/devapidocs/org/apache/hadoop/hbase/ScheduledChore.html
    index 2bb1d8c..279930f 100644
    --- a/devapidocs/org/apache/hadoop/hbase/ScheduledChore.html
    +++ b/devapidocs/org/apache/hadoop/hbase/ScheduledChore.html
    @@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
     
     
     @InterfaceAudience.Public
    -public abstract class ScheduledChore
    +public abstract class ScheduledChore
     extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
     title="class or interface in java.lang">Object
     implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
     title="class or interface in java.lang">Runnable
     ScheduledChore is a task performed on a period in hbase. 
    ScheduledChores become active once
    @@ -194,7 +194,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
     initialDelay
     
     
    -private static 
    org.apache.commons.logging.Log
    +private static org.slf4j.Logger
     LOG
     
     
    @@ -428,7 +428,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
     
     
     LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.slf4j.Logger LOG
     
     
     
    @@ -437,7 +437,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
     
     
     name
-private final String name
+private final String name
     
     
     
    @@ -446,7 +446,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
     
     
     DEFAULT_TIME_UNIT
-private static final TimeUnit DEFAULT_TIME_UNIT
+private static final TimeUnit DEFAULT_TIME_UNIT
     Default values for scheduling parameters should they be 
    excluded during construction
     
     
    @@ -456,7 +456,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
     
     
     DEFAULT_INITIAL_DELAY
-private static final long DEFAULT_INITIAL_DELAY
+private static final long DEFAULT_INITIAL_DELAY
     
     See Also:
     Constant
     Field Values
    @@ -469,7 +469,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.
     
     
     period
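
The -Log/+Logger hunks in this listing record ScheduledChore's migration from commons-logging to slf4j. A minimal sketch of the new declaration style, assuming only slf4j-api on the classpath (stand-in class name):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public abstract class ChoreLoggingSketch implements Runnable {
      // After the migration: org.slf4j.Logger via LoggerFactory, replacing
      // org.apache.commons.logging.Log obtained via LogFactory.getLog(...).
      private static final Logger LOG = LoggerFactory.getLogger(ChoreLoggingSketch.class);

      @Override
      public void run() {
        // slf4j {} placeholders defer string construction until debug is enabled
        LOG.debug("chore {} woke up", getClass().getSimpleName());
      }
    }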
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4c0cfa5/apidocs/src-html/org/apache/hadoop/hbase/types/RawFloat.html
    --
    diff --git a/apidocs/src-html/org/apache/hadoop/hbase/types/RawFloat.html 
    b/apidocs/src-html/org/apache/hadoop/hbase/types/RawFloat.html
    index 01aef5b..16843d4 100644
    --- a/apidocs/src-html/org/apache/hadoop/hbase/types/RawFloat.html
    +++ b/apidocs/src-html/org/apache/hadoop/hbase/types/RawFloat.html
    @@ -25,10 +25,10 @@
     017 */
     018package org.apache.hadoop.hbase.types;
     019
    -020import 
    org.apache.yetus.audience.InterfaceAudience;
    -021import 
    org.apache.hadoop.hbase.util.Bytes;
    -022import 
    org.apache.hadoop.hbase.util.Order;
    -023import 
    org.apache.hadoop.hbase.util.PositionedByteRange;
    +020import 
    org.apache.hadoop.hbase.util.Bytes;
    +021import 
    org.apache.hadoop.hbase.util.Order;
    +022import 
    org.apache.hadoop.hbase.util.PositionedByteRange;
    +023import 
    org.apache.yetus.audience.InterfaceAudience;
     024
     025/**
     026 * An {@code DataType} for interacting 
    with values encoded using
    @@ -41,56 +41,68 @@
033public class RawFloat implements DataType<Float> {
     034
     035  @Override
    -036  public boolean isOrderPreserving() { 
    return false; }
    -037
    -038  @Override
    -039  public Order getOrder() { return null; 
    }
    -040
    -041  @Override
    -042  public boolean isNullable() { return 
    false; }
    -043
    -044  @Override
    -045  public boolean isSkippable() { return 
    true; }
    -046
    -047  @Override
    -048  public int encodedLength(Float val) { 
    return Bytes.SIZEOF_FLOAT; }
    +036  public boolean isOrderPreserving() {
    +037return false;
    +038  }
    +039
    +040  @Override
    +041  public Order getOrder() {
    +042return null;
    +043  }
    +044
    +045  @Override
    +046  public boolean isNullable() {
    +047return false;
    +048  }
     049
     050  @Override
-051  public Class<Float> encodedClass() { return Float.class; }
    -052
    -053  @Override
    -054  public int skip(PositionedByteRange 
    src) {
    -055src.setPosition(src.getPosition() + 
    Bytes.SIZEOF_FLOAT);
    -056return Bytes.SIZEOF_FLOAT;
    -057  }
    -058
    -059  @Override
    -060  public Float decode(PositionedByteRange 
    src) {
    -061float val = 
    Bytes.toFloat(src.getBytes(), src.getOffset() + src.getPosition());
    -062skip(src);
    -063return val;
    -064  }
    -065
    -066  @Override
    -067  public int encode(PositionedByteRange 
    dst, Float val) {
    -068Bytes.putFloat(dst.getBytes(), 
    dst.getOffset() + dst.getPosition(), val);
    -069return skip(dst);
    -070  }
    -071
    -072  /**
    -073   * Read a {@code float} value from the 
    buffer {@code buff}.
    -074   */
    -075  public float decodeFloat(byte[] buff, 
    int offset) {
    -076return Bytes.toFloat(buff, offset);
    -077  }
    -078
    -079  /**
    -080   * Write instance {@code val} into 
    buffer {@code buff}.
    -081   */
    -082  public int encodeFloat(byte[] buff, int 
    offset, float val) {
    -083return Bytes.putFloat(buff, offset, 
    val);
    -084  }
    -085}
    +051  public boolean isSkippable() {
    +052return true;
    +053  }
    +054
    +055  @Override
    +056  public int encodedLength(Float val) {
    +057return Bytes.SIZEOF_FLOAT;
    +058  }
    +059
    +060  @Override
+061  public Class<Float> encodedClass() {
    +062return Float.class;
    +063  }
    +064
    +065  @Override
    +066  public int skip(PositionedByteRange 
    src) {
    +067src.setPosition(src.getPosition() + 
    Bytes.SIZEOF_FLOAT);
    +068return Bytes.SIZEOF_FLOAT;
    +069  }
    +070
    +071  @Override
    +072  public Float decode(PositionedByteRange 
    src) {
    +073float val = 
    Bytes.toFloat(src.getBytes(), src.getOffset() + src.getPosition());
    +074skip(src);
    +075return val;
    +076  }
    +077
    +078  @Override
    +079  public int encode(PositionedByteRange 
    dst, Float val) {
    +080Bytes.putFloat(dst.getBytes(), 
    dst.getOffset() + dst.getPosition(), val);
    +081return skip(dst);
    +082  }
    +083
    +084  /**
    +085   * Read a {@code float} value from the 
    buffer {@code buff}.
    +086   */
    +087  public float decodeFloat(byte[] buff, 
    int offset) {
    +088return Bytes.toFloat(buff, offset);
    +089  }
    +090
    +091  /**
    +092   * Write instance {@code val} into 
    buffer {@code buff}.
    +093   */
    +094  public int encodeFloat(byte[] buff, int 
    offset, float val) {
    +095return Bytes.putFloat(buff, offset, 
    val);
    +096  }
    +097}
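
A short usage sketch of the reformatted RawFloat, assuming hbase-common's SimplePositionedMutableByteRange as the PositionedByteRange implementation (not shown in this diff):

    import org.apache.hadoop.hbase.types.RawFloat;
    import org.apache.hadoop.hbase.util.PositionedByteRange;
    import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;

    public class RawFloatDemo {
      public static void main(String[] args) {
        RawFloat type = new RawFloat();
        // encodedLength is fixed at Bytes.SIZEOF_FLOAT (4) regardless of value
        PositionedByteRange buf =
            new SimplePositionedMutableByteRange(type.encodedLength(0f));
        type.encode(buf, 3.14f);             // writes 4 bytes, advances the position
        buf.setPosition(0);
        System.out.println(type.decode(buf)); // prints 3.14
      }
    }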
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4c0cfa5/apidocs/src-html/org/apache/hadoop/hbase/types/RawInteger.html
    --
    diff --git a/apidocs/src-html/org/apache/hadoop/hbase/types/RawInteger.html 
    b/apidocs/src-html/org/apache/hadoop/hbase/types/RawInteger.html
    index 90e0ed8..3af2af5 100644
    --- a/apidocs/src-html/org/apache/hadoop/hbase/types/RawInteger.html
    +++ b/apidocs/src-html/org/apache/hadoop/hbase/types/RawInteger.html
    @@ -25,10 +25,10 @@
     017 */
     018package org.apache.hadoop.hbase.types;
     019
    -020import 
    org.apache.yetus.audience.InterfaceAudience;
    -021import 
    org.apache.hadoop.hbase.util.Bytes;
    -022import 
    org.apache.hadoop.hbase.util.Order;
    -023import 
    org.apache.hadoop.hbase.util.PositionedByteRange;
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/071f974b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.html 
    b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.html
    index 555d97e..15c318d 100644
    --- a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.html
    +++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutput.html
    @@ -70,29 +70,34 @@
     062  int buffered();
     063
     064  /**
    -065   * Return current pipeline. Empty array 
    if no pipeline.
    +065   * Whether the stream is broken.
     066   */
    -067  DatanodeInfo[] getPipeline();
    +067  boolean isBroken();
     068
     069  /**
    -070   * Flush the buffer out.
    -071   * @param sync persistent the data to 
    device
    -072   * @return A CompletableFuture that 
    hold the acked length after flushing.
    -073   */
-074  CompletableFuture<Long> flush(boolean sync);
    -075
    -076  /**
    -077   * The close method when error 
    occurred.
    +070   * Return current pipeline. Empty array 
    if no pipeline.
    +071   */
    +072  DatanodeInfo[] getPipeline();
    +073
    +074  /**
    +075   * Flush the buffer out.
    +076   * @param sync persistent the data to 
    device
    +077   * @return A CompletableFuture that 
    hold the acked length after flushing.
     078   */
    -079  void 
    recoverAndClose(CancelableProgressable reporter) throws IOException;
+079  CompletableFuture<Long> flush(boolean sync);
     080
     081  /**
    -082   * Close the file. You should call 
    {@link #recoverAndClose(CancelableProgressable)} if this method
    -083   * throws an exception.
    -084   */
    -085  @Override
    -086  void close() throws IOException;
    -087}
    +082   * The close method when error 
    occurred.
    +083   */
    +084  void 
    recoverAndClose(CancelableProgressable reporter) throws IOException;
    +085
    +086  /**
    +087   * Close the file. You should call 
    {@link #recoverAndClose(CancelableProgressable)} if this method
    +088   * throws an exception.
    +089   */
    +090  @Override
    +091  void close() throws IOException;
    +092}
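
A hedged sketch of driving the reordered interface, including the newly added isBroken() probe; the interface and its methods are taken from the diff, while the helper method and its error handling are assumptions:

    import java.io.IOException;
    import java.util.concurrent.CompletableFuture;
    import org.apache.hadoop.hbase.io.asyncfs.AsyncFSOutput;
    import org.apache.hadoop.hbase.util.CancelableProgressable;

    public class AsyncFSOutputUsage {
      // Flush buffered bytes; fall back to recoverAndClose() on a broken
      // stream, as the interface javadoc prescribes for close() failures.
      static long flushOrRecover(AsyncFSOutput out, CancelableProgressable reporter)
          throws IOException {
        if (out.isBroken()) {                        // new in this hunk
          out.recoverAndClose(reporter);
          throw new IOException("output stream already broken");
        }
        CompletableFuture<Long> acked = out.flush(true); // sync data to device
        return acked.join();                         // acked length after flushing
      }
    }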
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/071f974b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
    index 9b7087a..1ce05b0 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
    @@ -26,160 +26,59 @@
     018package 
    org.apache.hadoop.hbase.io.asyncfs;
     019
     020import java.io.IOException;
    -021import java.io.InterruptedIOException;
    -022import java.nio.ByteBuffer;
    -023import 
    java.util.concurrent.CompletableFuture;
    -024import 
    java.util.concurrent.ExecutionException;
    -025import 
    java.util.concurrent.ExecutorService;
    -026import java.util.concurrent.Executors;
    -027
    -028import 
    org.apache.hadoop.fs.CommonConfigurationKeysPublic;
    -029import 
    org.apache.hadoop.fs.FSDataOutputStream;
    -030import org.apache.hadoop.fs.FileSystem;
    -031import org.apache.hadoop.fs.Path;
    -032import 
    org.apache.hadoop.hbase.io.ByteArrayOutputStream;
    -033import 
    org.apache.hadoop.hbase.util.CancelableProgressable;
    -034import 
    org.apache.hadoop.hbase.util.CommonFSUtils;
    -035import 
    org.apache.hadoop.hdfs.DistributedFileSystem;
    -036import 
    org.apache.hadoop.hdfs.protocol.DatanodeInfo;
    -037import 
    org.apache.yetus.audience.InterfaceAudience;
    +021
    +022import 
    org.apache.hadoop.fs.CommonConfigurationKeysPublic;
    +023import 
    org.apache.hadoop.fs.FSDataOutputStream;
    +024import org.apache.hadoop.fs.FileSystem;
    +025import org.apache.hadoop.fs.Path;
    +026import 
    org.apache.hadoop.hbase.util.CommonFSUtils;
    +027import 
    org.apache.hadoop.hdfs.DistributedFileSystem;
    +028import 
    org.apache.yetus.audience.InterfaceAudience;
    +029
    +030import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
    +031import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoopGroup;
    +032
    +033/**
    +034 * Helper class for creating 
    AsyncFSOutput.
    +035 */
    +036@InterfaceAudience.Private
    +037public final class AsyncFSOutputHelper 
    {
     038
    -039import 
    org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
    -040import 
    org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
    -041import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
    -042import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoopGroup;
    -043
    -044/**
    -045 * Helper class for creating 
    AsyncFSOutput.
    -046 */
    -047@InterfaceAudience.Private
    -048public final class AsyncFSOutputHelper 
    {
    -049
    -050  private AsyncFSOutputHelper() {
    -051  }
    -052
    -053  /**
    -054   * Create {@link 
    FanOutOneBlockAsyncDFSOutput} for {@link DistributedFileSystem}, and a simple
    -055   * implementation for 

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dc4e5c85/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.html
    index 9392d16..069298a 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.html
    @@ -96,355 +96,359 @@
     088
     089  @Override
     090  public void reloadConfiguration() {
    -091throw new 
    UnsupportedOperationException("Read-only Configuration");
    -092  }
    -093
    -094  @Override
    -095  public String get(String name) {
    -096return conf.get(name);
    -097  }
    -098
    -099  public void 
    setAllowNullValueProperties(boolean val) {
    -100throw new 
    UnsupportedOperationException("Read-only Configuration");
    +091// This is a write operation. We need 
    to allow it though because if any Configuration in
    +092// current JVM context calls 
    addDefaultResource, this forces a reload of all Configurations
    +093// (all Configurations are 
    'registered' by the default constructor. Rather than turn
    +094// somersaults, let this 'write' 
    operation through.
    +095this.conf.reloadConfiguration();
    +096  }
    +097
    +098  @Override
    +099  public String get(String name) {
    +100return conf.get(name);
     101  }
     102
    -103  public String getTrimmed(String name) 
    {
    -104return conf.getTrimmed(name);
    +103  public void 
    setAllowNullValueProperties(boolean val) {
    +104throw new 
    UnsupportedOperationException("Read-only Configuration");
     105  }
     106
    -107  @Override
    -108  public String getTrimmed(String name, 
    String defaultValue) {
    -109return conf.getTrimmed(name, 
    defaultValue);
    -110  }
    -111
    -112  @Override
    -113  public String getRaw(String name) {
    -114return conf.getRaw(name);
    -115  }
    -116
    -117  @Override
    -118  public void set(String name, String 
    value) {
    -119throw new 
    UnsupportedOperationException("Read-only Configuration");
    -120  }
    -121
    -122  @Override
    -123  public void set(String name, String 
    value, String source) {
    -124throw new 
    UnsupportedOperationException("Read-only Configuration");
    -125  }
    -126
    -127  @Override
    -128  public void unset(String name) {
    -129throw new 
    UnsupportedOperationException("Read-only Configuration");
    -130  }
    -131
    -132  @Override
    -133  public void setIfUnset(String name, 
    String value) {
    -134throw new 
    UnsupportedOperationException("Read-only Configuration");
    -135  }
    -136
    -137  @Override
    -138  public String get(String name, String 
    defaultValue) {
    -139return conf.get(name, 
    defaultValue);
    -140  }
    -141
    -142  @Override
    -143  public int getInt(String name, int 
    defaultValue) {
    -144return conf.getInt(name, 
    defaultValue);
    -145  }
    -146
    -147  @Override
    -148  public int[] getInts(String name) {
    -149return conf.getInts(name);
    -150  }
    -151
    -152  @Override
    -153  public void setInt(String name, int 
    value) {
    -154throw new 
    UnsupportedOperationException("Read-only Configuration");
    -155  }
    -156
    -157  @Override
    -158  public long getLong(String name, long 
    defaultValue) {
    -159return conf.getLong(name, 
    defaultValue);
    -160  }
    -161
    -162  @Override
    -163  public long getLongBytes(String name, 
    long defaultValue) {
    -164return conf.getLongBytes(name, 
    defaultValue);
    -165  }
    -166
    -167  @Override
    -168  public void setLong(String name, long 
    value) {
    -169throw new 
    UnsupportedOperationException("Read-only Configuration");
    -170  }
    -171
    -172  @Override
    -173  public float getFloat(String name, 
    float defaultValue) {
    -174return conf.getFloat(name, 
    defaultValue);
    -175  }
    -176
    -177  @Override
    -178  public void setFloat(String name, float 
    value) {
    -179throw new 
    UnsupportedOperationException("Read-only Configuration");
    -180  }
    -181
    -182  @Override
    -183  public double getDouble(String name, 
    double defaultValue) {
    -184return conf.getDouble(name, 
    defaultValue);
    -185  }
    -186
    -187  @Override
    -188  public void setDouble(String name, 
    double value) {
    -189throw new 
    UnsupportedOperationException("Read-only Configuration");
    -190  }
    -191
    -192  @Override
    -193  public boolean getBoolean(String name, 
    boolean defaultValue) {
    -194return conf.getBoolean(name, 
    defaultValue);
    -195  }
    -196
    -197  @Override
    -198  public void setBoolean(String name, 
    boolean value) {
    -199throw new 
    UnsupportedOperationException("Read-only Configuration");
    -200  }
    -201
    -202  @Override
    -203  public void setBooleanIfUnset(String 
    name, boolean value) {
    -204throw new 
    UnsupportedOperationException("Read-only Configuration");
    -205  }
    -206
    -207  @Override
-208  public <T extends Enum<T>> void setEnum(String name, T value) {
    -209throw new 
    UnsupportedOperationException("Read-only Configuration");
    -210  }
    -211
    -212  @Override
-213  public <T extends Enum<T>> T getEnum(String 
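
The comment added at reloadConfiguration() above carries the design rationale: every write throws, except that one deliberate pass-through. A minimal standalone sketch of the delegation pattern (hypothetical class, assuming hadoop-common's Configuration; the real ReadOnlyConfiguration overrides far more accessors):

    import org.apache.hadoop.conf.Configuration;

    // Reads delegate, writes throw, except reloadConfiguration(): any
    // Configuration.addDefaultResource() call forces a reload of every
    // registered Configuration in the JVM, so that one write must pass.
    public class ReadOnlyConfigurationSketch extends Configuration {
      private final Configuration conf;

      public ReadOnlyConfigurationSketch(Configuration conf) {
        this.conf = conf;
      }

      @Override
      public void reloadConfiguration() {
        this.conf.reloadConfiguration(); // deliberate write pass-through
      }

      @Override
      public String get(String name) {
        return conf.get(name);           // plain read delegation
      }

      @Override
      public void set(String name, String value) {
        throw new UnsupportedOperationException("Read-only Configuration");
      }
    }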

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4abd958d/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html
    --
    diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html 
    b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html
    index 915e78a..abee553 100644
    --- a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html
    +++ b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html
    @@ -36,8 +36,8 @@
     028import java.util.regex.Pattern;
     029
     030import 
    org.apache.commons.lang3.ArrayUtils;
    -031import 
    org.apache.yetus.audience.InterfaceAudience;
    -032import 
    org.apache.hadoop.hbase.util.Bytes;
    +031import 
    org.apache.hadoop.hbase.util.Bytes;
    +032import 
    org.apache.yetus.audience.InterfaceAudience;
     033
     034/**
     035 * HConstants holds a bunch of 
    HBase-related constants
    @@ -558,786 +558,800 @@
     550  /**
     551   * Timestamp to use when we want to 
    refer to the latest cell.
     552   *
    -553   * On client side, this is the 
    timestamp set by default when no timestamp is specified, to refer to the 
    latest.
    -554   * On server side, this acts as a 
    notation.
    -555   * (1) For a cell of Put, which has 
    this notation,
    -556   * its timestamp will be replaced 
    with server's current time.
    -557   * (2) For a cell of Delete, which has 
    this notation,
    -558   * A. If the cell is of {@link 
    KeyValue.Type#Delete}, HBase issues a Get operation firstly.
    -559   *a. When the count of cell it 
    gets is less than the count of cell to delete,
    -560   *   the timestamp of Delete 
    cell will be replaced with server's current time.
    -561   *b. When the count of cell it 
    gets is equal to the count of cell to delete,
    -562   *   the timestamp of Delete 
    cell will be replaced with the latest timestamp of cell it gets.
    -563   *   (c. It is invalid and an 
    exception will be thrown,
    -564   *   if the count of cell it 
    gets is greater than the count of cell to delete,
    -565   *   as the max version of Get 
    is set to the count of cell to delete.)
    -566   * B. If the cell is of other 
    Delete types, like {@link KeyValue.Type#DeleteFamilyVersion},
    -567   *{@link 
    KeyValue.Type#DeleteColumn}, or {@link KeyValue.Type#DeleteFamily},
    -568   *the timestamp of Delete cell 
    will be replaced with server's current time.
    -569   *
    -570   * So that is why it is named as 
    "latest" but assigned as the max value of Long.
    -571   */
    -572  public static final long 
    LATEST_TIMESTAMP = Long.MAX_VALUE;
    -573
    -574  /**
    -575   * Timestamp to use when we want to 
    refer to the oldest cell.
    -576   * Special! Used in fake Cells only. 
    Should never be the timestamp on an actual Cell returned to
    -577   * a client.
    -578   * @deprecated Should not be public 
    since hbase-1.3.0. For internal use only. Move internal to
    -579   * Scanners flagged as special 
    timestamp value never to be returned as timestamp on a Cell.
    -580   */
    -581  @Deprecated
    -582  public static final long 
    OLDEST_TIMESTAMP = Long.MIN_VALUE;
    -583
    -584  /**
    -585   * LATEST_TIMESTAMP in bytes form
    -586   */
    -587  public static final byte [] 
    LATEST_TIMESTAMP_BYTES = {
-588// big-endian
-589(byte) (LATEST_TIMESTAMP >>> 56),
-590(byte) (LATEST_TIMESTAMP >>> 48),
-591(byte) (LATEST_TIMESTAMP >>> 40),
-592(byte) (LATEST_TIMESTAMP >>> 32),
-593(byte) (LATEST_TIMESTAMP >>> 24),
-594(byte) (LATEST_TIMESTAMP >>> 16),
-595(byte) (LATEST_TIMESTAMP >>> 8),
-596(byte) LATEST_TIMESTAMP,
-597  };
    -598
    -599  /**
    -600   * Define for 'return-all-versions'.
    -601   */
    -602  public static final int ALL_VERSIONS = 
    Integer.MAX_VALUE;
    -603
    -604  /**
    -605   * Unlimited time-to-live.
    -606   */
    -607//  public static final int FOREVER = 
    -1;
    -608  public static final int FOREVER = 
    Integer.MAX_VALUE;
    -609
    -610  /**
    -611   * Seconds in a day, hour and minute
    -612   */
    -613  public static final int DAY_IN_SECONDS 
    = 24 * 60 * 60;
    -614  public static final int HOUR_IN_SECONDS 
    = 60 * 60;
    -615  public static final int 
    MINUTE_IN_SECONDS = 60;
    -616
    -617  //TODO: although the following are 
    referenced widely to format strings for
    -618  //  the shell. They really aren't a 
    part of the public API. It would be
    -619  //  nice if we could put them 
    somewhere where they did not need to be
    -620  //  public. They could have package 
    visibility
    -621  public static final String NAME = 
    "NAME";
    -622  public static final String VERSIONS = 
    "VERSIONS";
    -623  public static final String IN_MEMORY = 
    "IN_MEMORY";
    -624  public static final String METADATA = 
    "METADATA";
    -625  public static final String 
    CONFIGURATION = "CONFIGURATION";
    -626
    -627  /**
    -628   * Retrying we multiply 
    hbase.client.pause setting by what we have in this array until we
    -629   * run out of array items.  Retries 
    beyond this use the last number in the array.  So, for
    -630   * example, if hbase.client.pause is 1 
    second, and maximum retries count
    -631   * hbase.client.retries.number is 
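
As a quick cross-check of the LATEST_TIMESTAMP_BYTES constant above, this standalone sketch regenerates the big-endian bytes from Long.MAX_VALUE:

    public class LatestTimestampBytes {
      public static void main(String[] args) {
        final long LATEST_TIMESTAMP = Long.MAX_VALUE;
        byte[] bytes = new byte[8];
        // big-endian: most significant byte first, shifts of 56 down to 0
        for (int i = 0; i < 8; i++) {
          bytes[i] = (byte) (LATEST_TIMESTAMP >>> (56 - 8 * i));
        }
        // prints: 7f ff ff ff ff ff ff ff
        for (byte b : bytes) {
          System.out.printf("%02x ", b);
        }
      }
    }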

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e23b49ba/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
    --
    diff --git 
    a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html 
    b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
    index 6b003aa..4642766 100644
    --- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
    +++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
    @@ -1508,19 +1508,17 @@ Input/OutputFormats, a table indexing MapReduce job, 
    and utility methods.
     default void
RegionObserver.postWALRestore(ObserverContext<? extends RegionCoprocessorEnvironment> ctx,
  RegionInfo info,
-  WALKey logKey,
+  WALKey logKey,
  WALEdit logEdit)
    -Deprecated.
    -Since hbase-2.0.0. No 
    replacement. To be removed in hbase-3.0.0 and replaced
    - with something that doesn't expose IntefaceAudience.Private 
    classes.
    -
    +Called after a WALEdit
    + replayed for this region.
     
     
     
     default void
WALObserver.postWALWrite(ObserverContext<? extends WALCoprocessorEnvironment> ctx,
RegionInfo info,
-WALKey logKey,
+WALKey logKey,
WALEdit logEdit)
     Deprecated.
     Since hbase-2.0.0. To be 
    replaced with an alternative that does not expose
    @@ -1621,19 +1619,17 @@ Input/OutputFormats, a table indexing MapReduce job, 
    and utility methods.
     default void
RegionObserver.preWALRestore(ObserverContext<? extends RegionCoprocessorEnvironment> ctx,
 RegionInfo info,
- WALKey logKey,
+ WALKey logKey,
 WALEdit logEdit)
    -Deprecated.
    -Since hbase-2.0.0. No 
    replacement. To be removed in hbase-3.0.0 and replaced
    - with something that doesn't expose IntefaceAudience.Private 
    classes.
    -
    +Called before a WALEdit
    + replayed for this region.
     
     
     
     default void
WALObserver.preWALWrite(ObserverContext<? extends WALCoprocessorEnvironment> ctx,
RegionInfo info,
-   WALKey logKey,
+   WALKey logKey,
WALEdit logEdit)
     Deprecated.
     Since hbase-2.0.0. To be 
    replaced with an alternative that does not expose
    @@ -5082,7 +5078,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
    utility methods.
     
     void
RegionCoprocessorHost.postWALRestore(RegionInfo info,
-  WALKey logKey,
+  WALKey logKey,
  WALEdit logEdit)
     Deprecated.
     Since hbase-2.0.0. No 
    replacement. To be removed in hbase-3.0.0 and replaced
    @@ -5098,7 +5094,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
    utility methods.
     
     boolean
RegionCoprocessorHost.preWALRestore(RegionInfo info,
- WALKey logKey,
+ WALKey logKey,
 WALEdit logEdit)
     Deprecated.
     Since hbase-2.0.0. No 
    replacement. To be removed in hbase-3.0.0 and replaced
    @@ -5366,22 +5362,22 @@ Input/OutputFormats, a table indexing MapReduce job, 
    and utility methods.
     
     
     long
-FSHLog.append(RegionInfo hri,
-  WALKey key,
+FSHLog.append(RegionInfo hri,
+  WALKeyImpl key,
  WALEdit edits,
  boolean inMemstore)
     
     
     long
-AsyncFSWAL.append(RegionInfo hri,
-  WALKey key,
+AsyncFSWAL.append(RegionInfo hri,
+  WALKeyImpl key,
  WALEdit edits,
  boolean inMemstore)
     
     
     abstract long
-AbstractFSWAL.append(RegionInfo info,
-  WALKey key,
+AbstractFSWAL.append(RegionInfo info,
+  WALKeyImpl key,
  WALEdit edits,
  boolean inMemstore)
     NOTE: This append, at a time that is usually after this 
    call returns, starts an mvcc
    @@ -5389,7 +5385,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
    utility methods.
     
     
     
-static WALKey
+static WALKeyImpl
WALUtil.doFullAppendTransaction(WAL wal,
NavigableMap<byte[], Integer> replicationScope,
RegionInfo hri,
    @@ -5403,29 +5399,19 @@ Input/OutputFormats, a table indexing MapReduce job, 
    and utility methods.
     
     void
WALCoprocessorHost.postWALWrite(RegionInfo info,
-WALKey logKey,
-WALEdit logEdit)
-Deprecated.
-Since hbase-2.0.0. No replacement. To be removed in hbase-3.0.0 and replaced
- with something that doesn't expose IntefaceAudience.Private classes.
-
-
+WALKey logKey,
+WALEdit logEdit)
     
     
     void
WALCoprocessorHost.preWALWrite(RegionInfo info,
-   WALKey logKey,
-   WALEdit logEdit)
-Deprecated.
-Since hbase-2.0.0. No replacement. To be removed in hbase-3.0.0 and replaced
- with something that doesn't expose IntefaceAudience.Private classes.
-
-
+   WALKey logKey,
+   WALEdit logEdit)
     
     
     protected long
-AbstractFSWAL.stampSequenceIdAndPublishToRingBuffer(RegionInfo hri,
    -

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-info.html
    --
    diff --git 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-info.html
     
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-info.html
    index 31dca6b..563d5b8 100644
    --- 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-info.html
    +++ 
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-info.html
    @@ -7,7 +7,7 @@
       
     
     
    -
    +
     
     Apache HBase - Archetype builder  Dependency 
    Information
     
    @@ -148,7 +148,7 @@
     https://www.apache.org/;>The Apache Software 
    Foundation.
     All rights reserved.  
     
    -  Last Published: 
    2017-12-05
    +  Last Published: 
    2017-12-06
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
    --
    diff --git 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
     
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
    index b60f886..4b02c4e 100644
    --- 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
    +++ 
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
    @@ -7,7 +7,7 @@
       
     
     
    -
    +
     
     Apache HBase - Archetype builder  Project Dependency 
    Management
     
    @@ -775,18 +775,24 @@
     test-jar
     https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
    2.0
     
    +org.apache.hbase
    +http://hbase.apache.org/hbase-build-configuration/hbase-zookeeper;>hbase-zookeeper
    +3.0.0-SNAPSHOT
    +test-jar
    +https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
    2.0
    +
     org.bouncycastle
     http://www.bouncycastle.org/java.html;>bcprov-jdk16
     1.46
     jar
     http://www.bouncycastle.org/licence.html;>Bouncy Castle 
    Licence
    -
    +
     org.hamcrest
     https://github.com/hamcrest/JavaHamcrest/hamcrest-core;>hamcrest-core
     1.3
     jar
     http://www.opensource.org/licenses/bsd-license.php;>New BSD 
    License
    -
    +
     org.mockito
     http://mockito.org;>mockito-core
     2.1.0
    @@ -804,7 +810,7 @@
     https://www.apache.org/;>The Apache Software 
    Foundation.
     All rights reserved.  
     
    -  Last Published: 
    2017-12-05
    +  Last Published: 
    2017-12-06
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html
    --
    diff --git 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html 
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html
    index e1ea5fc..cc6dfe3 100644
    --- 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html
    +++ 
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html
    @@ -7,7 +7,7 @@
       
     
     
    -
    +
     
     Apache HBase - Archetype builder  About
     
    @@ -119,7 +119,7 @@
     https://www.apache.org/;>The Apache Software 
    Foundation.
     All rights reserved.  
     
    -  Last Published: 
    2017-12-05
    +  Last Published: 
    2017-12-06
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
    --
    diff --git 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
     
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
    index d9fd2e6..2fdfea7 100644
    --- 
    a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
    +++ 
    b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
    @@ -7,7 +7,7 @@
       
     
     
    -
    +
     
     Apache HBase - Archetype builder  CI Management
     
    @@ -126,7 +126,7 @@
     https://www.apache.org/;>The Apache Software 
    Foundation.
     All rights reserved.  
     
    -  Last Published: 
    2017-12-05
    +  Last Published: 
    2017-12-06
     
     
     
    
    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/issue-tracking.html
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c54c242b/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependencies.html
    --
    diff --git 
    a/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependencies.html
     
    b/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependencies.html
    index 6003316..7971bf8 100644
    --- 
    a/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependencies.html
    +++ 
    b/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependencies.html
    @@ -7,7 +7,7 @@
       
     
     
    -
    +
     
     Apache HBase - Exemplar for hbase-client archetype  Project 
    Dependencies
     
    @@ -1571,7 +1571,16 @@ Jackson JSON processor's data binding functionality.
     Description: JSch is a pure Java implementation of SSH2
     URL: http://www.jcraft.com/jsch/;>http://www.jcraft.com/jsch/
     Project Licenses: http://www.jcraft.com/jsch/LICENSE.txt;>Revised 
    BSD
    -org.apache.curator:curator-recipes:jar:4.0.0 (compile) 
    +org.apache.curator:curator-client:jar:4.0.0 (compile) 
    +
    +
    +Curator Client
    +
    +
    +Description: Low-level API
    +URL: http://curator.apache.org/curator-client;>http://curator.apache.org/curator-client
    +Project Licenses: http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
    License, Version 2.0
    +org.apache.curator:curator-recipes:jar:4.0.0 (compile) 
     
     
     Curator Recipes
    @@ -1580,7 +1589,7 @@ Jackson JSON processor's data binding functionality.
     Description: All of the recipes listed on the ZooKeeper recipes doc 
    (except two phase commit).
     URL: http://curator.apache.org/curator-recipes;>http://curator.apache.org/curator-recipes
     Project Licenses: http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
    License, Version 2.0
    -org.apache.commons:commons-compress:jar:1.4.1 (compile) 
    +org.apache.commons:commons-compress:jar:1.4.1 (compile) 
     
     
     Commons Compress
    @@ -1591,7 +1600,7 @@ These include: bzip2, gzip, pack200, xz and ar, cpio, 
    jar, tar, zip, dump.
     URL: http://commons.apache.org/compress/;>http://commons.apache.org/compress/
     Project Licenses: http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
    License, Version 2.0
     
    -org.tukaani:xz:jar:1.0 (compile) 
    +org.tukaani:xz:jar:1.0 (compile) 
     
     
     XZ for Java
    @@ -1600,7 +1609,7 @@ These include: bzip2, gzip, pack200, xz and ar, cpio, 
    jar, tar, zip, dump.
     Description: XZ data compression
     URL: http://tukaani.org/xz/java.html;>http://tukaani.org/xz/java.html
     Project Licenses: Public 
    Domain
    -org.apache.hadoop:hadoop-auth:jar:2.7.4 (compile) 
    +org.apache.hadoop:hadoop-auth:jar:2.7.4 (compile) 
     
     
     Apache Hadoop Auth
    @@ -1609,7 +1618,7 @@ These include: bzip2, gzip, pack200, xz and ar, cpio, 
    jar, tar, zip, dump.
     Description: Apache Hadoop Auth - Java HTTP SPNEGO
     Project Licenses: http://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
    2.0
     
    -org.apache.httpcomponents:httpclient:jar:4.5.3 (compile) 
    +org.apache.httpcomponents:httpclient:jar:4.5.3 (compile) 
     
     
     Apache HttpClient
    @@ -1618,7 +1627,7 @@ These include: bzip2, gzip, pack200, xz and ar, cpio, 
    jar, tar, zip, dump.
     Description: Apache HttpComponents Client
     URL: http://hc.apache.org/httpcomponents-client;>http://hc.apache.org/httpcomponents-client
     Project Licenses: http://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
    2.0
    -org.apache.directory.server:apacheds-kerberos-codec:jar:2.0.0-M15 
    (compile) 
    +org.apache.directory.server:apacheds-kerberos-codec:jar:2.0.0-M15 
    (compile) 
     
     
     ApacheDS Protocol Kerberos Codec
    @@ -1628,7 +1637,7 @@ These include: bzip2, gzip, pack200, xz and ar, cpio, 
    jar, tar, zip, dump.
     URL: http://directory.apache.org/apacheds/1.5/apacheds-kerberos-codec;>http://directory.apache.org/apacheds/1.5/apacheds-kerberos-codec
     Project Licenses: http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
    License, Version 2.0
     
    -org.apache.directory.server:apacheds-i18n:jar:2.0.0-M15 (compile) 
    +org.apache.directory.server:apacheds-i18n:jar:2.0.0-M15 (compile) 
     
     
     ApacheDS I18n
    @@ -1637,7 +1646,7 @@ These include: bzip2, gzip, pack200, xz and ar, cpio, 
    jar, tar, zip, dump.
     Description: Internationalization of errors and other messages
     URL: http://directory.apache.org/apacheds/1.5/apacheds-i18n;>http://directory.apache.org/apacheds/1.5/apacheds-i18n
     Project Licenses: http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
    License, Version 2.0
    -org.apache.directory.api:api-asn1-api:jar:1.0.0-M20 (compile) 
    +org.apache.directory.api:api-asn1-api:jar:1.0.0-M20 (compile) 
     
     
     Apache Directory API ASN.1 API
    @@ -1646,7 +1655,7 @@ These include: bzip2, gzip, pack200, xz and ar, cpio, 
    jar, tar, zip, dump.
     Description: ASN.1 API
     URL: http://directory.apache.org/api-parent/api-asn1-parent/api-asn1-api/;>http://directory.apache.org/api-parent/api-asn1-parent/api-asn1-api/
     Project Licenses: 

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/713d773f/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html
    index 25e368d..d0f781f 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html
    @@ -25,798 +25,798 @@
     017 */
     018package 
    org.apache.hadoop.hbase.io.asyncfs;
     019
    -020import static 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelOption.CONNECT_TIMEOUT_MILLIS;
    -021import static 
    org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleState.READER_IDLE;
    -022import static 
    org.apache.hadoop.fs.CreateFlag.CREATE;
    -023import static 
    org.apache.hadoop.fs.CreateFlag.OVERWRITE;
    -024import static 
    org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createEncryptor;
    -025import static 
    org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.trySaslNegotiate;
    +020import static 
    org.apache.hadoop.fs.CreateFlag.CREATE;
    +021import static 
    org.apache.hadoop.fs.CreateFlag.OVERWRITE;
    +022import static 
    org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createEncryptor;
    +023import static 
    org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.trySaslNegotiate;
    +024import static 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelOption.CONNECT_TIMEOUT_MILLIS;
    +025import static 
    org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleState.READER_IDLE;
     026import static 
    org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
     027import static 
    org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME;
     028import static 
    org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT;
     029import static 
    org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage.PIPELINE_SETUP_CREATE;
     030
    -031import 
    org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
    -032import 
    org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
    -033import 
    com.google.protobuf.CodedOutputStream;
    -034
    -035import 
    org.apache.hadoop.hbase.shaded.io.netty.bootstrap.Bootstrap;
    -036import 
    org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBuf;
    -037import 
    org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBufAllocator;
    -038import 
    org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBufOutputStream;
    -039import 
    org.apache.hadoop.hbase.shaded.io.netty.buffer.PooledByteBufAllocator;
    -040import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
    -041import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelFuture;
    -042import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelFutureListener;
    -043import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandler;
    -044import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContext;
    -045import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelInitializer;
    -046import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelPipeline;
    -047import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop;
    -048import 
    org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHandler;
    -049import 
    org.apache.hadoop.hbase.shaded.io.netty.handler.codec.protobuf.ProtobufDecoder;
    -050import 
    org.apache.hadoop.hbase.shaded.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
    -051import 
    org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleStateEvent;
    -052import 
    org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleStateHandler;
    -053import 
    org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Future;
    -054import 
    org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.FutureListener;
    -055import 
    org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Promise;
    -056
    -057import java.io.IOException;
    -058import 
    java.lang.reflect.InvocationTargetException;
    -059import java.lang.reflect.Method;
    -060import java.util.ArrayList;
    -061import java.util.EnumSet;
    -062import java.util.List;
    -063import java.util.concurrent.TimeUnit;
    -064
    -065import org.apache.commons.logging.Log;
    -066import 
    org.apache.commons.logging.LogFactory;
    -067import 
    org.apache.hadoop.conf.Configuration;
    -068import 
    org.apache.hadoop.crypto.CryptoProtocolVersion;
    -069import 
    org.apache.hadoop.crypto.Encryptor;
    -070import org.apache.hadoop.fs.CreateFlag;
    -071import org.apache.hadoop.fs.FileSystem;
    -072import 
    org.apache.hadoop.fs.FileSystemLinkResolver;
    -073import org.apache.hadoop.fs.Path;
    -074import 
    org.apache.hadoop.fs.UnresolvedLinkException;
    -075import 
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/fd365a2b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html
    index d438f22..7c59e27 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.EnableTableFuture.html
    @@ -1290,8 +1290,8 @@
     1282   CompactType 
    compactType) throws IOException {
     1283switch (compactType) {
     1284  case MOB:
    -1285
    compact(this.connection.getAdminForMaster(), getMobRegionInfo(tableName), 
    major,
    -1286  columnFamily);
    +1285
    compact(this.connection.getAdminForMaster(), 
    RegionInfo.createMobRegionInfo(tableName),
    +1286major, columnFamily);
     1287break;
     1288  case NORMAL:
     1289checkTableExists(tableName);
    @@ -3248,7 +3248,7 @@
 3240  new Callable<AdminProtos.GetRegionInfoResponse.CompactionState>() {
     3241@Override
     3242public 
    AdminProtos.GetRegionInfoResponse.CompactionState call() throws Exception {
    -3243  RegionInfo info = 
    getMobRegionInfo(tableName);
    +3243  RegionInfo info = 
    RegionInfo.createMobRegionInfo(tableName);
     3244  GetRegionInfoRequest 
    request =
     3245
    RequestConverter.buildGetRegionInfoRequest(info.getRegionName(), true);
     3246  GetRegionInfoResponse 
    response = masterAdmin.getRegionInfo(rpcController, request);
    @@ -3312,7 +3312,7 @@
     3304}
     3305break;
     3306  default:
    -3307throw new 
    IllegalArgumentException("Unknowne compactType: " + compactType);
    +3307throw new 
    IllegalArgumentException("Unknown compactType: " + compactType);
     3308}
     3309if (state != null) {
     3310  return 
    ProtobufUtil.createCompactionState(state);
    @@ -3847,325 +3847,320 @@
     3839});
     3840  }
     3841
    -3842  private RegionInfo 
    getMobRegionInfo(TableName tableName) {
    -3843return 
    RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes(".mob")).setRegionId(0)
    -3844.build();
    -3845  }
    -3846
    -3847  private RpcControllerFactory 
    getRpcControllerFactory() {
    -3848return this.rpcControllerFactory;
    -3849  }
    -3850
    -3851  @Override
    -3852  public void addReplicationPeer(String 
    peerId, ReplicationPeerConfig peerConfig, boolean enabled)
    -3853  throws IOException {
-3854executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
    -3855  @Override
    -3856  protected Void rpcCall() throws 
    Exception {
    -3857
    master.addReplicationPeer(getRpcController(),
    -3858  
    RequestConverter.buildAddReplicationPeerRequest(peerId, peerConfig, 
    enabled));
    -3859return null;
    -3860  }
    -3861});
    -3862  }
    -3863
    -3864  @Override
    -3865  public void 
    removeReplicationPeer(String peerId) throws IOException {
-3866executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
    -3867  @Override
    -3868  protected Void rpcCall() throws 
    Exception {
    -3869
    master.removeReplicationPeer(getRpcController(),
    -3870  
    RequestConverter.buildRemoveReplicationPeerRequest(peerId));
    -3871return null;
    -3872  }
    -3873});
    -3874  }
    -3875
    -3876  @Override
    -3877  public void 
    enableReplicationPeer(final String peerId) throws IOException {
-3878executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
    -3879  @Override
    -3880  protected Void rpcCall() throws 
    Exception {
    -3881
    master.enableReplicationPeer(getRpcController(),
    -3882  
    RequestConverter.buildEnableReplicationPeerRequest(peerId));
    -3883return null;
    -3884  }
    -3885});
    -3886  }
    -3887
    -3888  @Override
    -3889  public void 
    disableReplicationPeer(final String peerId) throws IOException {
-3890executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
    -3891  @Override
    -3892  protected Void rpcCall() throws 
    Exception {
    -3893
    master.disableReplicationPeer(getRpcController(),
    -3894  
    RequestConverter.buildDisableReplicationPeerRequest(peerId));
    -3895return null;
    -3896  }
    -3897});
    -3898  }
    -3899
    -3900  @Override
    -3901  public ReplicationPeerConfig 
    getReplicationPeerConfig(final String peerId) throws IOException {
-3902return executeCallable(new MasterCallable<ReplicationPeerConfig>(getConnection(),
    -3903getRpcControllerFactory()) {
    -3904  @Override
    -3905  protected ReplicationPeerConfig 
    rpcCall() throws Exception {
    -3906GetReplicationPeerConfigResponse 
    response = master.getReplicationPeerConfig(
    -3907  getRpcController(), 
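
The removed getMobRegionInfo() helper and the updated call sites earlier in this listing describe one refactor: the builder chain moved behind a static factory. A sketch equivalent to the removed lines (factory name per the new call sites, RegionInfo.createMobRegionInfo):

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.client.RegionInfoBuilder;
    import org.apache.hadoop.hbase.util.Bytes;

    public final class MobRegionInfoSketch {
      // Equivalent of the removed private HBaseAdmin helper, now reachable
      // as RegionInfo.createMobRegionInfo(tableName) per the updated callers.
      static RegionInfo createMobRegionInfo(TableName tableName) {
        return RegionInfoBuilder.newBuilder(tableName)
            .setStartKey(Bytes.toBytes(".mob"))
            .setRegionId(0)
            .build();
      }
    }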
    

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b9722a17/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
    index 29ea7b3..6ed75c9 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
    @@ -1313,7093 +1313,7082 @@
     1305
     1306  @Override
     1307  public boolean isSplittable() {
-1308boolean result = isAvailable() && !hasReferences();
    -1309LOG.info("ASKED IF SPLITTABLE " + 
    result + " " + getRegionInfo().getShortNameToLog(),
    -1310  new Throwable("LOGGING: 
    REMOVE"));
    -1311// REMOVE BELOW
    -1312LOG.info("DEBUG LIST ALL FILES");
    -1313for (HStore store : 
    this.stores.values()) {
    -1314  LOG.info("store " + 
    store.getColumnFamilyName());
    -1315  for (HStoreFile sf : 
    store.getStorefiles()) {
    -1316
    LOG.info(sf.toStringDetailed());
    -1317  }
    -1318}
    -1319return result;
    -1320  }
    -1321
    -1322  @Override
    -1323  public boolean isMergeable() {
    -1324if (!isAvailable()) {
    -1325  LOG.debug("Region " + this
    -1326  + " is not mergeable because 
    it is closing or closed");
    -1327  return false;
    -1328}
    -1329if (hasReferences()) {
    -1330  LOG.debug("Region " + this
    -1331  + " is not mergeable because 
    it has references");
    -1332  return false;
    -1333}
    -1334
    -1335return true;
+1308return isAvailable() && !hasReferences();
    +1309  }
    +1310
    +1311  @Override
    +1312  public boolean isMergeable() {
    +1313if (!isAvailable()) {
    +1314  LOG.debug("Region " + this
    +1315  + " is not mergeable because 
    it is closing or closed");
    +1316  return false;
    +1317}
    +1318if (hasReferences()) {
    +1319  LOG.debug("Region " + this
    +1320  + " is not mergeable because 
    it has references");
    +1321  return false;
    +1322}
    +1323
    +1324return true;
    +1325  }
    +1326
    +1327  public boolean areWritesEnabled() {
    +1328synchronized(this.writestate) {
    +1329  return 
    this.writestate.writesEnabled;
    +1330}
    +1331  }
    +1332
    +1333  @VisibleForTesting
    +1334  public MultiVersionConcurrencyControl 
    getMVCC() {
    +1335return mvcc;
     1336  }
     1337
    -1338  public boolean areWritesEnabled() {
    -1339synchronized(this.writestate) {
    -1340  return 
    this.writestate.writesEnabled;
    -1341}
    -1342  }
    -1343
    -1344  @VisibleForTesting
    -1345  public MultiVersionConcurrencyControl 
    getMVCC() {
    -1346return mvcc;
    -1347  }
    -1348
    -1349  @Override
    -1350  public long getMaxFlushedSeqId() {
    -1351return maxFlushedSeqId;
    +1338  @Override
    +1339  public long getMaxFlushedSeqId() {
    +1340return maxFlushedSeqId;
    +1341  }
    +1342
    +1343  /**
    +1344   * @return readpoint considering given 
    IsolationLevel. Pass {@code null} for default
    +1345   */
    +1346  public long 
    getReadPoint(IsolationLevel isolationLevel) {
+1347if (isolationLevel != null && isolationLevel == IsolationLevel.READ_UNCOMMITTED) {
    +1348  // This scan can read even 
    uncommitted transactions
    +1349  return Long.MAX_VALUE;
    +1350}
    +1351return mvcc.getReadPoint();
     1352  }
     1353
    -1354  /**
    -1355   * @return readpoint considering given 
    IsolationLevel. Pass {@code null} for default
    -1356   */
    -1357  public long 
    getReadPoint(IsolationLevel isolationLevel) {
-1358if (isolationLevel != null && isolationLevel == IsolationLevel.READ_UNCOMMITTED) {
    -1359  // This scan can read even 
    uncommitted transactions
    -1360  return Long.MAX_VALUE;
    -1361}
    -1362return mvcc.getReadPoint();
    -1363  }
    -1364
    -1365  public boolean 
    isLoadingCfsOnDemandDefault() {
    -1366return 
    this.isLoadingCfsOnDemandDefault;
    -1367  }
    -1368
    -1369  /**
    -1370   * Close down this HRegion.  Flush the 
    cache, shut down each HStore, don't
    -1371   * service any more calls.
    -1372   *
-1373   * <p>This method could take some time to execute, so don't call it from a
    -1374   * time-sensitive thread.
    -1375   *
    -1376   * @return Vector of all the storage 
    files that the HRegion's component
    -1377   * HStores make use of.  It's a list 
    of all StoreFile objects. Returns empty
    -1378   * vector if already closed and null 
    if judged that it should not close.
    -1379   *
    -1380   * @throws IOException e
    -1381   * @throws DroppedSnapshotException 
    Thrown when replay of wal is required
    -1382   * because a Snapshot was not properly 
    persisted. The region is put in closing mode, and the
    -1383   * caller MUST abort after this.
    -1384   */
-1385  public Map<byte[], List<HStoreFile>> close() throws IOException {
    -1386return close(false);
    -1387  }
    -1388
    -1389  private final Object closeLock = new 
    Object();
    -1390
    -1391  /** Conf key 
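
The isSplittable() hunk above strips the temporary "LOGGING: REMOVE" diagnostics and store-file dump, leaving only the predicate. A simplified standalone stand-in (the real method lives in org.apache.hadoop.hbase.regionserver.HRegion):

    public class SplittableCheckSketch {
      private boolean available = true;
      private boolean references = false;

      boolean isAvailable() { return available; }
      boolean hasReferences() { return references; }

      // After this hunk HRegion.isSplittable() is just the predicate;
      // the debug logging and Throwable("LOGGING: REMOVE") are gone.
      public boolean isSplittable() {
        return isAvailable() && !hasReferences();
      }
    }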

    [10/51] [partial] hbase-site git commit: Published site at .

    http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b1eb7453/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html
    --
    diff --git 
    a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html
     
    b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html
    index d98042d..d549086 100644
    --- 
    a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html
    +++ 
    b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html
    @@ -42,2537 +42,2536 @@
     034
     035import org.apache.commons.logging.Log;
     036import 
    org.apache.commons.logging.LogFactory;
    -037import 
    org.apache.yetus.audience.InterfaceAudience;
    +037import 
    org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
     038import 
    org.apache.hadoop.hbase.util.ByteBufferUtils;
     039import 
    org.apache.hadoop.hbase.util.Bytes;
     040import 
    org.apache.hadoop.hbase.util.ClassSize;
     041import 
    org.apache.hadoop.io.RawComparator;
    -042
    -043import 
    org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
    -044/**
    -045 * An HBase Key/Value. This is the 
    fundamental HBase Type.
    -046 * p
    -047 * HBase applications and users should 
    use the Cell interface and avoid directly using KeyValue and
    -048 * member functions not defined in 
    Cell.
    -049 * p
    -050 * If being used client-side, the primary 
    methods to access individual fields are
    -051 * {@link #getRowArray()}, {@link 
    #getFamilyArray()}, {@link #getQualifierArray()},
    -052 * {@link #getTimestamp()}, and {@link 
    #getValueArray()}. These methods allocate new byte arrays
    -053 * and return copies. Avoid their use 
    server-side.
    -054 * p
    -055 * Instances of this class are immutable. 
    They do not implement Comparable but Comparators are
    -056 * provided. Comparators change with 
    context, whether user table or a catalog table comparison. Its
    -057 * critical you use the appropriate 
    comparator. There are Comparators for normal HFiles, Meta's
    -058 * Hfiles, and bloom filter keys.
    -059 * p
    -060 * KeyValue wraps a byte array and takes 
    offsets and lengths into passed array at where to start
    -061 * interpreting the content as KeyValue. 
    The KeyValue format inside a byte array is:
-062 * <keylength> <valuelength> <key> <value> Key is further
-063 * decomposed as: <rowlength> <row> <columnfamilylength>
-064 * <columnfamily> <columnqualifier>
-065 * <timestamp> <keytype> The rowlength maximum is
-066 * Short.MAX_SIZE, column family length maximum is Byte.MAX_SIZE, and
-067 * column qualifier + key length must be < Integer.MAX_SIZE. The column does not
-068 * contain the family/qualifier delimiter, {@link #COLUMN_FAMILY_DELIMITER}
-069 * KeyValue can optionally contain Tags. When it contains tags, it is added in the byte array after
-070 * the value part. The format for this part is: <tagslength><tagsbytes>.
-071 * tagslength maximum is Short.MAX_SIZE. The tagsbytes
-072 * contain one or more tags where as each tag is of the form
-073 * <taglength><tagtype><tagbytes>. tagtype is one byte
-074 * and taglength maximum is Short.MAX_SIZE and it includes 1 byte type
-075 * length and actual tag bytes length.
-076 */
    -077@InterfaceAudience.Private
    -078public class KeyValue implements 
    ExtendedCell {
-079  private static final ArrayList<Tag> EMPTY_ARRAY_LIST = new ArrayList<>();
    -080
    -081  private static final Log LOG = 
    LogFactory.getLog(KeyValue.class);
    -082
    -083  public static final long FIXED_OVERHEAD 
    = ClassSize.OBJECT + // the KeyValue object itself
    -084  ClassSize.REFERENCE + // pointer to 
    "bytes"
    -085  2 * Bytes.SIZEOF_INT + // offset, 
    length
    -086  Bytes.SIZEOF_LONG;// memstoreTS
    -087
    -088  /**
    -089   * Colon character in UTF-8
    -090   */
    -091  public static final char 
    COLUMN_FAMILY_DELIMITER = ':';
    -092
    -093  public static final byte[] 
    COLUMN_FAMILY_DELIM_ARRAY =
    -094new 
    byte[]{COLUMN_FAMILY_DELIMITER};
    -095
    -096  /**
    -097   * Comparator for plain key/values; 
    i.e. non-catalog table key/values. Works on Key portion
    -098   * of KeyValue only.
    -099   * @deprecated Use {@link 
    CellComparator#getInstance()} instead. Deprecated for hbase 2.0, remove for 
    hbase 3.0.
    -100   */
    -101  @Deprecated
    -102  public static final KVComparator 
    COMPARATOR = new KVComparator();
    -103  /**
-104   * A {@link KVComparator} for hbase:meta catalog table
    -105   * {@link KeyValue}s.
    -106   * @deprecated Use {@link 
    CellComparatorImpl#META_COMPARATOR} instead. Deprecated for hbase 2.0, remove 
    for hbase 3.0.
    -107   */
    -108  @Deprecated
    -109  public static final KVComparator 
    META_COMPARATOR = new MetaComparator();
    -110
    -111  /** Size of the key length field in 
    bytes*/
    -112  public static final int KEY_LENGTH_SIZE 
    = 
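
The byte layout documented in the KeyValue javadoc above can be sanity-checked without HBase at all; a plain java.nio sketch using the documented field widths (4-byte keylength and valuelength, 2-byte rowlength, 1-byte familylength, 8-byte timestamp, 1-byte keytype; the Put type code of 4 is an assumption here):

    import java.nio.ByteBuffer;

    public class KeyValueLayoutSketch {
      // Serialize one cell in the documented layout:
      // <keylength> <valuelength> <key> <value>, where key is
      // <rowlength> <row> <columnfamilylength> <columnfamily>
      // <columnqualifier> <timestamp> <keytype>.
      static byte[] write(byte[] row, byte[] family, byte[] qualifier,
          long timestamp, byte keyType, byte[] value) {
        int keyLength = 2 + row.length + 1 + family.length + qualifier.length + 8 + 1;
        ByteBuffer buf = ByteBuffer.allocate(4 + 4 + keyLength + value.length);
        buf.putInt(keyLength).putInt(value.length);
        buf.putShort((short) row.length).put(row);
        buf.put((byte) family.length).put(family);
        buf.put(qualifier);
        buf.putLong(timestamp).put(keyType);
        buf.put(value);
        return buf.array();
      }

      public static void main(String[] args) {
        byte[] kv = write("r1".getBytes(), "f".getBytes(), "q".getBytes(),
            System.currentTimeMillis(), (byte) 4 /* assumed Put */, "v".getBytes());
        System.out.println("serialized length = " + kv.length); // 25
      }
    }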
