[32/51] [partial] hbase-site git commit: Published site at 620d70d6186fb800299bcc62ad7179fccfd1be41.

2019-01-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa3fb87f/devapidocs/org/apache/hadoop/hbase/thrift2/client/ThriftClientBuilder.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift2/client/ThriftClientBuilder.html b/devapidocs/org/apache/hadoop/hbase/thrift2/client/ThriftClientBuilder.html
new file mode 100644
index 000..249a94b
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/thrift2/client/ThriftClientBuilder.html
@@ -0,0 +1,318 @@
[The hunk is the new rendered Javadoc page "ThriftClientBuilder (Apache HBase 3.0.0-SNAPSHOT API)". Its recoverable content:
Class org.apache.hadoop.hbase.thrift2.client.ThriftClientBuilder
  java.lang.Object
    org.apache.hadoop.hbase.thrift2.client.ThriftClientBuilder
Direct Known Subclasses: ThriftConnection.DefaultThriftClientBuilder, ThriftConnection.HTTPThriftClientBuilder
@InterfaceAudience.Private
public abstract class ThriftClientBuilder extends Object
Field:       protected ThriftConnection connection
Constructor: public ThriftClientBuilder(ThriftConnection connection)
Method:      public abstract Pair<org.apache.hadoop.hbase.thrift2.generated.THBaseService.Client, org.apache.thrift.transport.TTransport> getClient()
The remaining sections of the page (methods inherited from Object, field/constructor/method detail) repeat this information.]
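For orientation, here is a minimal sketch of a concrete builder written against the signatures listed above. The plain-socket transport, binary protocol, and the host/port parameters are assumptions for illustration; this is not the actual ThriftConnection.DefaultThriftClientBuilder.

  import org.apache.hadoop.hbase.thrift2.client.ThriftClientBuilder;
  import org.apache.hadoop.hbase.thrift2.client.ThriftConnection;
  import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
  import org.apache.hadoop.hbase.util.Pair;
  import org.apache.thrift.protocol.TBinaryProtocol;
  import org.apache.thrift.transport.TSocket;
  import org.apache.thrift.transport.TTransport;
  import org.apache.thrift.transport.TTransportException;

  public class SimpleThriftClientBuilder extends ThriftClientBuilder {
    private final String host; // assumed: supplied by the caller
    private final int port;    // assumed: supplied by the caller

    public SimpleThriftClientBuilder(ThriftConnection connection, String host, int port) {
      super(connection);       // the builder keeps the owning ThriftConnection in its protected field
      this.host = host;
      this.port = port;
    }

    @Override
    public Pair<THBaseService.Client, TTransport> getClient() {
      try {
        // Open a raw socket transport and wrap it in the generated THBaseService client.
        TSocket transport = new TSocket(host, port);
        transport.open();
        THBaseService.Client client = new THBaseService.Client(new TBinaryProtocol(transport));
        return new Pair<>(client, transport);
      } catch (TTransportException e) {
        throw new RuntimeException("could not open Thrift transport to " + host + ":" + port, e);
      }
    }
  }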

[32/51] [partial] hbase-site git commit: Published site at 281d6429e55149cc4c05430dcc1d1dc136d8b245.

2019-01-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/901d593a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
index 5060801..3335df0 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
[Regenerated Javadoc for the @InterfaceAudience.Private class RawAsyncHBaseAdmin, "the implementation of AsyncAdmin". The generated method-index script grows from 180 entries ("i0".."i179") to 182 ("i0".."i181"), the class declaration anchor is refreshed, and the method summary gains a new row:
  CompletableFuture<Boolean> isRpcThrottleEnabled()
      Get if the rpc throttle is enabled.
The excerpt is truncated immediately after this new entry.]
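The new method surfaces the cluster's RPC throttle switch through the asynchronous admin API. A small usage sketch, assuming the standard client bootstrap (the connection setup is ordinary HBase client API and not part of this diff):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.client.AsyncAdmin;
  import org.apache.hadoop.hbase.client.AsyncConnection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;

  public class RpcThrottleCheck {
    public static void main(String[] args) throws Exception {
      Configuration conf = HBaseConfiguration.create();
      try (AsyncConnection conn = ConnectionFactory.createAsyncConnection(conf).get()) {
        AsyncAdmin admin = conn.getAdmin();
        // isRpcThrottleEnabled() returns a CompletableFuture<Boolean>; block here for brevity.
        boolean enabled = admin.isRpcThrottleEnabled().get();
        System.out.println("RPC throttle enabled: " + enabled);
      }
    }
  }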

[32/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/package-frame.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-frame.html b/devapidocs/org/apache/hadoop/hbase/client/package-frame.html
index 2b10b8f..bdc9077 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-frame.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-frame.html
[The package frame for org.apache.hadoop.hbase.client gains the following class entries:
AsyncRegionLocatorHelper
RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer
RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer
RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer
RawAsyncHBaseAdmin.DeleteColumnFamilyProcedureBiConsumer
RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer
RawAsyncHBaseAdmin.DisableTableProcedureBiConsumer
RawAsyncHBaseAdmin.EnableTableProcedureBiConsumer
RawAsyncHBaseAdmin.MergeTableRegionProcedureBiConsumer
RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer
RawAsyncHBaseAdmin.ModifyNamespaceProcedureBiConsumer
RawAsyncHBaseAdmin.ModifyTableProcedureBiConsumer
RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer
RawAsyncHBaseAdmin.ProcedureBiConsumer
RawAsyncHBaseAdmin.ReplicationProcedureBiConsumer
RawAsyncHBaseAdmin.SplitTableRegionProcedureBiConsumer
RawAsyncHBaseAdmin.TableProcedureBiConsumer
RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer
The surrounding entries (AsyncProcessTask.Builder through RegionCoprocessorRpcChannel) are unchanged.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/client/package-summary.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-summary.html b/devapidocs/org/apache/hadoop/hbase/client/package-summary.html
index 917a200..f594746 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-summary.html
[The package summary table gains one new row:
AsyncRegionLocatorHelper - Helper class for asynchronous region locator.
Every other change in the hunk is the generated alternation of altColor/rowColor row classes shifting by one for the entries that follow: AsyncRegistryFactory, AsyncRequestFutureImpl<CResult>, AsyncRequestFutureImpl.ReplicaResultState, AsyncRpcRetryingCaller<T>, AsyncRpcRetryingCallerFactory, AsyncScanSingleRegionRpcRetryingCaller, AsyncServerRequestRpcRetryingCaller<T>, AsyncSingleRequestRpcRetryingCaller<T>, AsyncTableBuilderBase<C extends ScanResultConsumerBase>, AsyncTableImpl, AsyncTableRegionLocatorImpl, AsyncTableResultScanner, BatchErrors, BatchScanResultCache, BufferedMutatorImpl, BufferedMutatorParams, CancellableRegionServerCallable<T>, ClientAsyncPrefetchScanner, ClientCoprocessorRpcController, ClientIdGenerator, ClientScanner, ClientServiceCallable<T>, ClientSideRegionScanner, ClientSimpleScanner, ClientUtil, ClusterStatusListener, ColumnFamilyDescriptorBuilder, and ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor. Their descriptions are unchanged; the excerpt is truncated inside the ModifyableColumnFamilyDescriptor description ("An ModifyableFamilyDescriptor contains information about a column family such as the number of ...").]
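The RawAsyncHBaseAdmin.*ProcedureBiConsumer names added in this commit suggest completion callbacks in the java.util.function.BiConsumer shape, attached to procedure futures. A generic illustration of that pattern using only standard Java (this is not the HBase implementation):

  import java.util.concurrent.CompletableFuture;
  import java.util.function.BiConsumer;

  public class ProcedureCallbackSketch {
    // Completion callback in the (result, error) shape: exactly one argument is meaningful.
    static final BiConsumer<Void, Throwable> LOG_ON_COMPLETE = (ignored, error) -> {
      if (error != null) {
        System.err.println("procedure failed: " + error);
      } else {
        System.out.println("procedure completed");
      }
    };

    public static void main(String[] args) {
      CompletableFuture<Void> procedureFuture = CompletableFuture.completedFuture(null);
      // whenComplete() hands the (result, throwable) pair to the BiConsumer when the future settles.
      procedureFuture.whenComplete(LOG_ON_COMPLETE).join();
    }
  }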

[32/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html b/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
index 8fc0ace..38c35a3 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.html
[Regenerated Javadoc for ThriftHBaseServiceHandler, "a glue object that connects Thrift RPC calls to the HBase client API primarily defined in the Table interface". The class hierarchy now reads java.lang.Object -> org.apache.hadoop.hbase.thrift.HBaseServiceHandler -> org.apache.hadoop.hbase.thrift2.ThriftHBaseServiceHandler, i.e. the class extends HBaseServiceHandler instead of Object while still implementing org.apache.hadoop.hbase.thrift2.generated.THBaseService.Iface. The generated method-index script shrinks from 57 entries to 54 and the "Static Methods" tab disappears. The inner class ThriftHBaseServiceHandler.THBaseServiceMetricsProxy and the fields CLEANUP_INTERVAL, connectionCache, and MAX_IDLETIME no longer appear on this page; instead CLEANUP_INTERVAL, conf, connectionCache, MAX_IDLETIME, and metrics are listed under "Fields inherited from class org.apache.hadoop.hbase.thrift.HBaseServiceHandler". The "Next Class" navigation link is also regenerated; the excerpt is truncated in the method detail section.]
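In other words, the shared plumbing (configuration, connection cache, idle-cleanup settings, metrics) moved into a common base class, and the thrift2 handler now only carries what is specific to THBaseService.Iface. A schematic of that shape in plain Java; the member names mirror the page, while the stand-in types and everything else are assumptions:

  // Schematic only; not the actual org.apache.hadoop.hbase.thrift classes.
  abstract class BaseServiceHandler {
    protected final Object conf;            // stands in for org.apache.hadoop.conf.Configuration
    protected final Object connectionCache; // stands in for org.apache.hadoop.hbase.util.ConnectionCache

    protected BaseServiceHandler(Object conf, Object connectionCache) {
      this.conf = conf;
      this.connectionCache = connectionCache;
    }
  }

  class Thrift2ServiceHandler extends BaseServiceHandler {
    // Handler-specific state stays here, e.g. the scanner bookkeeping shown on the page.
    private final java.util.Map<Integer, Object> scannerMap = new java.util.HashMap<>();

    Thrift2ServiceHandler(Object conf, Object connectionCache) {
      super(conf, connectionCache);
    }
  }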

[32/51] [partial] hbase-site git commit: Published site at 3ab895979b643a2980bcdb7fee2078f14b614210.

2019-01-01 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html
index 3b2d877..656906f 100644
[Footer-only change: the page footer moves from "Copyright © 2007-2018 The Apache Software Foundation. All rights reserved." to "Copyright © 2007-2019 ...". The same one-line footer update is applied, under http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/client/, to:
AsyncConnectionImpl.html (index b4d2a85..2870a3d)
AsyncHBaseAdmin.html (index 4b8bc4e..a62e81e)
AsyncMasterRequestRpcRetryingCaller.Callable.html (index 1b9dba6..ea973b1)
AsyncMasterRequestRpcRetryingCaller.html (index a8bfef0..cfc519f)
AsyncMetaRegionLocator.html (index 4d46e6d..0c27135)]


[32/51] [partial] hbase-site git commit: Published site at c448604ceb987d113913f0583452b2abce04db0d.

2018-12-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f8b8424/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
index d1b845f..b3c08b5 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
[Re-anchored Javadoc for the private abstract static class HRegion.BatchOperation<T>, the class that "tracks the progress of a batch of operations, accumulating status codes and tracking the index at which processing is proceeding"; the hunks only refresh the source-line anchors behind the member links, not the signatures. The page lists the fields operations (T[]), retCodeDetails (OperationStatus[]), walEditsFromCoprocessors (WALEdit[]), familyCellMaps (Map<byte[],List<Cell>>[]), region (HRegion), nextIndexToProcess (int), observedExceptions (HRegion.ObservedExceptionsInBatch), durability (Durability), and atomic (boolean); the constructor BatchOperation(HRegion region, T[] operations); visitBatchOperations(boolean pendingOnly, int lastIndexExclusive, HRegion.BatchOperation.Visitor visitor) throws IOException; and the abstract methods getMutation(int index), getNonceGroup(int index), getNonce(int index), and getMutationsForCoprocs() ("potentially expensive", per the truncated note).]
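The page above describes a small but central data structure: an array of operations with parallel per-operation status arrays and a progress cursor. A simplified, generic schematic of that shape (HRegion.BatchOperation itself is a private inner class, so this is illustrative only):

  import java.util.Arrays;

  /** Simplified sketch: operations plus parallel per-operation status and a progress cursor. */
  class BatchTracker<T> {
    enum Status { NOT_RUN, SUCCESS, FAILURE }

    interface Visitor<U> {
      /** Return false to stop the visit early. */
      boolean visit(int index, U op);
    }

    private final T[] operations;            // like BatchOperation.operations
    private final Status[] retCodeDetails;   // like BatchOperation.retCodeDetails (OperationStatus[])
    private int nextIndexToProcess = 0;      // index at which processing is proceeding

    BatchTracker(T[] operations) {
      this.operations = operations;
      this.retCodeDetails = new Status[operations.length];
      Arrays.fill(this.retCodeDetails, Status.NOT_RUN);
    }

    /** Visit operations in [nextIndexToProcess, lastIndexExclusive), optionally only the pending ones. */
    void visitBatchOperations(boolean pendingOnly, int lastIndexExclusive, Visitor<T> visitor) {
      for (int i = nextIndexToProcess; i < lastIndexExclusive; i++) {
        if (!pendingOnly || retCodeDetails[i] == Status.NOT_RUN) {
          if (!visitor.visit(i, operations[i])) {
            break;
          }
        }
      }
    }
  }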

[32/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

2018-12-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterMetaBootstrap.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterMetaBootstrap.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterMetaBootstrap.html
index 7b680e9..a730a53 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterMetaBootstrap.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterMetaBootstrap.html
[Regenerated source view of MasterMetaBootstrap. The functional change in the meta-replica assignment code is that the meta location lookup becomes a static call:
-    ServerName metaServername =
-        this.master.getMetaTableLocator().getMetaRegionLocation(this.master.getZooKeeper());
+    ServerName metaServername = MetaTableLocator.getMetaRegionLocation(this.master.getZooKeeper());
The rest of the excerpt (assigning replica regions 1..numReplicas, retaining the old assignment from the ZooKeeper meta state when it differs from the current hbase:meta location, then unassignExcessMetaReplica(numReplicas), which closes and deletes znodes for replicas beyond the configured count and only logs cleanup failures) is unchanged apart from the one-line shift in the rendered source-line numbers; the excerpt is truncated inside unassignExcessMetaReplica().]

[32/51] [partial] hbase-site git commit: Published site at 1acbd36c903b048141866b143507bfce124a5c5f.

2018-11-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5299e667/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
index 609b574..28c0aaf 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
[Regenerated Javadoc for HMasterCommandLine.LocalHMaster. The visible change is the regenerated "Methods inherited from class org.apache.hadoop.hbase.master.HMaster" member list (abort, abortProcedure, addColumn, addReplicationPeer, balance, balanceSwitch, ..., waitForMasterActive, waitForMetaOnline, waitForNamespaceOnline); the excerpt is cut off partway through the replacement list, before the point where it diverges from the old one.]

[32/51] [partial] hbase-site git commit: Published site at 130057f13774f6b213cdb06952c805a29d59396e.

2018-11-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/68eae623/devapidocs/org/apache/hadoop/hbase/security/access/class-use/AuthManager.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/access/class-use/AuthManager.html b/devapidocs/org/apache/hadoop/hbase/security/access/class-use/AuthManager.html
new file mode 100644
index 000..6dfcacd
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/class-use/AuthManager.html
@@ -0,0 +1,249 @@
[The hunk is the new rendered page "Uses of Class org.apache.hadoop.hbase.security.access.AuthManager (Apache HBase 3.0.0-SNAPSHOT API)". Its recoverable content, all in package org.apache.hadoop.hbase.security.access:
Fields declared as AuthManager: private AuthManager AccessChecker.authManager; private AuthManager ZKPermissionWatcher.authManager; private AuthManager AccessControlFilter.authManager.
Fields with type parameters of type AuthManager: private static Map<ZKWatcher,AuthManager> AuthManager.managerMap; private static Map<AuthManager,Integer> AuthManager.refCount.
Methods that return AuthManager: AccessChecker.getAuthManager(); AccessController.getAuthManager(); static AuthManager AuthManager.getOrCreate(ZKWatcher watcher, org.apache.hadoop.conf.Configuration conf) ("Returns a AuthManager from the cache.").
Methods with parameters of type AuthManager: static void AuthManager.release(AuthManager instance) ("Releases the resources for the given AuthManager if the reference count is down to 0.").
Constructors with parameters of type AuthManager: AccessControlFilter(AuthManager mgr, User ugi, TableName tableName, AccessControlFilter.Strategy strategy, Map<ByteRange,Integer> cfVsMaxVersions); ZKPermissionWatcher(ZKWatcher watcher, AuthManager authManager, org.apache.hadoop.conf.Configuration conf).]
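The managerMap/refCount fields together with getOrCreate(...) and release(...) describe a reference-counted, get-or-create cache keyed by the ZooKeeper watcher. A generic sketch of that pattern (illustrative only, not the HBase AuthManager implementation):

  import java.util.HashMap;
  import java.util.Map;
  import java.util.function.Function;

  /** Generic get-or-create cache with explicit reference counting. */
  final class RefCountedCache<K, V> {
    private final Map<K, V> instances = new HashMap<>();      // like AuthManager.managerMap
    private final Map<V, Integer> refCount = new HashMap<>(); // like AuthManager.refCount
    private final Function<K, V> factory;

    RefCountedCache(Function<K, V> factory) {
      this.factory = factory;
    }

    synchronized V getOrCreate(K key) {
      V value = instances.computeIfAbsent(key, factory);
      refCount.merge(value, 1, Integer::sum);
      return value;
    }

    /** Drops one reference; the instance is evicted once its count reaches zero. */
    synchronized void release(K key, V value) {
      Integer count = refCount.merge(value, -1, Integer::sum);
      if (count != null && count <= 0) {
        refCount.remove(value);
        instances.remove(key);
      }
    }
  }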

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/68eae623/devapidocs/org/apache/hadoop/hbase/security/access/class-use/GlobalPermission.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/access/class-use/GlobalPermission.html b/devapidocs/org/apache/hadoop/hbase/security/access/class-use/GlobalPermission.html
new file mode 100644

[32/51] [partial] hbase-site git commit: Published site at d5e4faacc354c1bc4d93efa71ca97ee3a056123e.

2018-10-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b5e107c3/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
index 0af8acd..c5f21ac 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
[Regenerated source view of ProcedureExecutor. In the replay path the step order changes: locks are now restored (restoreLocks()) before the waiting-timeout procedures are pushed to the timeout executor, and runnable procedures are simply appended with scheduler.addBack(p), dropping the previous addFront-vs-addBack special case that checked isLockedWhenLoading() on the procedure and its parent (together with the long comment about also checking ancestors' locks, HBASE-21384). Worker threads are still signalled only after all procedures are queued, to avoid the race condition noted for HBASE-21364. The excerpt then shows the unchanged javadoc of init(int numThreads, boolean abortOnCorruption) and the pool sizing (corePoolSize = numThreads, maxPoolSize = 10 * numThreads, "Starting {} core workers (bigger of cpus/4 or 16) with max (burst) worker count={}") before it is truncated.]

[32/51] [partial] hbase-site git commit: Published site at 3fe8649b2c9ba1271c25e8f476548907e4c7a90d.

2018-10-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8f09a71d/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.CostFunction.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.CostFunction.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.CostFunction.html
index c7d99b2..9d1542c 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.CostFunction.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.CostFunction.html
[Regenerated source view of StochasticLoadBalancer. In the new rendering the 'start StochasticLoadBalancer.balancer, initCost=..., functionCost=...' log line no longer appears at this point; initCost and newCost are captured directly from currentCost. The remainder of the excerpt shows the old rendering of the balancing walk before it is truncated (the replacement lines are cut off): computedMaxSteps is derived from maxSteps and numRegions * stepsPerRegion * numServers (the max of the two if runMaxSteps, otherwise the min); each step picks nextAction(cluster), skips Type.NULL actions, applies the action, recomputes the cost, keeps the move (updating the JMX cost snapshots) if the new cost is lower and otherwise undoes it, and stops early once maxRunningTime is exceeded. Afterwards the balancer records metrics, calls updateStochasticCosts(tableName, ...), and either returns region plans when the cost improved ("Finished computing new load balance plan. ...") or logs "Could not find a better load balance plan. ..." and returns null; the updateStochasticCosts(...) helper and the start of functionCost() follow before the truncation.]
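The loop described above is a bounded accept-if-better random walk over candidate balancing actions. A compact, generic version of that loop (simplified; not the HBase code, which also tracks per-function costs and emits region plans):

  import java.util.function.DoubleSupplier;

  final class StochasticWalkSketch {
    interface Action {
      void apply();
      void undo();
    }

    interface ActionSource {
      /** Propose a random candidate move; may return null for "no move" (the Type.NULL case). */
      Action next();
    }

    /** Walks at most maxSteps or maxRunningTimeMs and returns the final (best) cost reached. */
    static double walk(ActionSource actions, DoubleSupplier costFunction,
                       long maxSteps, long maxRunningTimeMs) {
      long start = System.currentTimeMillis();
      double currentCost = costFunction.getAsDouble();
      for (long step = 0; step < maxSteps; step++) {
        Action action = actions.next();
        if (action == null) {
          continue;
        }
        action.apply();
        double newCost = costFunction.getAsDouble();
        if (newCost < currentCost) {
          currentCost = newCost;   // keep the improving move
        } else {
          action.undo();           // put things back the way they were
        }
        if (System.currentTimeMillis() - start > maxRunningTimeMs) {
          break;                   // time budget exhausted
        }
      }
      return currentCost;
    }
  }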

[32/51] [partial] hbase-site git commit: Published site at 7adf590106826b9e4432cfeee06acdc0ccff8c6e.

2018-10-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/425db230/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStore.ProcedureIterator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStore.ProcedureIterator.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStore.ProcedureIterator.html
index e4dc134..2945c58 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStore.ProcedureIterator.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStore.ProcedureIterator.html
@@ -93,157 +93,163 @@
 085boolean hasNext();
 086
 087/**
-088 * @return true if the iterator next 
element is a completed procedure.
-089 */
-090boolean isNextFinished();
-091
-092/**
-093 * Skip the next procedure
-094 */
-095void skipNext();
-096
-097/**
-098 * Returns the next procedure in the 
iteration.
-099 * @throws IOException if there was 
an error fetching/deserializing the procedure
-100 * @return the next procedure in the 
iteration.
-101 */
-102@SuppressWarnings("rawtypes")
-103Procedure next() throws 
IOException;
-104  }
-105
-106  /**
-107   * Interface passed to the 
ProcedureStore.load() method to handle the store-load events.
-108   */
-109  public interface ProcedureLoader {
-110/**
-111 * Called by ProcedureStore.load() to 
notify about the maximum proc-id in the store.
-112 * @param maxProcId the highest 
proc-id in the store
-113 */
-114void setMaxProcId(long maxProcId);
-115
+088 * Calling this method does not need 
to converting the protobuf message to the Procedure class,
+089 * so if it returns true we can call 
{@link #skipNext()} to skip the procedure without
+090 * deserializing. This could increase 
the performance.
+091 * @return true if the iterator next 
element is a completed procedure.
+092 */
+093boolean isNextFinished();
+094
+095/**
+096 * Skip the next procedure
+097 * p/
+098 * This method is used to skip the 
deserializing of the procedure to increase performance, as
+099 * when calling next we need to 
convert the protobuf message to the Procedure class.
+100 */
+101void skipNext();
+102
+103/**
+104 * Returns the next procedure in the 
iteration.
+105 * @throws IOException if there was 
an error fetching/deserializing the procedure
+106 * @return the next procedure in the 
iteration.
+107 */
+108@SuppressWarnings("rawtypes")
+109Procedure next() throws 
IOException;
+110  }
+111
+112  /**
+113   * Interface passed to the 
ProcedureStore.load() method to handle the store-load events.
+114   */
+115  public interface ProcedureLoader {
 116/**
-117 * Called by the 
ProcedureStore.load() every time a set of procedures are ready to be 
executed.
-118 * The ProcedureIterator passed to 
the method, has the procedure sorted in replay-order.
-119 * @param procIter iterator over the 
procedures ready to be added to the executor.
-120 */
-121void load(ProcedureIterator procIter) 
throws IOException;
-122
-123/**
-124 * Called by the 
ProcedureStore.load() in case we have procedures not-ready to be added to
-125 * the executor, which probably means 
they are corrupted since some information/link is missing.
-126 * @param procIter iterator over the 
procedures not ready to be added to the executor, corrupted
-127 */
-128void 
handleCorrupted(ProcedureIterator procIter) throws IOException;
-129  }
-130
-131  /**
-132   * Add the listener to the notification 
list.
-133   * @param listener The 
AssignmentListener to register
-134   */
-135  void 
registerListener(ProcedureStoreListener listener);
+117 * Called by ProcedureStore.load() to 
notify about the maximum proc-id in the store.
+118 * @param maxProcId the highest 
proc-id in the store
+119 */
+120void setMaxProcId(long maxProcId);
+121
+122/**
+123 * Called by the 
ProcedureStore.load() every time a set of procedures are ready to be 
executed.
+124 * The ProcedureIterator passed to 
the method, has the procedure sorted in replay-order.
+125 * @param procIter iterator over the 
procedures ready to be added to the executor.
+126 */
+127void load(ProcedureIterator procIter) 
throws IOException;
+128
+129/**
+130 * Called by the 
ProcedureStore.load() in case we have procedures not-ready to be added to
+131 * the executor, which probably means 
they are corrupted since some information/link is missing.
+132 * @param procIter iterator over the 
procedures not ready to be added to the executor, corrupted
+133 */
+134void 
handleCorrupted(ProcedureIterator procIter) throws IOException;
+135  }
 136
 137  /**
-138   * Remove the listener from the 
notification list.
-139   * 
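A minimal sketch (not part of the patch above) of a ProcedureLoader that uses the isNextFinished()/skipNext() pair described in the new javadoc, so completed procedures are skipped without deserializing the protobuf message. The class name and the "hand to executor" step are made up for illustration.

import java.io.IOException;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureLoader;

public class SkippingProcedureLoader implements ProcedureLoader {
  private long maxProcId;

  @Override
  public void setMaxProcId(long maxProcId) {
    // highest proc-id present in the store; remembered here only for illustration
    this.maxProcId = maxProcId;
  }

  @Override
  public void load(ProcedureIterator procIter) throws IOException {
    while (procIter.hasNext()) {
      if (procIter.isNextFinished()) {
        // completed procedure: skip it without converting the protobuf message
        procIter.skipNext();
      } else {
        Procedure<?> proc = procIter.next(); // this is where deserialization happens
        // hand 'proc' to the executor here
      }
    }
  }

  @Override
  public void handleCorrupted(ProcedureIterator procIter) throws IOException {
    while (procIter.hasNext()) {
      procIter.skipNext(); // corrupted entries are simply skipped in this sketch
    }
  }
}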

[32/51] [partial] hbase-site git commit: Published site at 5fbb227deb365fe812d433fe39b85ac4b0ddee20.

2018-10-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c9ebe686/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
index 37eb97f..d0a08f6 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
@@ -519,7 +519,7 @@ extends Procedure
-addStackIndex,
 afterReplay,
 beforeReplay,
 compareTo,
 completionCleanup,
 doExecute,
 doRollback,
 elapsedTime,
 getChildrenLatch,
 getException,
 getLastUpdate,
 getNonceKey,
 getOwner,
 getParentProcId,
 getProcId,
 getProcIdHashCode,
 getProc
 Name, getResult,
 getRootProcedureId,
 getRootProcId,
 getStackIndexes,
 getState,
 getSubmittedTime,
 getTimeout,
 getTimeoutTimestamp,
 hasChildren,
 hasException,
 hasLock,
 hasOwner,
 hasParent,
 hasTimeout,
 haveSameParent,
 incChildrenLatch,
 isBypass, isFailed,
 isFinished,
 isInitializing,
 isRunnable,
 isSuccess,
 isWaiting,
 removeStackIndex,
 setAbortFailure,
 setChildrenLatch,
 setFailure,
 setFailure,
 setLastUpdate,
 setNonceKey,
 setOwner,
 setOwner,
 setParentProcId, setProcId,
 setResult,
 setRootProcId,
 setStackIndexes,
 setState,
 setSubmittedTime,
 setTimeout,
 setTimeoutFailure, shouldWaitClientAck,
 skipPersistence,
 toString,
 toStringClass,
 toStringDetails,
 toStringSimpleSB,
 updateMetricsOnFinish,
 updateMetricsOnSubmit, updateTimestamp,
 wasExecuted
+addStackIndex,
 afterReplay,
 beforeReplay,
 bypass,
 compareTo,
 completionCleanup,
 doExecute,
 doRollback,
 elapsedTime,
 getChildrenLatch,
 getException,
 getLastUpdate,
 getNonceKey,
 getOwner,
 getParentProcId,
 getProcId,
 getProcIdHa
 shCode, getProcName,
 getResult,
 getRootProcedureId,
 getRootProcId,
 getStackIndexes,
 getState,
 getSubmittedTime,
 getTimeout,
 getTimeoutTimestamp,
 hasChildren,
 hasException,
 hasLock,
 hasOwner,
 hasParent,
 hasTimeout,
 haveSameParent,
 incChildrenLatch, isBypass,
 isFailed,
 isFinished,
 isInitializing,
 isRunnable,
 isSuccess,
 isWaiting,
 removeStackIndex,
 setAbortFailure, setChildrenLatch,
 setFailure,
 setFailure,
 setLastUpdate,
 setNonceKey,
 setOwner,
 setOwner, setParentProcId,
 setProcId,
 setResult,
 setRootProcId,
 setStackIndexes,
 setState,
 setSubmittedTime,
 setTimeout, setTimeoutFailure,
 shouldWaitClientAck,
 skipPersistence,
 toString,
 toStringClass,
 toStringDetails,
 toStringSimpleSB,
 updateMetricsOnFinish, updateMetricsOnSubmit,
 updateTimestamp,
 wasExecuted
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c9ebe686/devapidocs/org/apache/hadoop/hbase/master/assignment/TransitRegionStateProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/TransitRegionStateProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/TransitRegionStateProcedure.html
index f3fb747..777b30b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/TransitRegionStateProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/TransitRegionStateProcedure.html
@@ -134,7 +134,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class TransitRegionStateProcedure
+public class TransitRegionStateProcedure
 extends AbstractStateMachineRegionProcedureorg.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RegionStateTransitionState
 The procedure to deal with the state transition of a 
region. A region with a TRSP in place is
  called RIT, i.e, RegionInTransition.
@@ -288,7 +288,7 @@ extends TransitRegionStateProcedure()
 
 
-private 
+protected 
 TransitRegionStateProcedure(MasterProcedureEnvenv,
RegionInfohri,
ServerNameassignCandidate,
@@ -514,7 +514,7 @@ extends Procedure
-addStackIndex,
 afterReplay,
 beforeReplay,
 compareTo,
 completionCleanup,
 doExecute,
 doRollback,
 elapsedTime,
 getChildrenLatch,
 getException,
 getLastUpdate,
 getNonceKey,
 getOwner,
 getParentProcId,
 getProcId,
 getProcIdHashCode,
 getProc
 Name, getResult,
 getRootProcedureId,
 getRootProcId,
 getStackIndexes,
 getState,
 getSubmittedTime,
 getTimeout,
 getTimeoutTimestamp,
 hasChildren,
 hasException,
 hasLock,
 hasOwner,
 hasParent,
 hasTimeout,
 haveSameParent,
 incChildrenLatch,
 isBypass, isFailed,
 isFinished,
 isInitializing,
 isRunnable,
 isSuccess,
 isWaiting,
 removeStackIndex,
 setAbortFailure,
 setChildrenLatch,
 setFailure,
 setFailure,
 setLastUpdate,
 setNonceKey,
 setOwner,
 setOwner,
 setParentProcId, setProcId,
 setResult,
 setRootProcId,
 setStackIndexes,
 setState,
 setSubmittedTime,
 setTimeout,
 shouldWaitClientAck, skipPersistence,
 toString,
 

[32/51] [partial] hbase-site git commit: Published site at 821e4d7de2d576189f4288d1c2acf9e9a9471f5c.

2018-10-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/323b17d9/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/BitSetNode.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/BitSetNode.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/BitSetNode.html
index ecd1970..be5c3fc 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/BitSetNode.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/BitSetNode.html
@@ -28,404 +28,405 @@
 020import java.util.ArrayList;
 021import java.util.Arrays;
 022import java.util.List;
-023import 
org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker.DeleteState;
-024import 
org.apache.yetus.audience.InterfaceAudience;
-025
-026import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
-027
-028/**
-029 * A bitmap which can grow/merge with other {@link BitSetNode} (if certain conditions are met).
-030 * Boundaries of bitmap are aligned to multiples of {@link BitSetNode#BITS_PER_WORD}. So the range
-031 * of a {@link BitSetNode} is from [x * K, y * K) where x and y are integers, y > x and K is
-032 * BITS_PER_WORD.
-033 * <p/>
-034 * We have two main bit sets to describe the state of procedures, the meanings are:
-035 *
-036 * <pre>
-037 *  ----------------------------------------
-038 * | modified | deleted |  meaning
-039 * | 0        |   0     |  proc exists, but hasn't been updated since last resetUpdates().
-040 * | 1        |   0     |  proc was updated (but not deleted).
-041 * | 1        |   1     |  proc was deleted.
-042 * | 0        |   1     |  proc doesn't exist (maybe never created, maybe deleted in past).
-043 * ----------------------------------------
-044 * </pre>
-045 *
-046 * The meaning of modified is that, we have modified the state of the procedure, no matter insert,
-047 * update, or delete. And if it is an insert or update, we will set the deleted to 0, if not we will
-048 * set the delete to 1.
-049 * <p/>
-050 * For a non-partial BitSetNode, the initial modified value is 0 and deleted value is 1. For the
-051 * partial one, the initial modified value is 0 and the initial deleted value is also 0. In
-052 * {@link #unsetPartialFlag()} we will reset the deleted to 1 if it is not modified.
-053 */
-054@InterfaceAudience.Private
-055class BitSetNode {
-056  private static final long WORD_MASK = 0xffffffffffffffffL;
-057  private static final int ADDRESS_BITS_PER_WORD = 6;
-058  private static final int BITS_PER_WORD = 1 << ADDRESS_BITS_PER_WORD;
-059  private static final int MAX_NODE_SIZE = 1 << ADDRESS_BITS_PER_WORD;
-060
-061  /**
-062   * Mimics {@link 
ProcedureStoreTracker#partial}. It will effect how we fill the new deleted 
bits
-063   * when growing.
-064   */
-065  private boolean partial;
-066
-067  /**
-068   * Set of procedures which have been 
modified since last {@link #resetModified()}. Useful to track
-069   * procedures which have been modified 
since last WAL write.
-070   */
-071  private long[] modified;
-072
-073  /**
-074   * Keeps track of procedure ids which 
belong to this bitmap's range and have been deleted. This
-075   * represents global state since it's 
not reset on WAL rolls.
-076   */
-077  private long[] deleted;
-078  /**
-079   * Offset of bitmap i.e. procedure id 
corresponding to first bit.
-080   */
-081  private long start;
-082
-083  public void dump() {
-084System.out.printf("%06d:%06d min=%d max=%d%n", getStart(), getEnd(), getActiveMinProcId(),
-085  getActiveMaxProcId());
-086System.out.println("Modified:");
-087for (int i = 0; i < modified.length; ++i) {
-088  for (int j = 0; j < BITS_PER_WORD; ++j) {
-089System.out.print((modified[i] & (1L << j)) != 0 ? "1" : "0");
-090  }
-091  System.out.println(" " + i);
-092}
-093System.out.println();
-094System.out.println("Delete:");
-095for (int i = 0; i < deleted.length; ++i) {
-096  for (int j = 0; j < BITS_PER_WORD; ++j) {
-097System.out.print((deleted[i] & (1L << j)) != 0 ? "1" : "0");
-098  }
-099  System.out.println(" " + i);
-100}
-101System.out.println();
-102  }
-103
-104  public BitSetNode(long procId, boolean 
partial) {
-105start = alignDown(procId);
-106
-107int count = 1;
-108modified = new long[count];
-109deleted = new long[count];
-110if (!partial) {
-111  Arrays.fill(deleted, WORD_MASK);
-112}
-113
-114this.partial = partial;
-115updateState(procId, false);
-116  }
-117
-118  public 
BitSetNode(ProcedureProtos.ProcedureStoreTracker.TrackerNode data) {
-119start = data.getStartId();
-120int size = data.getUpdatedCount();
-121assert size == 
data.getDeletedCount();
-122modified = new long[size];
-123deleted = new long[size];
-124for (int i = 0; i < size; ++i) {
-125  modified[i] = data.getUpdated(i);
-126  deleted[i] = data.getDeleted(i);
-127}
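A self-contained sketch of the bit addressing used in the listing above (ADDRESS_BITS_PER_WORD = 6, so each long word covers 64 proc-ids). The class and method names below are illustrative and are not part of BitSetNode itself.

public final class BitAddressingSketch {
  private static final int ADDRESS_BITS_PER_WORD = 6;
  private static final int BITS_PER_WORD = 1 << ADDRESS_BITS_PER_WORD; // 64 bits per long word

  // Align a proc-id down to the start of its 64-bit word, like alignDown(procId) above.
  static long alignDown(long procId) {
    return procId & ~(BITS_PER_WORD - 1);
  }

  // Test the "modified" bit for procId in a node whose range starts at 'start'.
  static boolean isModified(long[] modified, long start, long procId) {
    int bitIndex = (int) (procId - start);
    int wordIndex = bitIndex >> ADDRESS_BITS_PER_WORD;    // bitIndex / 64
    return (modified[wordIndex] & (1L << bitIndex)) != 0; // long shifts use the count mod 64
  }

  public static void main(String[] args) {
    long start = alignDown(130);         // 128
    long[] modified = new long[2];
    int bitIndex = (int) (130 - start);  // 2
    modified[bitIndex >> ADDRESS_BITS_PER_WORD] |= 1L << bitIndex;
    System.out.println(isModified(modified, start, 130)); // prints true
  }
}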

[32/51] [partial] hbase-site git commit: Published site at fa5fa6ecdd071b72b58971058ff3ab9d28c3e709.

2018-10-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d1341859/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.MasterProcedureStoreListener.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.MasterProcedureStoreListener.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.MasterProcedureStoreListener.html
deleted file mode 100644
index 810ec00..000
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.MasterProcedureStoreListener.html
+++ /dev/null
@@ -1,244 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-Source code
-
-
-
-
-001/**
-002 * Licensed to the Apache Software 
Foundation (ASF) under one
-003 * or more contributor license 
agreements.  See the NOTICE file
-004 * distributed with this work for 
additional information
-005 * regarding copyright ownership.  The 
ASF licenses this file
-006 * to you under the Apache License, 
Version 2.0 (the
-007 * "License"); you may not use this file 
except in compliance
-008 * with the License.  You may obtain a 
copy of the License at
-009 *
-010 * 
http://www.apache.org/licenses/LICENSE-2.0
-011 *
-012 * Unless required by applicable law or 
agreed to in writing, software
-013 * distributed under the License is 
distributed on an "AS IS" BASIS,
-014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-015 * See the License for the specific 
language governing permissions and
-016 * limitations under the License.
-017 */
-018
-019package 
org.apache.hadoop.hbase.master.procedure;
-020
-021import java.io.IOException;
-022import 
org.apache.hadoop.conf.Configuration;
-023import org.apache.hadoop.fs.FileSystem;
-024import org.apache.hadoop.fs.Path;
-025import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-026import 
org.apache.hadoop.hbase.ipc.RpcServer;
-027import 
org.apache.hadoop.hbase.master.MasterCoprocessorHost;
-028import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-029import 
org.apache.hadoop.hbase.master.MasterServices;
-030import 
org.apache.hadoop.hbase.master.assignment.AssignmentManager;
-031import 
org.apache.hadoop.hbase.master.replication.ReplicationPeerManager;
-032import 
org.apache.hadoop.hbase.procedure2.Procedure;
-033import 
org.apache.hadoop.hbase.procedure2.ProcedureEvent;
-034import 
org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
-035import 
org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
-036import 
org.apache.hadoop.hbase.security.Superusers;
-037import 
org.apache.hadoop.hbase.security.User;
-038import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-039import 
org.apache.hadoop.hbase.util.FSUtils;
-040import 
org.apache.yetus.audience.InterfaceAudience;
-041import 
org.apache.yetus.audience.InterfaceStability;
-042import org.slf4j.Logger;
-043import org.slf4j.LoggerFactory;
-044
-045@InterfaceAudience.Private
-046@InterfaceStability.Evolving
-047public class MasterProcedureEnv 
implements ConfigurationObserver {
-048  private static final Logger LOG = 
LoggerFactory.getLogger(MasterProcedureEnv.class);
-049
-050  @InterfaceAudience.Private
-051  public static class 
WALStoreLeaseRecovery implements WALProcedureStore.LeaseRecovery {
-052private final MasterServices 
master;
-053
-054public WALStoreLeaseRecovery(final 
MasterServices master) {
-055  this.master = master;
-056}
-057
-058@Override
-059public void recoverFileLease(final 
FileSystem fs, final Path path) throws IOException {
-060  final Configuration conf = 
master.getConfiguration();
-061  final FSUtils fsUtils = 
FSUtils.getInstance(fs, conf);
-062  fsUtils.recoverFileLease(fs, path, 
conf, new CancelableProgressable() {
-063@Override
-064public boolean progress() {
-065  LOG.debug("Recover Procedure 
Store log lease: " + path);
-066  return isRunning();
-067}
-068  });
-069}
-070
-071private boolean isRunning() {
-072  return !master.isStopped() && !master.isStopping() && !master.isAborted();
-073}
-074  }
-075
-076  @InterfaceAudience.Private
-077  public static class 
MasterProcedureStoreListener
-078  implements 
ProcedureStore.ProcedureStoreListener {
-079private final MasterServices 
master;
-080
-081public 
MasterProcedureStoreListener(final MasterServices master) {
-082  this.master = master;
-083}
-084
-085@Override
-086public void postSync() {
-087  // no-op
-088}
-089
-090@Override
-091public void abortProcess() {
-092  master.abort("The Procedure Store 
lost the lease", null);
-093}
-094  }
-095
-096  private final RSProcedureDispatcher 
remoteDispatcher;
-097  private final MasterProcedureScheduler 
procSched;
-098  private final MasterServices master;
-099
-100  public MasterProcedureEnv(final 
MasterServices master) 

[32/51] [partial] hbase-site git commit: Published site at 6bc7089f9e0793efc9bdd46a84f5ccd9bc4579ad.

2018-09-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/419d0338/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureInMemoryChore.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureInMemoryChore.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureInMemoryChore.html
index b5ac659..376bcd4 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureInMemoryChore.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureInMemoryChore.html
@@ -243,7 +243,7 @@ extends Procedure
-acquireLock,
 addStackIndex,
 afterReplay,
 beforeReplay,
 bypass,
 compareTo,
 completionCleanup,
 doAcquireLock, doExecute,
 doReleaseLock,
 doRollback,
 elapsedTime,
 getChildrenLatch,
 getException,
 getLastUpdate,
 getNonceKey,
 getOwner,
 getParentProcId,
 getProcedureMetrics,
 getProcId,
 getProcIdHashCode,
 getProcName,
 getResult,
 getRootProcedureId,
 getRootProcId, getStackIndexes,
 getState,
 getSubmittedTime,
 getTimeout,
 getTimeoutTimestamp,
 hasChildren,
 hasException,
 hasLock,
 hasOwner, hasParent,
 hasTimeout,
 haveSameParent,
 holdLock,
 incChildrenLatch,
 isBypass,
 isFailed,
 isFinished,
 isInitializing,
 isRunnable,
 isSuccess,
 isWaiting,
 isYieldAfterExecutionStep,
 lockedWhenLoading,
 releaseLock,
 removeStackIndex,
 restoreLo
 ck, setAbortFailure,
 setChildrenLatch,
 setFailure,
 setFailure,
 setLastUpdate,
 setNonceKey,
 setOwner,
 setOwner,
 setParentProcId,
 setProcId,
 setResult,
 setRootProcId,
 setStackIndexes,
 setState,
 setSubmittedTime,
 setTimeout, setTimeoutFailure,
 shouldWaitClientAck,
 toString,
 toStringClass,
 toStringClassDetails,
 toStringDetails,
 toStringSimpleSB,
 toStringState, tryRunnable,
 updateMetricsOnFinish,
 updateMetricsOnSubmit,
 updateTimestamp,
 waitInitialized,
 wasExecuted
+acquireLock,
 addStackIndex,
 afterReplay,
 beforeReplay,
 bypass,
 compareTo,
 completionCleanup,
 doAcquireLock, doExecute,
 doReleaseLock,
 doRollback,
 elapsedTime,
 getChildrenLatch,
 getException,
 getLastUpdate,
 getNonceKey,
 getOwner,
 getParentProcId,
 getProcedureMetrics,
 getProcId,
 getProcIdHashCode,
 getProcName,
 getResult,
 getRootProcedureId,
 getRootProcId, getStackIndexes,
 getState,
 getSubmittedTime,
 getTimeout,
 getTimeoutTimestamp,
 hasChildren,
 hasException,
 hasLock,
 hasOwner, hasParent,
 hasTimeout,
 haveSameParent,
 holdLock,
 incChildrenLatch,
 isBypass,
 isFailed,
 isFinished,
 isInitializing,
 isRunnable,
 isSuccess,
 isWaiting,
 isYieldAfterExecutionStep,
 lockedWhenLoading,
 needPersistence,
 releaseLock,
 removeStackIndex, resetPersistence,
 restoreLock,
 setAbortFailure,
 setChildrenLatch,
 setFailure,
 setFailure,
 setLastUpdate,
 setNonceKey, setOwner,
 setOwner,
 setParentProcId,
 setProcId,
 setResult,
 setRootProcId,
 setStackIndexes,
 setState, 
setSubmittedTime,
 setTimeout,
 setTimeoutFailure,
 shouldWaitClientAck,
 skipPersistence,
 toString,
 toStringClass,
 toStringClassDetails, toStringDetails,
 toStringSimpleSB,
 toStringState,
 tryRunnable,
 updateMetricsOnFinish,
 updateMetricsOnSubmit,
 updateTimestamp,
 waitInitialized, wasExecuted
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/419d0338/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureUtil.CompatStateSerializer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureUtil.CompatStateSerializer.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureUtil.CompatStateSerializer.html
index d50a0d5..3c284d3 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureUtil.CompatStateSerializer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureUtil.CompatStateSerializer.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class ProcedureUtil.CompatStateSerializer
+private static class ProcedureUtil.CompatStateSerializer
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements ProcedureStateSerializer
 A serializer (deserializer) for those Procedures which were 
serialized
@@ -212,7 +212,7 @@ implements 
 
 inputStream
-privatehttps://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html?is-external=true;
 title="class or interface in java.io">InputStream inputStream
+privatehttps://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html?is-external=true;
 title="class or interface in java.io">InputStream inputStream
 
 
 
@@ -229,7 +229,7 @@ implements 
 
 CompatStateSerializer
-publicCompatStateSerializer(https://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html?is-external=true;
 title="class or interface in java.io">InputStreaminputStream)
+publicCompatStateSerializer(https://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html?is-external=true;
 title="class or interface in 

[32/51] [partial] hbase-site git commit: Published site at d7e08317d2f214e4cca7b67578aba0ed7a567d54.

2018-09-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37cf49a6/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
index fb9958e..fc0e360 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
@@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-class HRegion.RegionScannerImpl
+class HRegion.RegionScannerImpl
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements RegionScanner, Shipper, RpcCallback
 RegionScannerImpl is used to combine scanners from multiple 
Stores (aka column families).
@@ -425,7 +425,7 @@ implements 
 
 storeHeap
-KeyValueHeap storeHeap
+KeyValueHeap storeHeap
 
 
 
@@ -434,7 +434,7 @@ implements 
 
 joinedHeap
-KeyValueHeap joinedHeap
+KeyValueHeap joinedHeap
 Heap of key-values that are not essential for the provided 
filters and are thus read
  on demand, if on-demand column family loading is enabled.
 
@@ -445,7 +445,7 @@ implements 
 
 joinedContinuationRow
-protectedCell joinedContinuationRow
+protectedCell joinedContinuationRow
 If the joined heap data gathering is interrupted due to 
scan limits, this will
  contain the row for which we are populating the values.
 
@@ -456,7 +456,7 @@ implements 
 
 filterClosed
-privateboolean filterClosed
+privateboolean filterClosed
 
 
 
@@ -465,7 +465,7 @@ implements 
 
 stopRow
-protected finalbyte[] stopRow
+protected finalbyte[] stopRow
 
 
 
@@ -474,7 +474,7 @@ implements 
 
 includeStopRow
-protected finalboolean includeStopRow
+protected finalboolean includeStopRow
 
 
 
@@ -483,7 +483,7 @@ implements 
 
 region
-protected finalHRegion region
+protected finalHRegion region
 
 
 
@@ -492,7 +492,7 @@ implements 
 
 comparator
-protected finalCellComparator comparator
+protected finalCellComparator comparator
 
 
 
@@ -501,7 +501,7 @@ implements 
 
 readPt
-private finallong readPt
+private finallong readPt
 
 
 
@@ -510,7 +510,7 @@ implements 
 
 maxResultSize
-private finallong maxResultSize
+private finallong maxResultSize
 
 
 
@@ -519,7 +519,7 @@ implements 
 
 defaultScannerContext
-private finalScannerContext defaultScannerContext
+private finalScannerContext defaultScannerContext
 
 
 
@@ -528,7 +528,7 @@ implements 
 
 filter
-private finalFilterWrapper filter
+private finalFilterWrapper filter
 
 
 
@@ -545,7 +545,7 @@ implements 
 
 RegionScannerImpl
-RegionScannerImpl(Scanscan,
+RegionScannerImpl(Scanscan,
   https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListKeyValueScanneradditionalScanners,
   HRegionregion)
throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
@@ -561,7 +561,7 @@ implements 
 
 RegionScannerImpl
-RegionScannerImpl(Scanscan,
+RegionScannerImpl(Scanscan,
   https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListKeyValueScanneradditionalScanners,
   HRegionregion,
   longnonceGroup,
@@ -587,7 +587,7 @@ implements 
 
 getRegionInfo
-publicRegionInfogetRegionInfo()
+publicRegionInfogetRegionInfo()
 
 Specified by:
 getRegionInfoin
 interfaceRegionScanner
@@ -602,7 +602,7 @@ implements 
 
 initializeScanners
-protectedvoidinitializeScanners(Scanscan,
+protectedvoidinitializeScanners(Scanscan,
   https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListKeyValueScanneradditionalScanners)
throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
@@ -617,7 +617,7 @@ implements 
 
 initializeKVHeap
-protectedvoidinitializeKVHeap(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListKeyValueScannerscanners,
+protectedvoidinitializeKVHeap(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListKeyValueScannerscanners,
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListKeyValueScannerjoinedScanners,
 HRegionregion)
  throws 

[32/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/regionserver/handler/RSProcedureHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/handler/RSProcedureHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/handler/RSProcedureHandler.html
index c3ca84f..34a0791 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/handler/RSProcedureHandler.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/handler/RSProcedureHandler.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames
@@ -328,7 +328,7 @@ extends 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/regionserver/handler/UnassignRegionHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/handler/UnassignRegionHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/handler/UnassignRegionHandler.html
new file mode 100644
index 000..2eb680c
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/handler/UnassignRegionHandler.html
@@ -0,0 +1,454 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+UnassignRegionHandler (Apache HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.regionserver.handler
+Class 
UnassignRegionHandler
+
+
+
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.executor.EventHandler
+
+
+org.apache.hadoop.hbase.regionserver.handler.UnassignRegionHandler
+
+
+
+
+
+
+
+
+
+All Implemented Interfaces:
+https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">Comparablehttps://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable, https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable
+
+
+
+@InterfaceAudience.Private
+public class UnassignRegionHandler
+extends EventHandler
+Handles closing of a region on a region server.
+ 
+ It does the same thing as the old CloseRegionHandler, with some modifications on
+ fencing and retrying. But we need to keep the old CloseRegionHandler as is to stay compatible
+ with the zk-less assignment for 1.x, otherwise a rolling upgrade is not possible.
+
+
+
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+private boolean
+abort
+
+
+private ServerName
+destination
+
+
+private https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+encodedName
+
+
+private static org.slf4j.Logger
+LOG
+
+
+
+
+
+
+Fields inherited from classorg.apache.hadoop.hbase.executor.EventHandler
+eventType,
 seqids,
 server,
 waitingTimeForEvents
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Constructor and Description
+
+
+UnassignRegionHandler(RegionServerServicesserver,
+ https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringencodedName,
+ booleanabort,
+ ServerNamedestination,
+ EventTypeeventType)
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All MethodsStatic MethodsInstance MethodsConcrete Methods
+
+Modifier and Type
+Method and Description
+
+
+static UnassignRegionHandler
+create(RegionServerServicesserver,
+  https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 

[32/51] [partial] hbase-site git commit: Published site at cd161d976ef47b84e904f2d54bac65d2f3417c2a.

2018-09-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/fa1bebf8/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
index a5789e0..93a57cb 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
@@ -238,4120 +238,4119 @@
 230 * @see Admin
 231 */
 232@InterfaceAudience.Private
-233@InterfaceStability.Evolving
-234public class HBaseAdmin implements Admin 
{
-235  private static final Logger LOG = 
LoggerFactory.getLogger(HBaseAdmin.class);
-236
-237  private ClusterConnection connection;
-238
-239  private final Configuration conf;
-240  private final long pause;
-241  private final int numRetries;
-242  private final int syncWaitTimeout;
-243  private boolean aborted;
-244  private int operationTimeout;
-245  private int rpcTimeout;
-246
-247  private RpcRetryingCallerFactory 
rpcCallerFactory;
-248  private RpcControllerFactory 
rpcControllerFactory;
-249
-250  private NonceGenerator ng;
-251
-252  @Override
-253  public int getOperationTimeout() {
-254return operationTimeout;
-255  }
-256
-257  HBaseAdmin(ClusterConnection 
connection) throws IOException {
-258this.conf = 
connection.getConfiguration();
-259this.connection = connection;
-260
-261// TODO: receive 
ConnectionConfiguration here rather than re-parsing these configs every time.
-262this.pause = 
this.conf.getLong(HConstants.HBASE_CLIENT_PAUSE,
-263
HConstants.DEFAULT_HBASE_CLIENT_PAUSE);
-264this.numRetries = 
this.conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,
-265
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);
-266this.operationTimeout = 
this.conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,
-267
HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);
-268this.rpcTimeout = 
this.conf.getInt(HConstants.HBASE_RPC_TIMEOUT_KEY,
-269
HConstants.DEFAULT_HBASE_RPC_TIMEOUT);
-270this.syncWaitTimeout = this.conf.getInt(
-271  "hbase.client.sync.wait.timeout.msec", 10 * 60000); // 10min
-272
-273this.rpcCallerFactory = 
connection.getRpcRetryingCallerFactory();
-274this.rpcControllerFactory = 
connection.getRpcControllerFactory();
-275
-276this.ng = 
this.connection.getNonceGenerator();
-277  }
-278
-279  @Override
-280  public void abort(String why, Throwable 
e) {
-281// Currently does nothing but throw 
the passed message and exception
-282this.aborted = true;
-283throw new RuntimeException(why, e);
-284  }
-285
-286  @Override
-287  public boolean isAborted() {
-288return this.aborted;
-289  }
-290
-291  @Override
-292  public boolean abortProcedure(final 
long procId, final boolean mayInterruptIfRunning)
-293  throws IOException {
-294return 
get(abortProcedureAsync(procId, mayInterruptIfRunning), this.syncWaitTimeout,
-295  TimeUnit.MILLISECONDS);
-296  }
-297
-298  @Override
-299  public Future<Boolean> abortProcedureAsync(final long procId, final boolean mayInterruptIfRunning)
-300  throws IOException {
-301Boolean abortProcResponse =
-302executeCallable(new MasterCallable<AbortProcedureResponse>(getConnection(),
-303getRpcControllerFactory()) {
-304  @Override
-305  protected AbortProcedureResponse 
rpcCall() throws Exception {
-306AbortProcedureRequest 
abortProcRequest =
-307
AbortProcedureRequest.newBuilder().setProcId(procId).build();
-308return 
master.abortProcedure(getRpcController(), abortProcRequest);
-309  }
-310}).getIsProcedureAborted();
-311return new AbortProcedureFuture(this, 
procId, abortProcResponse);
-312  }
-313
-314  @Override
-315  public List<TableDescriptor> listTableDescriptors() throws IOException {
-316return listTableDescriptors((Pattern)null, false);
-317  }
-318
-319  @Override
-320  public List<TableDescriptor> listTableDescriptors(Pattern pattern) throws IOException {
-321return listTableDescriptors(pattern, false);
-322  }
-323
-324  @Override
-325  public List<TableDescriptor> listTableDescriptors(Pattern pattern, boolean includeSysTables)
-326  throws IOException {
-327return executeCallable(new MasterCallable<List<TableDescriptor>>(getConnection(),
-328getRpcControllerFactory()) {
-329  @Override
-330  protected List<TableDescriptor> rpcCall() throws Exception {
-331GetTableDescriptorsRequest req =
-332RequestConverter.buildGetTableDescriptorsRequest(pattern, includeSysTables);
-333return ProtobufUtil.toTableDescriptorList(master.getTableDescriptors(getRpcController(),
-334req));
-335  }
-336   

[32/51] [partial] hbase-site git commit: Published site at c6a65ba63fce85ac7c4b62b96ef2bbe6c35d2f00.

2018-09-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/293abb17/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
index 6422327..8573476 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
@@ -902,7 +902,7 @@ implements 
 
 createCluster
-protectedBaseLoadBalancer.ClustercreateCluster(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerNameservers,
+protectedBaseLoadBalancer.ClustercreateCluster(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerNameservers,
  https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionRegionInforegions)
 
 
@@ -912,7 +912,7 @@ implements 
 
 findIdleServers
-privatehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerNamefindIdleServers(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerNameservers)
+privatehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerNamefindIdleServers(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerNameservers)
 
 
 
@@ -921,7 +921,7 @@ implements 
 
 randomAssignment
-publicServerNamerandomAssignment(RegionInforegionInfo,
+publicServerNamerandomAssignment(RegionInforegionInfo,
https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerNameservers)
 throws HBaseIOException
 Used to assign a single region to a random server.
@@ -943,7 +943,7 @@ implements 
 
 retainAssignment
-publichttps://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInforetainAssignment(https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapRegionInfo,ServerNameregions,
+publichttps://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInforetainAssignment(https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapRegionInfo,ServerNameregions,
  https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerNameservers)
   throws HBaseIOException
 Generates a bulk assignment startup plan, attempting to 
reuse the existing
@@ -976,7 +976,7 @@ implements 
 
 initialize
-publicvoidinitialize()
+publicvoidinitialize()
 throws HBaseIOException
 Description copied from 
interface:LoadBalancer
 Initialize the load balancer. Must be called after 
setters.
@@ -994,7 +994,7 @@ implements 
 
 regionOnline
-publicvoidregionOnline(RegionInforegionInfo,
+publicvoidregionOnline(RegionInforegionInfo,
  ServerNamesn)
 Description copied from 
interface:LoadBalancer
 Marks the region as online at balancer.
@@ -1010,7 +1010,7 @@ implements 
 
 regionOffline
-publicvoidregionOffline(RegionInforegionInfo)
+publicvoidregionOffline(RegionInforegionInfo)
 Description copied from 
interface:LoadBalancer
 Marks the region as offline at balancer.
 
@@ -1025,7 +1025,7 @@ implements 
 
 isStopped
-publicbooleanisStopped()
+publicbooleanisStopped()
 
 Specified by:
 isStoppedin
 interfaceStoppable
@@ -1040,7 +1040,7 @@ implements 
 
 stop
-publicvoidstop(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringwhy)
+publicvoidstop(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringwhy)
 Description copied from 
interface:Stoppable
 Stop this service.
  Implementers should favor logging errors over throwing 
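A hedged sketch of calling one of the assignment entry points documented above; only randomAssignment() is exercised, and the balancer instance plus its inputs are assumed to exist already (nothing here comes from the diff itself).

import java.util.List;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.master.LoadBalancer;

public class AssignOneRegion {
  // Picks a server for a single region, as documented for randomAssignment() above.
  static ServerName pickServer(LoadBalancer balancer, RegionInfo region, List<ServerName> liveServers)
      throws HBaseIOException {
    return balancer.randomAssignment(region, liveServers);
  }
}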

[32/51] [partial] hbase-site git commit: Published site at 7c1fad4992a169a35b4457e6f4afcb30d04406e9.

2018-08-31 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/74f60271/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html
index 45760ce..0750888 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -382,22 +382,30 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 
+boolean
+bypassProcedure(long id,
+   long lockWait,
+   boolean force)
+Bypass a procedure.
+
+
+
 private void
 countDownChildren(RootProcedureStateTEnvironmentprocStack,
  ProcedureTEnvironmentprocedure)
 
-
+
 NonceKey
 createNonceKey(longnonceGroup,
   longnonce)
 Create a NoneKey from the specified nonceGroup and 
nonce.
 
 
-
+
 private void
 execCompletionCleanup(ProcedureTEnvironmentproc)
 
-
+
 private void
 execProcedure(RootProcedureStateTEnvironmentprocStack,
  ProcedureTEnvironmentprocedure)
@@ -407,228 +415,228 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
   If the procedure execution didn't fail (i.e.
 
 
-
+
 private void
 executeProcedure(ProcedureTEnvironmentproc)
 
-
+
 private Procedure.LockState
 executeRollback(longrootProcId,
RootProcedureStateTEnvironmentprocStack)
 Execute the rollback of the full procedure stack.
 
 
-
+
 private Procedure.LockState
 executeRollback(ProcedureTEnvironmentproc)
 Execute the rollback of the procedure step.
 
 
-
+
 int
 getActiveExecutorCount()
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionProcedureTEnvironment
 getActiveProceduresNoCopy()
 Should only be used when starting up, where the procedure 
workers have not been started.
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttps://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
 getActiveProcIds()
 
-
+
 int
 getCorePoolSize()
 
-
+
 TEnvironment
 getEnvironment()
 
-
+
 long
 getKeepAliveTime(https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true;
 title="class or interface in 
java.util.concurrent">TimeUnittimeUnit)
 
-
+
 protected long
 getLastProcId()
 
-
+
 T extends ProcedureTEnvironmentT
 getProcedure(https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">ClassTclazz,
 longprocId)
 
-
+
 ProcedureTEnvironment
 getProcedure(longprocId)
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListProcedureTEnvironment
 getProcedures()
 Get procedures.
 
 
-
+
 (package private) RootProcedureStateTEnvironment
 getProcStack(longrootProcId)
 
-
+
 ProcedureTEnvironment
 getResult(longprocId)
 
-
+
 ProcedureTEnvironment
 getResultOrProcedure(longprocId)
 
-
+
 (package private) https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
 getRootProcedureId(ProcedureTEnvironmentproc)
 
-
+
 (package private) ProcedureScheduler
 getScheduler()
 
-
+
 

[32/51] [partial] hbase-site git commit: Published site at 3afe9fb7e6ebfa71187cbe131558a83fae61cecd.

2018-08-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/424d7e41/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseHbck.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseHbck.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseHbck.html
new file mode 100644
index 000..370686c
--- /dev/null
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseHbck.html
@@ -0,0 +1,167 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+Source code
+
+
+
+
+001/*
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License.  You may obtain a 
copy of the License at
+009 *
+010 * 
http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or 
agreed to in writing, software
+013 * distributed under the License is 
distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+015 * See the License for the specific 
language governing permissions and
+016 * limitations under the License.
+017 */
+018package org.apache.hadoop.hbase.client;
+019
+020import java.io.IOException;
+021import 
org.apache.hadoop.conf.Configuration;
+022import 
org.apache.hadoop.hbase.ipc.RpcControllerFactory;
+023import 
org.apache.yetus.audience.InterfaceAudience;
+024import org.slf4j.Logger;
+025import org.slf4j.LoggerFactory;
+026import 
org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
+027import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
+028import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse;
+029import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.HbckService.BlockingInterface;
+030
+031
+032/**
+033 * Use {@link ClusterConnection#getHbck()} to obtain an instance of {@link Hbck} instead of
+034 * constructing
+035 * an HBaseHbck directly. This will be mostly used by hbck tool.
+036 *
+037 * <p>Connection should be an <i>unmanaged</i> connection obtained via
+038 * {@link ConnectionFactory#createConnection(Configuration)}.</p>
+039 *
+040 * <p>An instance of this class is lightweight and not-thread safe. A new instance should be created
+041 * by each thread. Pooling or caching of the instance is not recommended.</p>
+042 *
+043 * @see ConnectionFactory
+044 * @see ClusterConnection
+045 * @see Hbck
+046 */
+047@InterfaceAudience.Private
+048public class HBaseHbck implements Hbck 
{
+049  private static final Logger LOG = 
LoggerFactory.getLogger(HBaseHbck.class);
+050
+051  private boolean aborted;
+052  private final BlockingInterface hbck;
+053
+054  private RpcControllerFactory 
rpcControllerFactory;
+055
+056  HBaseHbck(ClusterConnection connection, 
BlockingInterface hbck) throws IOException {
+057this.hbck = hbck;
+058this.rpcControllerFactory = 
connection.getRpcControllerFactory();
+059  }
+060
+061  @Override
+062  public void close() throws IOException 
{
+063// currently does nothing
+064  }
+065
+066  @Override
+067  public void abort(String why, Throwable 
e) {
+068this.aborted = true;
+069// Currently does nothing but throw 
the passed message and exception
+070throw new RuntimeException(why, e);
+071  }
+072
+073  @Override
+074  public boolean isAborted() {
+075return this.aborted;
+076  }
+077
+078  /**
+079   * NOTE: This is a dangerous action, as 
existing running procedures for the table or regions
+080   * which belong to the table may get 
confused.
+081   */
+082  @Override
+083  public TableState 
setTableStateInMeta(TableState state) throws IOException {
+084try {
+085  GetTableStateResponse response = 
hbck.setTableStateInMeta(
+086  
rpcControllerFactory.newController(),
+087  
RequestConverter.buildSetTableStateInMetaRequest(state));
+088  return 
TableState.convert(state.getTableName(), response.getTableState());
+089} catch (ServiceException se) {
+090  LOG.debug("ServiceException while 
updating table state in meta. table={}, state={}",
+091  state.getTableName(), 
state.getState());
+092  throw new IOException(se);
+093}
+094  }
+095}
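A hedged usage sketch for the class above: obtain the Hbck facade from a ClusterConnection and push a table state into meta. The table name, the target state, and the TableState(TableName, State) constructor are assumptions made for illustration; as the NOTE above warns, this can confuse procedures that are already running.

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Hbck;
import org.apache.hadoop.hbase.client.TableState;

public class HbckSketch {
  static void forceEnabledInMeta(ClusterConnection conn) throws Exception {
    try (Hbck hbck = conn.getHbck()) {
      // Returns the previous state recorded in hbase:meta.
      TableState previous = hbck.setTableStateInMeta(
          new TableState(TableName.valueOf("test_table"), TableState.State.ENABLED));
      System.out.println("previous state: " + previous);
    }
  }
}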
+
+
+
+
+
+
+
+
+
+
+
+
+

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/424d7e41/devapidocs/src-html/org/apache/hadoop/hbase/client/Hbck.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/Hbck.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/Hbck.html
new file 

[32/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
index a9629dd..1e6a2bb 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
@@ -31,430 +31,446 @@
 023import java.util.ArrayList;
 024import java.util.Collections;
 025import java.util.Iterator;
-026import java.util.Optional;
-027import 
org.apache.hadoop.hbase.ByteBufferExtendedCell;
-028import org.apache.hadoop.hbase.Cell;
-029import 
org.apache.hadoop.hbase.HConstants;
-030import org.apache.hadoop.hbase.Tag;
-031import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-032import 
org.apache.hadoop.hbase.util.Bytes;
-033import 
org.apache.hadoop.hbase.util.ClassSize;
-034import 
org.apache.yetus.audience.InterfaceAudience;
-035
-036import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-037import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-038import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-039
-040/**
-041 * A filter that will only return the key component of each KV (the value will
-042 * be rewritten as empty).
-043 * <p>
-044 * This filter can be used to grab all of the keys without having to also grab
-045 * the values.
-046 */
-047@InterfaceAudience.Public
-048public class KeyOnlyFilter extends 
FilterBase {
-049
-050  boolean lenAsVal;
-051  public KeyOnlyFilter() { this(false); 
}
-052  public KeyOnlyFilter(boolean lenAsVal) 
{ this.lenAsVal = lenAsVal; }
-053
-054  @Override
-055  public boolean filterRowKey(Cell cell) 
throws IOException {
-056// Impl in FilterBase might do 
unnecessary copy for Off heap backed Cells.
-057return false;
-058  }
-059
-060  @Override
-061  public Cell transformCell(Cell cell) 
{
-062return createKeyOnlyCell(cell);
-063  }
-064
-065  private Cell createKeyOnlyCell(Cell c) 
{
-066if (c instanceof 
ByteBufferExtendedCell) {
-067  return new 
KeyOnlyByteBufferExtendedCell((ByteBufferExtendedCell) c, lenAsVal);
-068} else {
-069  return new KeyOnlyCell(c, 
lenAsVal);
-070}
-071  }
-072
-073  @Deprecated
-074  @Override
-075  public ReturnCode filterKeyValue(final 
Cell ignored) throws IOException {
-076return filterCell(ignored);
-077  }
-078
-079  @Override
-080  public ReturnCode filterCell(final Cell 
ignored) throws IOException {
-081return ReturnCode.INCLUDE;
-082  }
-083
-084  public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) {
-085
Preconditions.checkArgument((filterArguments.isEmpty() || 
filterArguments.size() == 1),
-086
"Expected: 0 or 1 but got: %s", filterArguments.size());
-087KeyOnlyFilter filter = new 
KeyOnlyFilter();
-088if (filterArguments.size() == 1) {
-089  filter.lenAsVal = 
ParseFilter.convertByteArrayToBoolean(filterArguments.get(0));
-090}
-091return filter;
-092  }
-093
-094  /**
-095   * @return The filter serialized using 
pb
-096   */
-097  @Override
-098  public byte [] toByteArray() {
-099FilterProtos.KeyOnlyFilter.Builder 
builder =
-100  
FilterProtos.KeyOnlyFilter.newBuilder();
-101builder.setLenAsVal(this.lenAsVal);
-102return 
builder.build().toByteArray();
-103  }
-104
-105  /**
-106   * @param pbBytes A pb serialized 
{@link KeyOnlyFilter} instance
-107   * @return An instance of {@link KeyOnlyFilter} made from <code>bytes</code>
-108   * @throws DeserializationException
-109   * @see #toByteArray
-110   */
-111  public static KeyOnlyFilter 
parseFrom(final byte [] pbBytes)
-112  throws DeserializationException {
-113FilterProtos.KeyOnlyFilter proto;
-114try {
-115  proto = 
FilterProtos.KeyOnlyFilter.parseFrom(pbBytes);
-116} catch 
(InvalidProtocolBufferException e) {
-117  throw new 
DeserializationException(e);
-118}
-119return new 
KeyOnlyFilter(proto.getLenAsVal());
-120  }
-121
-122  /**
-123   * @param o the other filter to compare 
with
-124   * @return true if and only if the 
fields of the filter that are serialized
-125   * are equal to the corresponding 
fields in other.  Used for testing.
-126   */
-127  @Override
-128  boolean areSerializedFieldsEqual(Filter 
o) {
-129if (o == this) return true;
-130if (!(o instanceof KeyOnlyFilter)) 
return false;
-131
-132KeyOnlyFilter other = 
(KeyOnlyFilter)o;
-133return this.lenAsVal == 
other.lenAsVal;
-134  }
-135
-136  static class KeyOnlyCell implements 
Cell {
-137private Cell cell;
-138private boolean lenAsVal;
-139
-140public KeyOnlyCell(Cell c, boolean 
lenAsVal) {
-141  this.cell = c;
-142  this.lenAsVal = lenAsVal;
-143}
-144
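A short usage sketch for the filter above (not part of this diff): attach a KeyOnlyFilter to a Scan so only row keys travel back to the client, with values rewritten as empty. Connection handling and the table name are assumptions for illustration.

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class KeyOnlyScanSketch {
  static void printRowKeys(Connection conn) throws Exception {
    Scan scan = new Scan().setFilter(new KeyOnlyFilter()); // keys only; values emptied server-side
    try (Table table = conn.getTable(TableName.valueOf("test_table"));
         ResultScanner scanner = table.getScanner(scan)) {
      for (Result r : scanner) {
        System.out.println(Bytes.toString(r.getRow()));
      }
    }
  }
}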

[32/51] [partial] hbase-site git commit: Published site at 6a5b4f2a5c188f8eef4f2250b8b7db7dd1e750e4.

2018-08-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1ff05a18/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactingMemStore.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactingMemStore.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactingMemStore.html
index bf6738e..a62d84e 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactingMemStore.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactingMemStore.html
@@ -74,569 +74,568 @@
 066
 067  private static final Logger LOG = 
LoggerFactory.getLogger(CompactingMemStore.class);
 068  private HStore store;
-069  private RegionServicesForStores 
regionServices;
-070  private CompactionPipeline pipeline;
-071  protected MemStoreCompactor 
compactor;
-072
-073  private long inmemoryFlushSize;   
// the threshold on active size for in-memory flush
-074  private final AtomicBoolean 
inMemoryCompactionInProgress = new AtomicBoolean(false);
-075
-076  // inWalReplay is true while we are 
synchronously replaying the edits from WAL
-077  private boolean inWalReplay = false;
-078
-079  @VisibleForTesting
-080  protected final AtomicBoolean 
allowCompaction = new AtomicBoolean(true);
-081  private boolean compositeSnapshot = 
true;
-082
-083  /**
-084   * Types of indexes (part of immutable 
segments) to be used after flattening,
-085   * compaction, or merge are applied.
-086   */
-087  public enum IndexType {
-088CSLM_MAP,   // ConcurrentSkipLisMap
-089ARRAY_MAP,  // CellArrayMap
-090CHUNK_MAP   // CellChunkMap
-091  }
-092
-093  private IndexType indexType = 
IndexType.ARRAY_MAP;  // default implementation
-094
-095  public static final long DEEP_OVERHEAD 
= ClassSize.align( AbstractMemStore.DEEP_OVERHEAD
-096  + 7 * ClassSize.REFERENCE // 
Store, RegionServicesForStores, CompactionPipeline,
-097  // MemStoreCompactor, 
inMemoryCompactionInProgress,
-098  // allowCompaction, indexType
-099  + Bytes.SIZEOF_LONG   // 
inmemoryFlushSize
-100  + 2 * Bytes.SIZEOF_BOOLEAN// 
compositeSnapshot and inWalReplay
-101  + 2 * ClassSize.ATOMIC_BOOLEAN// 
inMemoryCompactionInProgress and allowCompaction
-102  + CompactionPipeline.DEEP_OVERHEAD 
+ MemStoreCompactor.DEEP_OVERHEAD);
-103
-104  public CompactingMemStore(Configuration 
conf, CellComparator c,
-105  HStore store, 
RegionServicesForStores regionServices,
-106  MemoryCompactionPolicy 
compactionPolicy) throws IOException {
-107super(conf, c);
-108this.store = store;
-109this.regionServices = 
regionServices;
-110this.pipeline = new 
CompactionPipeline(getRegionServices());
-111this.compactor = 
createMemStoreCompactor(compactionPolicy);
-112if 
(conf.getBoolean(MemStoreLAB.USEMSLAB_KEY, MemStoreLAB.USEMSLAB_DEFAULT)) {
-113  // if user requested to work with 
MSLABs (whether on- or off-heap), then the
-114  // immutable segments are going to 
use CellChunkMap as their index
-115  indexType = IndexType.CHUNK_MAP;
-116} else {
-117  indexType = IndexType.ARRAY_MAP;
-118}
-119// initialization of the flush size 
should happen after initialization of the index type
-120// so do not transfer the following 
method
-121initInmemoryFlushSize(conf);
-122LOG.info("Store={}, in-memory flush 
size threshold={}, immutable segments index type={}, " +
-123"compactor={}", 
this.store.getColumnFamilyName(),
-124
StringUtils.byteDesc(this.inmemoryFlushSize), this.indexType,
-125(this.compactor == null? "NULL": 
this.compactor.toString()));
-126  }
-127
-128  @VisibleForTesting
-129  protected MemStoreCompactor 
createMemStoreCompactor(MemoryCompactionPolicy compactionPolicy)
-130  throws IllegalArgumentIOException 
{
-131return new MemStoreCompactor(this, 
compactionPolicy);
-132  }
-133
-134  private void 
initInmemoryFlushSize(Configuration conf) {
-135double factor = 0;
-136long memstoreFlushSize = 
getRegionServices().getMemStoreFlushSize();
-137int numStores = 
getRegionServices().getNumStores();
-138if (numStores <= 1) {
-139  // Family number might also be zero 
in some of our unit test case
-140  numStores = 1;
-141}
-142factor = 
conf.getDouble(IN_MEMORY_FLUSH_THRESHOLD_FACTOR_KEY, 0.0);
-143if(factor != 0.0) {
-144  // multiply by a factor (the same 
factor for all index types)
-145  inmemoryFlushSize = (long) (factor 
* memstoreFlushSize) / numStores;
-146} else {
-147  inmemoryFlushSize = 
IN_MEMORY_FLUSH_MULTIPLIER *
-148  
conf.getLong(MemStoreLAB.CHUNK_SIZE_KEY, MemStoreLAB.CHUNK_SIZE_DEFAULT);
-149  inmemoryFlushSize -= 
ChunkCreator.SIZEOF_CHUNK_HEADER;
-150}
-151  }
-152
-153  /**
-154   * @return Total memory occupied by 
this MemStore. This won't include any size occupied by the
-155   * 
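
A side note on initInmemoryFlushSize() above: the in-memory flush threshold is either a configured fraction of the region flush size divided across stores, or falls back to a multiple of the MSLAB chunk size minus the chunk header. A standalone sketch of that arithmetic (the sample numbers are illustrative only):

    public class InMemoryFlushSizeSketch {
      // Mirrors the two branches of initInmemoryFlushSize() in the diff above.
      static long inmemoryFlushSize(double factor, long memstoreFlushSize, int numStores,
          long inMemoryFlushMultiplier, long chunkSize, long chunkHeader) {
        if (numStores <= 1) {
          numStores = 1; // family count can be zero in unit tests
        }
        if (factor != 0.0) {
          return (long) (factor * memstoreFlushSize) / numStores;
        }
        return inMemoryFlushMultiplier * chunkSize - chunkHeader;
      }

      public static void main(String[] args) {
        // e.g. factor 0.1, 128 MB region flush size, 2 column families
        System.out.println(inmemoryFlushSize(0.1, 128L << 20, 2, 0, 0, 0));
      }
    }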

[32/51] [partial] hbase-site git commit: Published site at 63f2d3cbdc8151f5f61f33e0a078c51b9ac076a5.

2018-08-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7ae6a80c/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionRemoteProcedureBase.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionRemoteProcedureBase.html
 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionRemoteProcedureBase.html
new file mode 100644
index 000..825cd34
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionRemoteProcedureBase.html
@@ -0,0 +1,682 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+RegionRemoteProcedureBase (Apache HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+
+
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.master.assignment
+Class 
RegionRemoteProcedureBase
+
+
+
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.procedure2.Procedure<MasterProcedureEnv>
+
+
+org.apache.hadoop.hbase.master.assignment.RegionRemoteProcedureBase
+
+
+
+
+
+
+
+
+
+All Implemented Interfaces:
+Comparable<Procedure<MasterProcedureEnv>>, TableProcedureInterface, RemoteProcedureDispatcher.RemoteProcedure<MasterProcedureEnv,ServerName>
+
+
+Direct Known Subclasses:
+CloseRegionProcedure, OpenRegionProcedure
+
+
+
+@InterfaceAudience.Private
+public abstract class RegionRemoteProcedureBase
+extends Procedure<MasterProcedureEnv>
+implements TableProcedureInterface, RemoteProcedureDispatcher.RemoteProcedure<MasterProcedureEnv,ServerName>
+The base class for the remote procedures used to open/close 
a region.
+ 
+ Notice that here we do not care about the result of the remote call, if the 
remote call is
+ finished, either succeeded or not, we will always finish the procedure. The 
parent procedure
+ should take care of the result and try to reschedule if the result is not 
good.
+
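
The contract described above (the remote call always completes the child procedure, and the parent decides whether to retry) can be illustrated with a framework-free toy sketch; this is plain Java, not the HBase procedure API:

    import java.util.concurrent.ThreadLocalRandom;

    public class FireAndForgetSketch {
      // Stand-in for the remote open/close call; the child does not act on the outcome.
      static boolean remoteCall() {
        return ThreadLocalRandom.current().nextBoolean();
      }

      // Child procedure: always finishes, only reports what happened.
      static boolean runChild() {
        return remoteCall();
      }

      // Parent procedure: inspects the reported result and reschedules on failure.
      public static void main(String[] args) {
        int attempts = 0;
        while (!runChild() && ++attempts < 5) {
          System.out.println("remote call failed, rescheduling (attempt " + attempts + ")");
        }
        System.out.println("finished after " + attempts + " reschedules");
      }
    }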
+
+
+
+
+
+
+
+
+
+
+Nested Class Summary
+
+
+
+
+Nested classes/interfaces inherited from 
classorg.apache.hadoop.hbase.procedure2.Procedure
+Procedure.LockState
+
+
+
+
+
+Nested classes/interfaces inherited from 
interfaceorg.apache.hadoop.hbase.master.procedure.TableProcedureInterface
+TableProcedureInterface.TableOperationType
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+private boolean
+dispatched
+
+
+private static org.slf4j.Logger
+LOG
+
+
+protected RegionInfo
+region
+
+
+private ServerName
+targetServer
+
+
+
+
+
+
+Fields inherited from classorg.apache.hadoop.hbase.procedure2.Procedure
+NO_PROC_ID,
 NO_TIMEOUT
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Modifier
+Constructor and Description
+
+
+protected 
+RegionRemoteProcedureBase()
+
+
+protected 
+RegionRemoteProcedureBase(RegionInforegion,
+ ServerNametargetServer)
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All MethodsInstance MethodsConcrete Methods
+
+Modifier and Type
+Method and Description
+
+
+protected boolean
+abort(MasterProcedureEnvenv)
+The abort() call is asynchronous and each procedure must 
decide how to deal
+ with it, if they want to be abortable.
+
+
+
+protected void
+deserializeStateData(ProcedureStateSerializerserializer)
+Called on store load to allow the user to decode the 
previously serialized
+ state.
+
+
+
+protected Procedure<MasterProcedureEnv>[]
+execute(MasterProcedureEnvenv)
+The main code of the procedure.
+
+
+
+private ProcedureEvent<?>
+getRegionEvent(MasterProcedureEnvenv)
+
+
+TableName
+getTableName()
+
+
+void
+remoteCallFailed(MasterProcedureEnvenv,
+ServerNameremote,
+https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOExceptionexception)
+Called when the executeProcedure 

[32/51] [partial] hbase-site git commit: Published site at 092efb42749bf7fc6ad338c96aae8e7b9d3a2c74.

2018-08-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f3d62514/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.CellImporter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.CellImporter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.CellImporter.html
index 39170f0..7859ebc 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.CellImporter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.CellImporter.html
@@ -230,564 +230,567 @@
 222  }
 223}
 224  } catch (InterruptedException e) 
{
-225e.printStackTrace();
-226  }
-227}
-228
-229@Override
-230public void setup(Context context) 
throws IOException {
-231  cfRenameMap = 
createCfRenameMap(context.getConfiguration());
-232  filter = 
instantiateFilter(context.getConfiguration());
-233  int reduceNum = 
context.getNumReduceTasks();
-234  Configuration conf = 
context.getConfiguration();
-235  TableName tableName = 
TableName.valueOf(context.getConfiguration().get(TABLE_NAME));
-236  try (Connection conn = 
ConnectionFactory.createConnection(conf);
-237  RegionLocator regionLocator = 
conn.getRegionLocator(tableName)) {
-238byte[][] startKeys = 
regionLocator.getStartKeys();
-239if (startKeys.length != 
reduceNum) {
-240  throw new IOException("Region 
split after job initialization");
-241}
-242CellWritableComparable[] 
startKeyWraps =
-243new 
CellWritableComparable[startKeys.length - 1];
-244for (int i = 1; i < 
startKeys.length; ++i) {
-245  startKeyWraps[i - 1] =
-246  new 
CellWritableComparable(KeyValueUtil.createFirstOnRow(startKeys[i]));
-247}
-248
CellWritableComparablePartitioner.START_KEYS = startKeyWraps;
-249  }
-250}
-251  }
-252
-253  /**
-254   * A mapper that just writes out 
KeyValues.
-255   */
-256  
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="EQ_COMPARETO_USE_OBJECT_EQUALS",
-257  justification="Writables are going 
away and this has been this way forever")
-258  public static class CellImporter 
extends TableMapper<ImmutableBytesWritable, Cell> {
-259private Map<byte[], byte[]> 
cfRenameMap;
-260private Filter filter;
-261private static final Logger LOG = 
LoggerFactory.getLogger(CellImporter.class);
-262
-263/**
-264 * @param row  The current table row 
key.
-265 * @param value  The columns.
-266 * @param context  The current 
context.
-267 * @throws IOException When something 
is broken with the data.
-268 */
-269@Override
-270public void 
map(ImmutableBytesWritable row, Result value,
-271  Context context)
-272throws IOException {
-273  try {
-274if (LOG.isTraceEnabled()) {
-275  LOG.trace("Considering the 
row."
-276  + Bytes.toString(row.get(), 
row.getOffset(), row.getLength()));
-277}
-278if (filter == null
-279|| 
!filter.filterRowKey(PrivateCellUtil.createFirstOnRow(row.get(), 
row.getOffset(),
-280(short) 
row.getLength()))) {
-281  for (Cell kv : 
value.rawCells()) {
-282kv = filterKv(filter, kv);
-283// skip if we filtered it 
out
-284if (kv == null) continue;
-285context.write(row, new 
MapReduceExtendedCell(convertKv(kv, cfRenameMap)));
-286  }
-287}
-288  } catch (InterruptedException e) 
{
-289e.printStackTrace();
-290  }
-291}
-292
-293@Override
-294public void setup(Context context) 
{
-295  cfRenameMap = 
createCfRenameMap(context.getConfiguration());
-296  filter = 
instantiateFilter(context.getConfiguration());
-297}
-298  }
-299
-300  /**
-301   * Write table content out to files in 
hdfs.
-302   */
-303  public static class Importer extends 
TableMapper<ImmutableBytesWritable, Mutation> {
-304private Map<byte[], byte[]> 
cfRenameMap;
-305private List<UUID> 
clusterIds;
-306private Filter filter;
-307private Durability durability;
-308
-309/**
-310 * @param row  The current table row 
key.
-311 * @param value  The columns.
-312 * @param context  The current 
context.
-313 * @throws IOException When something 
is broken with the data.
-314 */
-315@Override
-316public void 
map(ImmutableBytesWritable row, Result value,
-317  Context context)
-318throws IOException {
-319  try {
-320writeResult(row, value, 
context);
-321  } catch (InterruptedException e) 
{
-322e.printStackTrace();
-323  }
-324}
-325
-326private void 
writeResult(ImmutableBytesWritable key, Result result, Context context)
-327throws IOException, 
InterruptedException {
-328  Put put = null;
-329  Delete delete = null;
-330  if 
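
For orientation, a minimal driver sketch for the Import utility whose mappers appear in the diff above; it assumes the standard Hadoop Tool wiring of org.apache.hadoop.hbase.mapreduce.Import, and the table name and input directory are placeholders:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.mapreduce.Import;
    import org.apache.hadoop.util.ToolRunner;

    public class ImportDriverSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Arguments are <tablename> <inputdir>; both values below are placeholders.
        int exitCode = ToolRunner.run(conf, new Import(),
            new String[] { "my_table", "/export/my_table" });
        System.exit(exitCode);
      }
    }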

[32/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

2018-08-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.NewestLogFilter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.NewestLogFilter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.NewestLogFilter.html
index ef680de..f919922 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.NewestLogFilter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.NewestLogFilter.html
@@ -46,120 +46,120 @@
 038import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
 039import 
org.apache.hadoop.hbase.client.Admin;
 040import 
org.apache.hadoop.hbase.client.Connection;
-041import 
org.apache.hadoop.hbase.util.FSUtils;
-042import 
org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
-043import 
org.apache.yetus.audience.InterfaceAudience;
-044import org.slf4j.Logger;
-045import org.slf4j.LoggerFactory;
-046
-047/**
-048 * After a full backup was created, the 
incremental backup will only store the changes made after
-049 * the last full or incremental backup. 
Creating the backup copies the logfiles in .logs and
-050 * .oldlogs since the last backup 
timestamp.
-051 */
-052@InterfaceAudience.Private
-053public class IncrementalBackupManager 
extends BackupManager {
-054  public static final Logger LOG = 
LoggerFactory.getLogger(IncrementalBackupManager.class);
-055
-056  public 
IncrementalBackupManager(Connection conn, Configuration conf) throws 
IOException {
-057super(conn, conf);
-058  }
-059
-060  /**
-061   * Obtain the list of logs that need to 
be copied out for this incremental backup. The list is set
-062   * in BackupInfo.
-063   * @return The new HashMap of RS log 
time stamps after the log roll for this incremental backup.
-064   * @throws IOException exception
-065   */
-066  public HashMap<String, Long> 
getIncrBackupLogFileMap() throws IOException {
-067List<String> logList;
-068HashMap<String, Long> 
newTimestamps;
-069HashMap<String, Long> 
previousTimestampMins;
-070
-071String savedStartCode = 
readBackupStartCode();
-072
-073// key: tableName
-074// value: 
<RegionServer,PreviousTimeStamp>
-075HashMap<TableName, 
HashMap<String, Long>> previousTimestampMap = readLogTimestampMap();
-076
-077previousTimestampMins = 
BackupUtils.getRSLogTimestampMins(previousTimestampMap);
-078
-079if (LOG.isDebugEnabled()) {
-080  LOG.debug("StartCode " + 
savedStartCode + "for backupID " + backupInfo.getBackupId());
-081}
-082// get all new log files from .logs 
and .oldlogs after last TS and before new timestamp
-083if (savedStartCode == null || 
previousTimestampMins == null
-084|| 
previousTimestampMins.isEmpty()) {
-085  throw new IOException(
-086  "Cannot read any previous back 
up timestamps from backup system table. "
-087  + "In order to create an 
incremental backup, at least one full backup is needed.");
-088}
-089
-090LOG.info("Execute roll log procedure 
for incremental backup ...");
-091HashMap<String, String> props = 
new HashMap<>();
-092props.put("backupRoot", 
backupInfo.getBackupRootDir());
-093
-094try (Admin admin = conn.getAdmin()) 
{
-095  
admin.execProcedure(LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE,
-096
LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_NAME, props);
-097}
-098newTimestamps = 
readRegionServerLastLogRollResult();
-099
-100logList = 
getLogFilesForNewBackup(previousTimestampMins, newTimestamps, conf, 
savedStartCode);
-101List<WALItem> 
logFromSystemTable =
-102
getLogFilesFromBackupSystem(previousTimestampMins, newTimestamps, 
getBackupInfo()
-103.getBackupRootDir());
-104logList = 
excludeAlreadyBackedUpWALs(logList, logFromSystemTable);
-105
backupInfo.setIncrBackupFileList(logList);
-106
-107return newTimestamps;
-108  }
-109
-110  /**
-111   * Get list of WAL files eligible for 
incremental backup.
-112   *
-113   * @return list of WAL files
-114   * @throws IOException if getting the 
list of WAL files fails
-115   */
-116  public List<String> 
getIncrBackupLogFileList() throws IOException {
-117List<String> logList;
-118HashMap<String, Long> 
newTimestamps;
-119HashMap<String, Long> 
previousTimestampMins;
-120
-121String savedStartCode = 
readBackupStartCode();
-122
-123// key: tableName
-124// value: 
<RegionServer,PreviousTimeStamp>
-125HashMap<TableName, 
HashMap<String, Long>> previousTimestampMap = readLogTimestampMap();
-126
-127previousTimestampMins = 
BackupUtils.getRSLogTimestampMins(previousTimestampMap);
-128
-129if (LOG.isDebugEnabled()) {
-130  LOG.debug("StartCode " + 
savedStartCode + "for backupID " + backupInfo.getBackupId());
-131}
-132  
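
For the class above, a hedged sketch of requesting an incremental backup through the hbase-backup admin API; the builder and admin class names (BackupRequest.Builder, BackupAdminImpl) are recalled from that module and should be treated as assumptions, and the root dir and table are placeholders:

    import java.util.Arrays;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.backup.BackupAdmin;
    import org.apache.hadoop.hbase.backup.BackupRequest;
    import org.apache.hadoop.hbase.backup.BackupType;
    import org.apache.hadoop.hbase.backup.impl.BackupAdminImpl;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class IncrementalBackupSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
            BackupAdmin admin = new BackupAdminImpl(conn)) {
          BackupRequest request = new BackupRequest.Builder()
              .withBackupType(BackupType.INCREMENTAL)                      // needs an earlier full backup
              .withTableList(Arrays.asList(TableName.valueOf("my_table"))) // placeholder table
              .withTargetRootDir("hdfs:///backup_root")                    // placeholder root dir
              .build();
          String backupId = admin.backupTables(request);
          System.out.println("created backup " + backupId);
        }
      }
    }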

[32/51] [partial] hbase-site git commit: Published site at ba5d1c1f28301adc99019d9d6c4a04fac98ae511.

2018-07-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/804782f0/devapidocs/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.html 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.html
index 3e93d5a..b2f9e8d 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.html
@@ -362,7 +362,7 @@ extends AbstractStateMachineTableProcedure
-acquireLock,
 checkOnline,
 checkTableModifiable,
 getRegionDir,
 getUser, preflightChecks,
 releaseLock,
 releaseSyncLatch,
 setUser,
 toStringClassDetails
+acquireLock,
 checkOnline,
 checkTableModifiable,
 getRegionDir,
 getUser, preflightChecks,
 releaseLock,
 releaseSyncLatch,
 setUser,
 toStringClassDetails,
 waitInitialized
 
 
 
@@ -376,7 +376,7 @@ extends Procedure
-addStackIndex,
 afterReplay,
 beforeReplay,
 compareTo,
 completionCleanup,
 doAcquireLock,
 doExecute,
 d
 oReleaseLock, doRollback,
 elapsedTime,
 getChildrenLatch,
 getException,
 getLastUpdate,
 getNonceKey,
 getOwner,
 getParentProcId,
 getProcedureMetrics, getProcId,
 getProcIdHashCode,
 getProcName,
 getResult,
 getRootProcedureId,
 getRootProcId,
 getStackIndexes,
 getState, getSubmittedTime,
 getTimeout,
 getTimeoutTimestamp,
 hasChildren,
 hasException,
 hasLock,
 hasOwner,
 hasParent,
 hasTimeout, haveSameParent,
 holdLock,
 incChildrenLatch,
 isFailed,
 isFinished,
 isInitializing,
 isRunnable,
 isSuccess,
 isWaiting,
 removeStackIndex,
 setAbortFailure,
 setChildrenLatch,
 setFailure,
 setFailure,
 setLastUpdate,
 setNonceKey, setOwner,
 setOwner,
 setParentProcId,
 setProcId,
 setResult,
 setRootProcId,
 setStackIndexes,
 setState,
 setSubmittedTime,
 setTimeout,
 setTimeoutFailure,
 shouldWaitClientAck,
 toString,
 toStringClass,
 toStringDetails,
 toStringSimpleSB, updateMetricsOnFinish,
 updateMetricsOnSubmit,
 updateTimestamp,
 wasExecuted
+addStackIndex,
 afterReplay,
 beforeReplay,
 compareTo,
 completionCleanup,
 doExecute,
 doRollback,
 elapsedTime,
 getChildrenLatch,
 getException,
 getLastUpdate,
 getNonceKey,
 getOwner,
 getParentProcId,
 getProcedureMetrics,
 getProcId,
 getProcIdHashCode, getProcName,
 getResult,
 getRootProcedureId,
 getRootProcId,
 getStackIndexes,
 getState,
 getSubmittedTime,
 getTimeout,
 getTimeoutTimestamp,
 hasChildren,
 hasException,
 hasLock,
 hasOwner,
 hasParent,
 hasTimeout,
 haveSameParent,
 holdLock, 
incChildrenLatch,
 isFailed,
 isFinished,
 isInitializing,
 isRunnable,
 isSuccess,
 isWaiting,
 removeStackIndex,
 setAbortFailure,
 setChildrenLatch,
 setFailure,
 setFailure,
 setLastUpdate,
 setNonceKey,
 setOwner,
 setOwner, setParentProcId,
 setProcId,
 setResult,
 setRootProcId,
 setStackIndexes,
 setState,
 setSubmittedTime,
 setTimeout, setTimeoutFailure,
 shouldWaitClientAck,
 toString,
 toStringClass,
 toStringDetails,
 toStringSimpleSB,
 updateMetricsOnFinish,
 updateMetricsOnSubmit,
 updateTimestamp,
 wasExecuted
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/804782f0/devapidocs/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
index f455cb4..c08b2dc 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
@@ -364,7 +364,7 @@ extends AbstractStateMachineTableProcedure
-acquireLock,
 checkOnline,
 checkTableModifiable,
 getRegionDir,
 getUser, preflightChecks,
 releaseLock,
 releaseSyncLatch,
 setUser,
 toStringClassDetails
+acquireLock,
 checkOnline,
 checkTableModifiable,
 getRegionDir,
 getUser, preflightChecks,
 releaseLock,
 releaseSyncLatch,
 setUser,
 toStringClassDetails,
 waitInitialized
 
 
 
@@ -378,7 +378,7 @@ extends Procedure
-addStackIndex,
 afterReplay,
 beforeReplay,
 compareTo,
 completionCleanup,
 doAcquireLock,
 doExecute,
 d
 oReleaseLock, doRollback,
 elapsedTime,
 getChildrenLatch,
 getException,
 getLastUpdate,
 getNonceKey,
 getOwner,
 getParentProcId,
 getProcedureMetrics, getProcId,
 getProcIdHashCode,
 getProcName,
 getResult,
 getRootProcedureId,
 getRootProcId,
 getStackIndexes,
 getState, getSubmittedTime,
 getTimeout,
 getTimeoutTimestamp,
 hasChildren,
 hasException,
 hasLock,
 hasOwner,
 hasParent,
 hasTimeout, haveSameParent,
 holdLock,
 incChildrenLatch,
 isFailed,
 isFinished,
 isInitializing,
 isRunnable,
 isSuccess,

[32/51] [partial] hbase-site git commit: Published site at b4759ce6e72f50ccd9d410bd5917dc5a515414f1.

2018-07-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/df8fd1d3/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.ServerStateNode.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.ServerStateNode.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.ServerStateNode.html
index 5f41fe7..c8158b5 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.ServerStateNode.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.ServerStateNode.html
@@ -524,636 +524,639 @@
 516}
 517  }
 518
-519  ArrayList<RegionStateNode> 
getTableRegionStateNodes(final TableName tableName) {
-520final 
ArrayList<RegionStateNode> regions = new 
ArrayList<RegionStateNode>();
-521for (RegionStateNode node: 
regionsMap.tailMap(tableName.getName()).values()) {
-522  if 
(!node.getTable().equals(tableName)) break;
-523  regions.add(node);
-524}
-525return regions;
-526  }
-527
-528  ArrayList<RegionState> 
getTableRegionStates(final TableName tableName) {
-529final ArrayList<RegionState> 
regions = new ArrayList<RegionState>();
-530for (RegionStateNode node: 
regionsMap.tailMap(tableName.getName()).values()) {
-531  if 
(!node.getTable().equals(tableName)) break;
-532  
regions.add(node.toRegionState());
-533}
-534return regions;
-535  }
-536
-537  ArrayList<RegionInfo> 
getTableRegionsInfo(final TableName tableName) {
-538final ArrayList<RegionInfo> 
regions = new ArrayList<RegionInfo>();
-539for (RegionStateNode node: 
regionsMap.tailMap(tableName.getName()).values()) {
-540  if 
(!node.getTable().equals(tableName)) break;
-541  
regions.add(node.getRegionInfo());
-542}
-543return regions;
-544  }
-545
-546  Collection<RegionStateNode> 
getRegionStateNodes() {
-547return regionsMap.values();
+519  public void deleteRegions(final 
List<RegionInfo> regionInfos) {
+520
regionInfos.forEach(this::deleteRegion);
+521  }
+522
+523  ArrayList<RegionStateNode> 
getTableRegionStateNodes(final TableName tableName) {
+524final 
ArrayList<RegionStateNode> regions = new 
ArrayList<RegionStateNode>();
+525for (RegionStateNode node: 
regionsMap.tailMap(tableName.getName()).values()) {
+526  if 
(!node.getTable().equals(tableName)) break;
+527  regions.add(node);
+528}
+529return regions;
+530  }
+531
+532  ArrayList<RegionState> 
getTableRegionStates(final TableName tableName) {
+533final ArrayList<RegionState> 
regions = new ArrayList<RegionState>();
+534for (RegionStateNode node: 
regionsMap.tailMap(tableName.getName()).values()) {
+535  if 
(!node.getTable().equals(tableName)) break;
+536  
regions.add(node.toRegionState());
+537}
+538return regions;
+539  }
+540
+541  ArrayList<RegionInfo> 
getTableRegionsInfo(final TableName tableName) {
+542final ArrayList<RegionInfo> 
regions = new ArrayList<RegionInfo>();
+543for (RegionStateNode node: 
regionsMap.tailMap(tableName.getName()).values()) {
+544  if 
(!node.getTable().equals(tableName)) break;
+545  
regions.add(node.getRegionInfo());
+546}
+547return regions;
 548  }
 549
-550  public ArrayList<RegionState> 
getRegionStates() {
-551final ArrayList<RegionState> 
regions = new ArrayList<RegionState>(regionsMap.size());
-552for (RegionStateNode node: 
regionsMap.values()) {
-553  
regions.add(node.toRegionState());
-554}
-555return regions;
-556  }
-557
-558  // 
==
-559  //  RegionState helpers
-560  // 
==
-561  public RegionState getRegionState(final 
RegionInfo regionInfo) {
-562RegionStateNode regionStateNode = 
getRegionStateNode(regionInfo);
-563return regionStateNode == null ? null 
: regionStateNode.toRegionState();
-564  }
-565
-566  public RegionState getRegionState(final 
String encodedRegionName) {
-567// TODO: Need a map <encodedName, 
...> but it is just dispatch merge...
-568for (RegionStateNode node: 
regionsMap.values()) {
-569  if 
(node.getRegionInfo().getEncodedName().equals(encodedRegionName)) {
-570return node.toRegionState();
-571  }
-572}
-573return null;
-574  }
-575
-576  // 

-577  //  TODO: helpers
-578  // 

-579  public boolean 
hasTableRegionStates(final TableName tableName) {
-580// TODO
-581return 
!getTableRegionStates(tableName).isEmpty();
-582  }
-583
-584  /**
-585   * @return Return online regions of 
table; does not include OFFLINE or SPLITTING regions.
-586   */
-587  public List<RegionInfo> 
getRegionsOfTable(final TableName table) {

[32/51] [partial] hbase-site git commit: Published site at e66a6603e36ecd67237ca16acd5e2de03f0d372d.

2018-07-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0c6f447e/apidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/org/apache/hadoop/hbase/HConstants.html
index abd788f..81bf813 100644
--- a/apidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/org/apache/hadoop/hbase/HConstants.html
@@ -1,6 +1,6 @@
 http://www.w3.org/TR/html4/loose.dtd;>
 
-
+
 
 
 
@@ -20,38 +20,38 @@
 //-->
 
 
-您的浏览器已禁用 JavaScript。
+JavaScript is disabled on your browser.
 
 
 
 
 
-跳过导航链接
+Skip navigation links
 
 
 
-
-概览
-程序包
-类
-使用
-树
-已过时
-索引
-帮助
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
 
 
 
 
-上一个类
-下一个类
+PrevClass
+NextClass
 
 
-框架
-无框架
+Frames
+NoFrames
 
 
-所有类
+AllClasses
 
 
 

[32/51] [partial] hbase-site git commit: Published site at e66a6603e36ecd67237ca16acd5e2de03f0d372d.

2018-07-19 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5427a45e/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html 
b/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
index b7f6e3e..9f13b1b 100644
--- a/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
+++ b/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
@@ -1,6 +1,6 @@
 http://www.w3.org/TR/html4/loose.dtd;>
 
-
+
 
 
 
@@ -19,45 +19,45 @@
 }
 //-->
 var methods = 
{"i0":42,"i1":42,"i2":42,"i3":42,"i4":42,"i5":42,"i6":42,"i7":42,"i8":42,"i9":42,"i10":42,"i11":41,"i12":42,"i13":42,"i14":42,"i15":42,"i16":42,"i17":42,"i18":42,"i19":42,"i20":42,"i21":42,"i22":42,"i23":42,"i24":42,"i25":42,"i26":42,"i27":41,"i28":42,"i29":42,"i30":42,"i31":42,"i32":42,"i33":42,"i34":42,"i35":42,"i36":42,"i37":42,"i38":42,"i39":42,"i40":41,"i41":42,"i42":42,"i43":42,"i44":41,"i45":42,"i46":42,"i47":42,"i48":42,"i49":42,"i50":42,"i51":42,"i52":42,"i53":42,"i54":42,"i55":42,"i56":42,"i57":42,"i58":42,"i59":42,"i60":42,"i61":42,"i62":42,"i63":42,"i64":42,"i65":42,"i66":42,"i67":42,"i68":42,"i69":42,"i70":42,"i71":42,"i72":42,"i73":42,"i74":42,"i75":42,"i76":42,"i77":42,"i78":42,"i79":42,"i80":42,"i81":42,"i82":42};
-var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
+var tabs = 
{65535:["t0","所有方法"],1:["t1","静态方法"],2:["t2","实例方法"],8:["t4","具体方法"],32:["t6","已过时的方法"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
 var activeTableTab = "activeTableTab";
 
 
-JavaScript is disabled on your browser.
+您的浏览器已禁用 JavaScript。
 
 
 
 
 
-Skip navigation links
+跳过导航链接
 
 
 
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
+
+概览
+程序包
+类
+使用
+树
+已过时
+索引
+帮助
 
 
 
 
-PrevClass
-NextClass
+上一个类
+下一个类
 
 
-Frames
-NoFrames
+框架
+无框架
 
 
-AllClasses
+所有类
 
 
 

[32/51] [partial] hbase-site git commit: Published site at 0f23784182ab88649de340d75804e0ff20dcd0fc.

2018-07-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bcb555af/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
index 30d4887..47ab720 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
@@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-class HRegion.RegionScannerImpl
+class HRegion.RegionScannerImpl
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements RegionScanner, Shipper, RpcCallback
 RegionScannerImpl is used to combine scanners from multiple 
Stores (aka column families).
@@ -425,7 +425,7 @@ implements 
 
 storeHeap
-KeyValueHeap storeHeap
+KeyValueHeap storeHeap
 
 
 
@@ -434,7 +434,7 @@ implements 
 
 joinedHeap
-KeyValueHeap joinedHeap
+KeyValueHeap joinedHeap
 Heap of key-values that are not essential for the provided 
filters and are thus read
  on demand, if on-demand column family loading is enabled.
 
@@ -445,7 +445,7 @@ implements 
 
 joinedContinuationRow
-protected Cell joinedContinuationRow
+protected Cell joinedContinuationRow
 If the joined heap data gathering is interrupted due to 
scan limits, this will
  contain the row for which we are populating the values.
 
@@ -456,7 +456,7 @@ implements 
 
 filterClosed
-private boolean filterClosed
+private boolean filterClosed
 
 
 
@@ -465,7 +465,7 @@ implements 
 
 stopRow
-protected final byte[] stopRow
+protected final byte[] stopRow
 
 
 
@@ -474,7 +474,7 @@ implements 
 
 includeStopRow
-protected final boolean includeStopRow
+protected final boolean includeStopRow
 
 
 
@@ -483,7 +483,7 @@ implements 
 
 region
-protected final HRegion region
+protected final HRegion region
 
 
 
@@ -492,7 +492,7 @@ implements 
 
 comparator
-protected final CellComparator comparator
+protected final CellComparator comparator
 
 
 
@@ -501,7 +501,7 @@ implements 
 
 readPt
-private final long readPt
+private final long readPt
 
 
 
@@ -510,7 +510,7 @@ implements 
 
 maxResultSize
-private final long maxResultSize
+private final long maxResultSize
 
 
 
@@ -519,7 +519,7 @@ implements 
 
 defaultScannerContext
-private final ScannerContext defaultScannerContext
+private final ScannerContext defaultScannerContext
 
 
 
@@ -528,7 +528,7 @@ implements 
 
 filter
-private final FilterWrapper filter
+private final FilterWrapper filter
 
 
 
@@ -545,7 +545,7 @@ implements 
 
 RegionScannerImpl
-RegionScannerImpl(Scan scan,
+RegionScannerImpl(Scan scan,
   List<KeyValueScanner> additionalScanners,
   HRegion region)
throws IOException
@@ -561,7 +561,7 @@ implements 
 
 RegionScannerImpl
-RegionScannerImpl(Scan scan,
+RegionScannerImpl(Scan scan,
   List<KeyValueScanner> additionalScanners,
   HRegion region,
   long nonceGroup,
@@ -587,7 +587,7 @@ implements 
 
 getRegionInfo
-public RegionInfo getRegionInfo()
+public RegionInfo getRegionInfo()
 
 Specified by:
 getRegionInfoin
 interfaceRegionScanner
@@ -602,7 +602,7 @@ implements 
 
 initializeScanners
-protected void initializeScanners(Scan scan,
+protected void initializeScanners(Scan scan,
   List<KeyValueScanner> additionalScanners)
throws IOException
 
@@ -617,7 +617,7 @@ implements 
 
 initializeKVHeap
-protected void initializeKVHeap(List<KeyValueScanner> scanners,
+protected void initializeKVHeap(List<KeyValueScanner> scanners,
 List<KeyValueScanner> joinedScanners,
 HRegion region)
  throws 
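
The joinedHeap notes above correspond to the client-side "essential column family" feature; a brief sketch of how a scan opts in (family, qualifier and value below are placeholders):

    import org.apache.hadoop.hbase.CompareOperator;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    public class OnDemandCfScanSketch {
      public static Scan buildScan() {
        // The filter only needs cf1, so cf2 can be read lazily through the joined heap.
        SingleColumnValueFilter filter = new SingleColumnValueFilter(
            Bytes.toBytes("cf1"), Bytes.toBytes("q"), CompareOperator.EQUAL, Bytes.toBytes("v"));
        return new Scan()
            .addFamily(Bytes.toBytes("cf1"))
            .addFamily(Bytes.toBytes("cf2"))
            .setFilter(filter)
            .setLoadColumnFamiliesOnDemand(true); // enables the joined-heap path on the server
      }
    }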

[32/51] [partial] hbase-site git commit: Published site at 85b41f36e01214b6485c9352875c84ebf877dab3.

2018-06-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a5c66de0/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.html 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.html
index c139704..8ca0076 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.html
@@ -588,7 +588,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 REGION_STATE_STAMP_COMPARATOR
-public static finalRegionStates.RegionStateStampComparator
 REGION_STATE_STAMP_COMPARATOR
+public static finalRegionStates.RegionStateStampComparator
 REGION_STATE_STAMP_COMPARATOR
 
 
 
@@ -597,7 +597,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 regionsMap
-private final ConcurrentSkipListMap<byte[],RegionStates.RegionStateNode> regionsMap
+private final ConcurrentSkipListMap<byte[],RegionStates.RegionStateNode> regionsMap
 RegionName -- i.e. RegionInfo.getRegionName() -- as bytes 
to RegionStates.RegionStateNode
 
 
@@ -607,7 +607,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 regionInTransition
-private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentSkipListMap.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ConcurrentSkipListMapRegionInfo,RegionStates.RegionStateNode 
regionInTransition
+private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentSkipListMap.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ConcurrentSkipListMapRegionInfo,RegionStates.RegionStateNode 
regionInTransition
 
 
 
@@ -616,7 +616,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 regionOffline
-private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentSkipListMap.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ConcurrentSkipListMapRegionInfo,RegionStates.RegionStateNode 
regionOffline
+private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentSkipListMap.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ConcurrentSkipListMapRegionInfo,RegionStates.RegionStateNode 
regionOffline
 Regions marked as offline on a read of hbase:meta. Unused 
or at least, once
  offlined, regions have no means of coming on line again. TODO.
 
@@ -627,7 +627,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 regionFailedOpen
-private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentSkipListMap.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ConcurrentSkipListMapbyte[],RegionStates.RegionFailedOpen
 regionFailedOpen
+private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentSkipListMap.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ConcurrentSkipListMapbyte[],RegionStates.RegionFailedOpen
 regionFailedOpen
 
 
 
@@ -636,7 +636,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 serverMap
-private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true;
 title="class or interface in java.util.concurrent">ConcurrentHashMapServerName,RegionStates.ServerStateNode 
serverMap
+private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true;
 title="class or interface in java.util.concurrent">ConcurrentHashMapServerName,RegionStates.ServerStateNode 
serverMap
 
 
 
@@ -653,7 +653,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 RegionStates
-public RegionStates()
+public RegionStates()
 
 
 
@@ -670,7 +670,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 clear
-public void clear()
+public void clear()
 
 
 
@@ -679,7 +679,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 isRegionInRegionStates
-public boolean isRegionInRegionStates(RegionInfo hri)
+public boolean isRegionInRegionStates(RegionInfo hri)
 
 
 
@@ -688,7 +688,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 createRegionStateNode
-protected RegionStates.RegionStateNode createRegionStateNode(RegionInfo regionInfo)
+protected RegionStates.RegionStateNode createRegionStateNode(RegionInfo regionInfo)
 
 
 
@@ -697,7 

[32/51] [partial] hbase-site git commit: Published site at 6198e1fc7dfa85c3bc6b2855f9a5fb5f4b2354ff.

2018-06-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb5d2c62/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
index 8618b6c..97d0108 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":18,"i3":6,"i4":6,"i5":6,"i6":18,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":18,"i17":18,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":18,"i50":6,"i51":6,"i52":18,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":18,"i60":18,"i61":18,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":6,"i68":6,"i69":18,"i70":6,"i71":6,"i72":6,"i73":6,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":18,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":18,"i94":6,"i95":6,"i96":6,"i97":18,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":18,"i104":18,"i105":6,"i106":6,"i107":6,"i108":6,"i109":6,"i110":6,"i111":6,"i112":6,"i113":6,"i114":6,"i115":6,"i116":6,"i117":6,"i118":6,"i119"
 
:6,"i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6,"i127":6,"i128":6,"i129":6,"i130":6,"i131":18,"i132":18,"i133":6,"i134":6,"i135":6,"i136":6,"i137":6,"i138":6,"i139":6,"i140":6,"i141":6,"i142":6,"i143":6,"i144":6,"i145":6};
+var methods = 
{"i0":6,"i1":6,"i2":18,"i3":6,"i4":6,"i5":6,"i6":18,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":18,"i17":18,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":18,"i50":6,"i51":6,"i52":18,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":18,"i60":18,"i61":18,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":6,"i68":6,"i69":18,"i70":6,"i71":18,"i72":6,"i73":6,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":18,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":18,"i95":6,"i96":6,"i97":6,"i98":18,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":18,"i105":18,"i106":6,"i107":6,"i108":6,"i109":6,"i110":6,"i111":6,"i112":6,"i113":6,"i114":6,"i115":6,"i116":6,"i117":6,"i118":6,"i119
 
":6,"i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6,"i127":6,"i128":6,"i129":6,"i130":6,"i131":6,"i132":18,"i133":18,"i134":6,"i135":6,"i136":6,"i137":6,"i138":6,"i139":6,"i140":6,"i141":6,"i142":6,"i143":6,"i144":6,"i145":6,"i146":6,"i147":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],16:["t5","Default Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -106,7 +106,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public interface AsyncAdmin
+public interface AsyncAdmin
 The asynchronous administrative API for HBase.
 
 Since:
@@ -576,46 +576,52 @@ public interface 
+default CompletableFuture<SyncReplicationState>
+getReplicationPeerSyncReplicationState(String peerId)
+Get the current cluster state in a synchronous replication 
peer.
+
+
+
 CompletableFuture<List<SecurityCapability>>
 getSecurityCapabilities()
 
-
+
 CompletableFuture<Boolean>
 isBalancerEnabled()
 Query the current state of the balancer.
 
 
-
+
 CompletableFuture<Boolean>
 isCatalogJanitorEnabled()
 Query on the 
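
A brief client-side sketch of the AsyncAdmin interface diffed above, using the asynchronous connection factory (the balancer query is just an example call; error handling is omitted):

    import java.util.concurrent.CompletableFuture;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.AsyncAdmin;
    import org.apache.hadoop.hbase.client.AsyncConnection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class AsyncAdminSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        CompletableFuture<AsyncConnection> connFuture = ConnectionFactory.createAsyncConnection(conf);
        try (AsyncConnection conn = connFuture.get()) {
          AsyncAdmin admin = conn.getAdmin();
          // Every AsyncAdmin call returns a CompletableFuture; compose rather than block where possible.
          admin.isBalancerEnabled()
              .thenAccept(enabled -> System.out.println("balancer enabled: " + enabled))
              .join();
        }
      }
    }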

[32/51] [partial] hbase-site git commit: Published site at 14087cc919da9f2e0b1a68f701f6365ad9d1d71f.

2018-06-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55ce8d97/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index 6e573df..8f1a6b9 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -273,12 +273,12 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
+org.apache.hadoop.hbase.io.hfile.BlockType
 org.apache.hadoop.hbase.io.hfile.HFileBlock.Writer.State
+org.apache.hadoop.hbase.io.hfile.BlockPriority
 org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory
-org.apache.hadoop.hbase.io.hfile.BlockType
-org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType
 org.apache.hadoop.hbase.io.hfile.CacheConfig.ExternalBlockCaches
-org.apache.hadoop.hbase.io.hfile.BlockPriority
+org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55ce8d97/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
index 29a79fd..1f90dde 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
@@ -349,9 +349,9 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler.BufferCallAction
 org.apache.hadoop.hbase.ipc.CallEvent.Type
 org.apache.hadoop.hbase.ipc.MetricsHBaseServerSourceFactoryImpl.SourceStorage
+org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler.BufferCallAction
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55ce8d97/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
index 81c504b..deb19ba 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
@@ -293,10 +293,10 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.mapreduce.TableSplit.Version
 org.apache.hadoop.hbase.mapreduce.CellCounter.CellCounterMapper.Counters
-org.apache.hadoop.hbase.mapreduce.RowCounter.RowCounterMapper.Counters
 org.apache.hadoop.hbase.mapreduce.SyncTable.SyncMapper.Counter
+org.apache.hadoop.hbase.mapreduce.RowCounter.RowCounterMapper.Counters
+org.apache.hadoop.hbase.mapreduce.TableSplit.Version
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55ce8d97/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionChore.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionChore.html
 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionChore.html
index ffe8371..ab375f4 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionChore.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionChore.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class AssignmentManager.RegionInTransitionChore
+private static class AssignmentManager.RegionInTransitionChore
 extends 

[32/51] [partial] hbase-site git commit: Published site at 72784c2d836a4b977667449d3adec5e8d15453f5.

2018-06-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2b11656f/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
index b6e7636..592c2cc 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
@@ -356,3901 +356,3924 @@
 348  public Future<Void> 
modifyTableAsync(TableDescriptor td) throws IOException {
 349ModifyTableResponse response = 
executeCallable(
 350  new 
MasterCallable<ModifyTableResponse>(getConnection(), 
getRpcControllerFactory()) {
-351@Override
-352protected ModifyTableResponse 
rpcCall() throws Exception {
-353  
setPriority(td.getTableName());
-354  ModifyTableRequest request = 
RequestConverter.buildModifyTableRequest(
-355td.getTableName(), td, 
ng.getNonceGroup(), ng.newNonce());
-356  return 
master.modifyTable(getRpcController(), request);
-357}
-358  });
-359return new ModifyTableFuture(this, 
td.getTableName(), response);
-360  }
-361
-362  @Override
-363  public ListTableDescriptor 
listTableDescriptorsByNamespace(byte[] name) throws IOException {
-364return executeCallable(new 
MasterCallableListTableDescriptor(getConnection(),
-365getRpcControllerFactory()) {
-366  @Override
-367  protected 
ListTableDescriptor rpcCall() throws Exception {
-368return 
master.listTableDescriptorsByNamespace(getRpcController(),
-369
ListTableDescriptorsByNamespaceRequest.newBuilder()
-370  
.setNamespaceName(Bytes.toString(name)).build())
-371.getTableSchemaList()
-372.stream()
-373
.map(ProtobufUtil::toTableDescriptor)
-374
.collect(Collectors.toList());
-375  }
-376});
-377  }
-378
-379  @Override
-380  public List<TableDescriptor> 
listTableDescriptors(List<TableName> tableNames) throws IOException {
-381return executeCallable(new 
MasterCallable<List<TableDescriptor>>(getConnection(),
-382getRpcControllerFactory()) {
-383  @Override
-384  protected 
List<TableDescriptor> rpcCall() throws Exception {
-385GetTableDescriptorsRequest req 
=
-386
RequestConverter.buildGetTableDescriptorsRequest(tableNames);
-387  return 
ProtobufUtil.toTableDescriptorList(master.getTableDescriptors(getRpcController(),
-388  req));
-389  }
-390});
-391  }
-392
-393  @Override
-394  public List<RegionInfo> 
getRegions(final ServerName sn) throws IOException {
-395AdminService.BlockingInterface admin 
= this.connection.getAdmin(sn);
-396// TODO: There is no timeout on this 
controller. Set one!
-397HBaseRpcController controller = 
rpcControllerFactory.newController();
-398return 
ProtobufUtil.getOnlineRegions(controller, admin);
-399  }
-400
-401  @Override
-402  public List<RegionInfo> 
getRegions(TableName tableName) throws IOException {
-403if 
(TableName.isMetaTableName(tableName)) {
-404  return 
Arrays.asList(RegionInfoBuilder.FIRST_META_REGIONINFO);
-405} else {
-406  return 
MetaTableAccessor.getTableRegions(connection, tableName, true);
-407}
-408  }
-409
-410  private static class 
AbortProcedureFuture extends ProcedureFuture<Boolean> {
-411private boolean isAbortInProgress;
-412
-413public AbortProcedureFuture(
-414final HBaseAdmin admin,
-415final Long procId,
-416final Boolean abortProcResponse) 
{
-417  super(admin, procId);
-418  this.isAbortInProgress = 
abortProcResponse;
-419}
-420
-421@Override
-422public Boolean get(long timeout, 
TimeUnit unit)
-423throws InterruptedException, 
ExecutionException, TimeoutException {
-424  if (!this.isAbortInProgress) {
-425return false;
-426  }
-427  super.get(timeout, unit);
-428  return true;
-429}
-430  }
-431
-432  /** @return Connection used by this 
object. */
-433  @Override
-434  public Connection getConnection() {
-435return connection;
-436  }
-437
-438  @Override
-439  public boolean tableExists(final 
TableName tableName) throws IOException {
-440return executeCallable(new 
RpcRetryingCallable<Boolean>() {
-441  @Override
-442  protected Boolean rpcCall(int 
callTimeout) throws Exception {
-443return 
MetaTableAccessor.tableExists(connection, tableName);
-444  }
-445});
-446  }
-447
-448  @Override
-449  public HTableDescriptor[] listTables() 
throws IOException {
-450return listTables((Pattern)null, 
false);
-451  }
-452
-453  @Override
-454  public HTableDescriptor[] 
listTables(Pattern pattern) throws 
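
To show the client-facing side of modifyTableAsync() from the diff above, a minimal sketch that adds a column family to an existing table (table and family names are placeholders):

    import java.util.concurrent.Future;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ModifyTableSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
            Admin admin = conn.getAdmin()) {
          TableName name = TableName.valueOf("my_table");        // placeholder
          TableDescriptor current = admin.getDescriptor(name);
          TableDescriptor updated = TableDescriptorBuilder.newBuilder(current)
              .setColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("new_cf"))) // placeholder
              .build();
          Future<Void> pending = admin.modifyTableAsync(updated); // returns before the procedure completes
          pending.get();                                          // block here only for the demo
        }
      }
    }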

[32/51] [partial] hbase-site git commit: Published site at 9101fc246f86445006bfbcdfda5cc495016dc280.

2018-06-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/65565d77/devapidocs/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.html
index 1d20bab..15825d6 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":9,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":10,"i12":10,"i13":10,"i14":9,"i15":10,"i16":9,"i17":10,"i18":9,"i19":10,"i20":9,"i21":9,"i22":9,"i23":9,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":9,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10};
-var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
+var methods = 
{"i0":9,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":10,"i12":10,"i13":10,"i14":9,"i15":10,"i16":9,"i17":10,"i18":9,"i19":10,"i20":9,"i21":9,"i22":9,"i23":9,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":9,"i30":10,"i31":10,"i32":10,"i33":42,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":42,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10};
+var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class MasterProcedureScheduler
+public class MasterProcedureScheduler
 extends AbstractProcedureScheduler
 ProcedureScheduler for the Master Procedures.
  This ProcedureScheduler tries to provide to the ProcedureExecutor procedures
@@ -269,7 +269,7 @@ extends 
-All MethodsStatic MethodsInstance MethodsConcrete Methods
+All MethodsStatic MethodsInstance MethodsConcrete MethodsDeprecated Methods
 
 Modifier and Type
 Method and Description
@@ -443,12 +443,15 @@ extends 
 boolean
 waitMetaExclusiveLock(Procedure?procedure)
-Try to acquire the exclusive lock on meta.
+Deprecated.
+only used for RecoverMetaProcedure.
 Should be removed along with
+ RecoverMetaProcedure.
+
 
 
 
 boolean
-waitNamespaceExclusiveLock(Procedure procedure,
+waitNamespaceExclusiveLock(Procedure<?> procedure,
   https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringnamespace)
 Suspend the procedure if the specified namespace is already 
locked.
 
@@ -462,14 +465,14 @@ extends 
 boolean
-waitRegion(Procedure procedure,
+waitRegion(Procedure<?> procedure,
   RegionInfo regionInfo)
 Suspend the procedure if the specified region is already 
locked.
 
 
 
 boolean
-waitRegions(Procedure procedure,
+waitRegions(Procedure<?> procedure,
TableName table,
RegionInfo... regionInfo)
 Suspend the procedure if the specified set of regions are 
already locked.
@@ -484,7 +487,7 @@ extends 
 boolean
-waitTableExclusiveLock(Procedure procedure,
+waitTableExclusiveLock(Procedure<?> procedure,
   TableName table)
 Suspend the procedure if the specified table is already 
locked.
 
@@ -496,7 +499,7 @@ extends 
 boolean
-waitTableSharedLock(Procedure procedure,
+waitTableSharedLock(Procedure<?> procedure,
TableName table)
 Suspend the procedure if the specified table is already 
locked.
 
@@ -504,12 +507,15 @@ extends 
 void
 wakeMetaExclusiveLock(Procedure<?> procedure)
-Wake the procedures waiting for meta.
+Deprecated.
+only used for RecoverMetaProcedure.
 Should be removed along with
+ RecoverMetaProcedure.
+
 
 
 
 void
-wakeNamespaceExclusiveLock(Procedure procedure,
+wakeNamespaceExclusiveLock(Procedure<?> procedure,
   https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringnamespace)
 Wake the procedures waiting for the specified 
namespace
 
@@ -523,14 +529,14 @@ extends 
 void
-wakeRegion(Procedure procedure,
+wakeRegion(Procedure<?> procedure,
   RegionInfo regionInfo)
 Wake the procedures waiting for the specified region
 
 
 
 void
-wakeRegions(Procedure procedure,
+wakeRegions(Procedure<?> procedure,
TableName table,
RegionInfo... regionInfo)
 Wake the procedures waiting for the specified regions
@@ -545,14 +551,14 @@ extends 
 void

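As an aside on the wait/wake pairing listed above: a minimal sketch of how a caller would use the namespace lock methods, assuming the scheduler and procedure are supplied by the procedure framework (none of this is taken from the patch itself).

import org.apache.hadoop.hbase.master.procedure.MasterProcedureScheduler;
import org.apache.hadoop.hbase.procedure2.Procedure;

final class NamespaceLockSketch {
  // waitNamespaceExclusiveLock returns true when the namespace is already
  // locked: the procedure must then be suspended and is retried after the
  // current holder calls wakeNamespaceExclusiveLock.
  static boolean runWithNamespaceLock(MasterProcedureScheduler scheduler,
      Procedure<?> proc, String namespace, Runnable body) {
    if (scheduler.waitNamespaceExclusiveLock(proc, namespace)) {
      return false; // suspended; the framework will re-run the procedure later
    }
    try {
      body.run();
      return true;
    } finally {
      scheduler.wakeNamespaceExclusiveLock(proc, namespace);
    }
  }
}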
[32/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
index f236300..513d2ad 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
@@ -26,1048 +26,1115 @@
 018
 019package 
org.apache.hadoop.hbase.backup.impl;
 020
-021import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
-022import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
-023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
-024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
-025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
-026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
-027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
-028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
-029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
-030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
-031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
-032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
-033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
-034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
-035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
-036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
-037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
-038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
-039
-040import java.io.IOException;
-041import java.net.URI;
-042import java.util.List;
+021import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BACKUP_LIST_DESC;
+022import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
+023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
+024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
+025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP;
+027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP_DESC;
+028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_LIST;
+029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
+030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
+031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
+032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
+033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
+034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
+035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
+036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
+037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
+038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
+039import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
+040import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
+041import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
+042import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
 043
-044import 
org.apache.commons.lang3.StringUtils;
-045import 
org.apache.hadoop.conf.Configuration;
-046import 
org.apache.hadoop.conf.Configured;
-047import org.apache.hadoop.fs.FileSystem;
-048import org.apache.hadoop.fs.Path;
-049import 
org.apache.hadoop.hbase.HBaseConfiguration;
-050import 
org.apache.hadoop.hbase.TableName;
-051import 
org.apache.hadoop.hbase.backup.BackupAdmin;
-052import 
org.apache.hadoop.hbase.backup.BackupInfo;
-053import 

[32/51] [partial] hbase-site git commit: Published site at 7d3750bd9fc9747623549c242cc4171e224b3eaf.

2018-06-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3469cbc0/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html 
b/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
index 90fd656..85c6bf7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
@@ -972,7 +972,7 @@ implements 
 
 removeReplicatorIfQueueIsEmpty
-public void removeReplicatorIfQueueIsEmpty(ServerName serverName)
+public void removeReplicatorIfQueueIsEmpty(ServerName serverName)
 throws ReplicationException
 Description copied from 
interface:ReplicationQueueStorage
 Remove the record of region server if the queue is 
empty.
@@ -990,7 +990,7 @@ implements 
 
 getListOfReplicators0
-private List<ServerName> getListOfReplicators0()
+private List<ServerName> getListOfReplicators0()
 throws 
org.apache.zookeeper.KeeperException
 
 Throws:
@@ -1004,7 +1004,7 @@ implements 
 
 getListOfReplicators
-public List<ServerName> getListOfReplicators()
+public List<ServerName> getListOfReplicators()
   throws ReplicationException
 Description copied from 
interface:ReplicationQueueStorage
 Get a list of all region servers that have outstanding 
replication queues. These servers could
@@ -1025,7 +1025,7 @@ implements 
 
 getWALsInQueue0
-private List<String> getWALsInQueue0(ServerName serverName,
+private List<String> getWALsInQueue0(ServerName serverName,
  String queueId)
   throws org.apache.zookeeper.KeeperException
 
@@ -1040,7 +1040,7 @@ implements 
 
 getWALsInQueue
-public List<String> getWALsInQueue(ServerName serverName,
+public List<String> getWALsInQueue(ServerName serverName,
 String queueId)
 throws ReplicationException
 Description copied from 
interface:ReplicationQueueStorage
@@ -1064,7 +1064,7 @@ implements 
 
 getAllQueues0
-private List<String> getAllQueues0(ServerName serverName)
+private List<String> getAllQueues0(ServerName serverName)
 throws org.apache.zookeeper.KeeperException
 
 Throws:
@@ -1078,7 +1078,7 @@ implements 
 
 getAllQueues
-public List<String> getAllQueues(ServerName serverName)

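For readers of the accessors above, a small sketch that walks every replicator, its queues and the WALs in each queue through the ReplicationQueueStorage interface; the storage instance and the printing are illustrative only.

import java.util.List;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.replication.ReplicationException;
import org.apache.hadoop.hbase.replication.ReplicationQueueStorage;

final class QueueWalkSketch {
  // Uses only the accessors shown above: getListOfReplicators(),
  // getAllQueues(ServerName) and getWALsInQueue(ServerName, String).
  static void dump(ReplicationQueueStorage storage) throws ReplicationException {
    for (ServerName rs : storage.getListOfReplicators()) {
      for (String queueId : storage.getAllQueues(rs)) {
        List<String> wals = storage.getWALsInQueue(rs, queueId);
        System.out.println(rs + " / " + queueId + " -> " + wals.size() + " WAL(s)");
      }
    }
  }
}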
[32/51] [partial] hbase-site git commit: Published site at 997747076d8ec0b4346d7cb99c4b0667a7c14905.

2018-05-30 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4df09ed9/devapidocs/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html
 
b/devapidocs/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html
index c386d6c..d8d90b4 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html
@@ -244,7 +244,8 @@ implements (package private) 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest
 convert(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request)
 Deprecated.
-
+Convert from CPEP protobuf 2.5 to internal protobuf 3.3.
+
 
 
 (package private) 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest
@@ -257,7 +258,8 @@ implements (package private) 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest
 convert(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request)
 Deprecated.
-
+Convert from CPEP protobuf 2.5 to internal protobuf 3.3.
+
 
 
 private 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest
@@ -469,9 +471,10 @@ implements 
 
 convert
-org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequestconvert(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequestrequest)
+org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequestconvert(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequestrequest)

throws 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException
 Deprecated.
+Convert from CPEP protobuf 2.5 to internal protobuf 3.3.
 
 Throws:
 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException
@@ -484,7 +487,7 @@ implements 
 
 cleanupBulkLoad
-publicvoidcleanupBulkLoad(com.google.protobuf.RpcControllercontroller,
+publicvoidcleanupBulkLoad(com.google.protobuf.RpcControllercontroller,
 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequestrequest,
 
com.google.protobuf.RpcCallbackorg.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponsedone)
 Deprecated.
@@ -500,7 +503,7 @@ implements 
 
 convert
-org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequestconvert(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequestrequest)
+org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequestconvert(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequestrequest)

throws 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException
 Deprecated.
 Convert from CPEP protobuf 2.5 to internal protobuf 3.3.
@@ -516,7 +519,7 @@ implements 
 
 secureBulkLoadHFiles
-publicvoidsecureBulkLoadHFiles(com.google.protobuf.RpcControllercontroller,
+publicvoidsecureBulkLoadHFiles(com.google.protobuf.RpcControllercontroller,
  
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequestrequest,
  
com.google.protobuf.RpcCallbackorg.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponsedone)
 Deprecated.
@@ -532,9 +535,10 @@ implements 
 
 convert
-org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequestconvert(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequestrequest)
+org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequestconvert(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequestrequest)

  throws 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException
 Deprecated.
+Convert from CPEP protobuf 2.5 to internal protobuf 3.3.
 
 Throws:
 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException
@@ -547,7 +551,7 @@ implements 
 
 ConvertSecureBulkLoadHFilesRequest
-privateorg.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequestConvertSecureBulkLoadHFilesRequest(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequestrequest)

[32/51] [partial] hbase-site git commit: Published site at f3d1c021de2264301f68eadb9ef126ff83d7ef53.

2018-05-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/883dde2f/devapidocs/src-html/org/apache/hadoop/hbase/CellComparator.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/CellComparator.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/CellComparator.html
index 31cd218..5541ab5 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/CellComparator.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/CellComparator.html
@@ -29,116 +29,124 @@
 021
 022import 
org.apache.yetus.audience.InterfaceAudience;
 023import 
org.apache.yetus.audience.InterfaceStability;
-024/**
-025 * Comparator for comparing cells and has 
some specialized methods that allows comparing individual
-026 * cell components like row, family, 
qualifier and timestamp
-027 */
-028@InterfaceAudience.Public
-029@InterfaceStability.Evolving
-030public interface CellComparator extends Comparator<Cell> {
-031  /**
-032   * A comparator for ordering cells in 
user-space tables. Useful when writing cells in sorted
-033   * order as necessary for bulk import 
(i.e. via MapReduce)
-034   * p
-035   * CAUTION: This comparator may provide 
inaccurate ordering for cells from system tables,
-036   * and should not be relied upon in 
that case.
-037   */
-038  static CellComparator getInstance() {
-039return 
CellComparatorImpl.COMPARATOR;
-040  }
-041
-042  /**
-043   * Lexographically compares two cells. 
The key part of the cell is taken for comparison which
-044   * includes row, family, qualifier, 
timestamp and type
-045   * @param leftCell the left hand side 
cell
-046   * @param rightCell the right hand side 
cell
-047   * @return greater than 0 if leftCell 
is bigger, less than 0 if rightCell is bigger, 0 if both
-048   * cells are equal
-049   */
-050  @Override
-051  int compare(Cell leftCell, Cell 
rightCell);
-052
-053  /**
-054   * Compare cells.
-055   * @param ignoreSequenceid True if we 
are to compare the key portion only and ignore
-056   * the sequenceid. Set to false to 
compare key and consider sequenceid.
-057   * @return 0 if equal, -1 if a < b, and +1 if a > b.
-058   */
-059  int compare(Cell leftCell, Cell 
rightCell, boolean ignoreSequenceid);
-060
-061  /**
-062   * Lexographically compares the rows of 
two cells.
-063   * @param leftCell the left hand side 
cell
-064   * @param rightCell the right hand side 
cell
-065   * @return greater than 0 if leftCell 
is bigger, less than 0 if rightCell is bigger, 0 if both
-066   * cells are equal
-067   */
-068  int compareRows(Cell leftCell, Cell 
rightCell);
-069
-070  /**
-071   * Compares the row part of the cell 
with a simple plain byte[] like the
-072   * stopRow in Scan.
-073   * @param cell the cell
-074   * @param bytes the byte[] representing 
the row to be compared with
-075   * @param offset the offset of the 
byte[]
-076   * @param length the length of the 
byte[]
-077   * @return greater than 0 if leftCell 
is bigger, less than 0 if rightCell is bigger, 0 if both
-078   * cells are equal
-079   */
-080  int compareRows(Cell cell, byte[] 
bytes, int offset, int length);
-081
-082  /**
-083   * Lexographically compares the two 
cells excluding the row part. It compares family, qualifier,
-084   * timestamp and the type
-085   * @param leftCell the left hand side 
cell
-086   * @param rightCell the right hand side 
cell
-087   * @return greater than 0 if leftCell 
is bigger, less than 0 if rightCell is bigger, 0 if both
-088   * cells are equal
-089   */
-090  int compareWithoutRow(Cell leftCell, 
Cell rightCell);
-091
-092  /**
-093   * Lexographically compares the 
families of the two cells
-094   * @param leftCell the left hand side 
cell
-095   * @param rightCell the right hand side 
cell
-096   * @return greater than 0 if leftCell 
is bigger, less than 0 if rightCell is bigger, 0 if both
-097   * cells are equal
-098   */
-099  int compareFamilies(Cell leftCell, Cell 
rightCell);
-100
-101  /**
-102   * Lexographically compares the 
qualifiers of the two cells
-103   * @param leftCell the left hand side 
cell
-104   * @param rightCell the right hand side 
cell
-105   * @return greater than 0 if leftCell 
is bigger, less than 0 if rightCell is bigger, 0 if both
-106   * cells are equal
-107   */
-108  int compareQualifiers(Cell leftCell, 
Cell rightCell);
-109
-110  /**
-111   * Compares cell's timestamps in 
DESCENDING order. The below older timestamps sorting ahead of
-112   * newer timestamps looks wrong but it 
is intentional. This way, newer timestamps are first found
-113   * when we iterate over a memstore and 
newer versions are the first we trip over when reading from
-114   * a store file.
-115   * @param leftCell the left hand side 
cell
-116   * @param rightCell the right hand side 
cell
-117   * @return 1 if left's timestamp 
lt; right's timestamp -1 if left's timestamp gt; right's
-118   * timestamp 0 

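Since the source above documents CellComparator as a Comparator<Cell> with per-component helpers, here is a short usage sketch; the row, family and qualifier values are made up.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

final class CellSortSketch {
  public static void main(String[] args) {
    // Two cells in the same row that differ only in qualifier.
    List<Cell> cells = new ArrayList<>();
    cells.add(new KeyValue(Bytes.toBytes("r1"), Bytes.toBytes("f"), Bytes.toBytes("q2"), Bytes.toBytes("v")));
    cells.add(new KeyValue(Bytes.toBytes("r1"), Bytes.toBytes("f"), Bytes.toBytes("q1"), Bytes.toBytes("v")));

    CellComparator comparator = CellComparator.getInstance();
    cells.sort(comparator); // CellComparator extends Comparator<Cell>

    // compareQualifiers() isolates the qualifier part, as documented above.
    int byQualifier = comparator.compareQualifiers(cells.get(0), cells.get(1));
    System.out.println("first qualifier sorts before second: " + (byQualifier < 0));
  }
}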
[32/51] [partial] hbase-site git commit: Published site at cf529f18a9959589fa635f78df4840472526ea2c.

2018-05-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7bcc960d/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
index 5404ea1..1812a55 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
@@ -35,901 +35,908 @@
 027import java.net.InetSocketAddress;
 028import java.net.URLDecoder;
 029import java.net.URLEncoder;
-030import java.util.ArrayList;
-031import java.util.Arrays;
-032import java.util.List;
-033import java.util.Map;
-034import java.util.Set;
-035import java.util.TreeMap;
-036import java.util.TreeSet;
-037import java.util.UUID;
-038import java.util.function.Function;
-039import java.util.stream.Collectors;
-040
-041import 
org.apache.commons.lang3.StringUtils;
-042import 
org.apache.hadoop.conf.Configuration;
-043import org.apache.hadoop.fs.FileSystem;
-044import org.apache.hadoop.fs.Path;
-045import org.apache.hadoop.hbase.Cell;
-046import 
org.apache.hadoop.hbase.CellComparator;
-047import 
org.apache.hadoop.hbase.CellUtil;
-048import 
org.apache.hadoop.hbase.HConstants;
-049import 
org.apache.hadoop.hbase.HRegionLocation;
-050import 
org.apache.hadoop.hbase.HTableDescriptor;
-051import 
org.apache.hadoop.hbase.KeyValue;
-052import 
org.apache.hadoop.hbase.PrivateCellUtil;
-053import 
org.apache.hadoop.hbase.TableName;
-054import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-055import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-056import 
org.apache.hadoop.hbase.client.Connection;
-057import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-058import 
org.apache.hadoop.hbase.client.Put;
-059import 
org.apache.hadoop.hbase.client.RegionLocator;
-060import 
org.apache.hadoop.hbase.client.Table;
-061import 
org.apache.hadoop.hbase.client.TableDescriptor;
-062import 
org.apache.hadoop.hbase.fs.HFileSystem;
-063import 
org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-064import 
org.apache.hadoop.hbase.io.compress.Compression;
-065import 
org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
-066import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-067import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-068import 
org.apache.hadoop.hbase.io.hfile.HFile;
-069import 
org.apache.hadoop.hbase.io.hfile.HFileContext;
-070import 
org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
-071import 
org.apache.hadoop.hbase.io.hfile.HFileWriterImpl;
-072import 
org.apache.hadoop.hbase.regionserver.BloomType;
-073import 
org.apache.hadoop.hbase.regionserver.HStore;
-074import 
org.apache.hadoop.hbase.regionserver.StoreFileWriter;
-075import 
org.apache.hadoop.hbase.util.Bytes;
-076import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-077import 
org.apache.hadoop.hbase.util.FSUtils;
-078import 
org.apache.hadoop.hbase.util.MapReduceExtendedCell;
-079import 
org.apache.hadoop.io.NullWritable;
-080import 
org.apache.hadoop.io.SequenceFile;
-081import org.apache.hadoop.io.Text;
-082import org.apache.hadoop.mapreduce.Job;
-083import 
org.apache.hadoop.mapreduce.OutputCommitter;
-084import 
org.apache.hadoop.mapreduce.OutputFormat;
-085import 
org.apache.hadoop.mapreduce.RecordWriter;
-086import 
org.apache.hadoop.mapreduce.TaskAttemptContext;
-087import 
org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
-088import 
org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-089import 
org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
-090import 
org.apache.yetus.audience.InterfaceAudience;
-091import org.slf4j.Logger;
-092import org.slf4j.LoggerFactory;
-093
-094import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-095
-096/**
-097 * Writes HFiles. Passed Cells must 
arrive in order.
-098 * Writes current time as the sequence id 
for the file. Sets the major compacted
-099 * attribute on created @{link {@link 
HFile}s. Calling write(null,null) will forcibly roll
-100 * all HFiles being written.
-101 * p
-102 * Using this class as part of a 
MapReduce job is best done
-103 * using {@link 
#configureIncrementalLoad(Job, TableDescriptor, RegionLocator)}.
-104 */
-105@InterfaceAudience.Public
-106public class HFileOutputFormat2
-107extends 
FileOutputFormatImmutableBytesWritable, Cell {
-108  private static final Logger LOG = 
LoggerFactory.getLogger(HFileOutputFormat2.class);
-109  static class TableInfo {
-110private TableDescriptor 
tableDesctiptor;
-111private RegionLocator 
regionLocator;
-112
-113public TableInfo(TableDescriptor 
tableDesctiptor, RegionLocator regionLocator) {
-114  this.tableDesctiptor = 
tableDesctiptor;
-115 

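The class comment above recommends configureIncrementalLoad(Job, TableDescriptor, RegionLocator); a hedged wiring sketch follows, where the table name and output path are placeholders rather than values from the patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

final class BulkLoadJobSketch {
  // Configures a MapReduce job so its output is written as HFiles aligned
  // with the current region boundaries of the target table.
  static Job configure(Configuration conf) throws Exception {
    Job job = Job.getInstance(conf, "hfile-output-sketch");
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("TestTable"));
         RegionLocator locator = conn.getRegionLocator(table.getName())) {
      HFileOutputFormat2.configureIncrementalLoad(job, table.getDescriptor(), locator);
    }
    FileOutputFormat.setOutputPath(job, new Path("/tmp/hfiles-out"));
    return job;
  }
}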
[32/51] [partial] hbase-site git commit: Published site at 021f66d11d2cbb7308308093e29e69d6e7661ee9.

2018-05-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/92a26cfb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactingMemStore.InMemoryFlushRunnable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactingMemStore.InMemoryFlushRunnable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactingMemStore.InMemoryFlushRunnable.html
index d4390be..1037b84 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactingMemStore.InMemoryFlushRunnable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactingMemStore.InMemoryFlushRunnable.html
@@ -163,12 +163,12 @@
 155   */
 156  @Override
 157  public MemStoreSize size() {
-158    MemStoreSizing memstoreSizing = new MemStoreSizing();
+158    MemStoreSizing memstoreSizing = new NonThreadSafeMemStoreSizing();
 159
memstoreSizing.incMemStoreSize(active.getMemStoreSize());
 160for (Segment item : 
pipeline.getSegments()) {
 161  
memstoreSizing.incMemStoreSize(item.getMemStoreSize());
 162}
-163    return memstoreSizing;
+163    return memstoreSizing.getMemStoreSize();
 164  }
 165
 166  /**
@@ -224,369 +224,365 @@
 216return new 
MemStoreSnapshot(snapshotId, this.snapshot);
 217  }
 218
-219  /**
-220   * On flush, how much memory we will 
clear.
-221   * @return size of data that is going 
to be flushed
-222   */
-223  @Override
-224  public MemStoreSize getFlushableSize() 
{
-225MemStoreSizing snapshotSizing = 
getSnapshotSizing();
-226if (snapshotSizing.getDataSize() == 
0) {
-227  // if snapshot is empty the tail of 
the pipeline (or everything in the memstore) is flushed
-228  if (compositeSnapshot) {
-229snapshotSizing = 
pipeline.getPipelineSizing();
-230
snapshotSizing.incMemStoreSize(active.getMemStoreSize());
-231  } else {
-232snapshotSizing = 
pipeline.getTailSizing();
-233  }
-234}
-235    return snapshotSizing.getDataSize() > 0 ? snapshotSizing
-236        : new MemStoreSize(active.getMemStoreSize());
-237  }
-238
-239  @Override
-240  protected long keySize() {
-241// Need to consider keySize of all 
segments in pipeline and active
-242long k = this.active.keySize();
-243for (Segment segment : 
this.pipeline.getSegments()) {
-244  k += segment.keySize();
-245}
-246return k;
-247  }
-248
-249  @Override
-250  protected long heapSize() {
-251// Need to consider heapOverhead of 
all segments in pipeline and active
-252long h = this.active.heapSize();
-253for (Segment segment : 
this.pipeline.getSegments()) {
-254  h += segment.heapSize();
-255}
-256return h;
-257  }
-258
-259  @Override
-260  public void 
updateLowestUnflushedSequenceIdInWAL(boolean onlyIfGreater) {
-261long minSequenceId = 
pipeline.getMinSequenceId();
-262if(minSequenceId != Long.MAX_VALUE) 
{
-263  byte[] encodedRegionName = 
getRegionServices().getRegionInfo().getEncodedNameAsBytes();
-264  byte[] familyName = 
getFamilyNameInBytes();
-265  WAL WAL = 
getRegionServices().getWAL();
-266  if (WAL != null) {
-267
WAL.updateStore(encodedRegionName, familyName, minSequenceId, onlyIfGreater);
-268  }
-269}
-270  }
-271
-272  /**
-273   * This message intends to inform the 
MemStore that next coming updates
-274   * are going to be part of the 
replaying edits from WAL
-275   */
-276  @Override
-277  public void startReplayingFromWAL() {
-278inWalReplay = true;
-279  }
-280
-281  /**
-282   * This message intends to inform the 
MemStore that the replaying edits from WAL
-283   * are done
-284   */
-285  @Override
-286  public void stopReplayingFromWAL() {
-287inWalReplay = false;
-288  }
-289
-290  // the getSegments() method is used for 
tests only
-291  @VisibleForTesting
-292  @Override
-293  protected List<Segment> getSegments() {
-294    List<? extends Segment> pipelineList = pipeline.getSegments();
-295    List<Segment> list = new ArrayList<>(pipelineList.size() + 2);
-296list.add(this.active);
-297list.addAll(pipelineList);
-298
list.addAll(this.snapshot.getAllSegments());
-299
-300return list;
-301  }
-302
-303  // the following three methods allow to 
manipulate the settings of composite snapshot
-304  public void 
setCompositeSnapshot(boolean useCompositeSnapshot) {
-305this.compositeSnapshot = 
useCompositeSnapshot;
-306  }
-307
-308  public boolean 
swapCompactedSegments(VersionedSegmentsList versionedList, ImmutableSegment 
result,
-309  boolean merge) {
-310// last true stands for updating the 
region size
-311return pipeline.swap(versionedList, 
result, !merge, true);
-312  }
-313
-314  /**
-315   * @param requesterVersion The caller 
must hold the VersionedList of the pipeline
-316   *   with version taken 
earlier. This version must be passed as a parameter here.
-317   *   The flattening 

[32/51] [partial] hbase-site git commit: Published site at acd0d1e446c164d9c54bfb461b2d449c8d717c07.

2018-05-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f2065178/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AppendTest.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AppendTest.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AppendTest.html
index 2510283..418c60c 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AppendTest.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AppendTest.html
@@ -77,77 +77,77 @@
 069import 
org.apache.hadoop.hbase.client.RowMutations;
 070import 
org.apache.hadoop.hbase.client.Scan;
 071import 
org.apache.hadoop.hbase.client.Table;
-072import 
org.apache.hadoop.hbase.filter.BinaryComparator;
-073import 
org.apache.hadoop.hbase.filter.Filter;
-074import 
org.apache.hadoop.hbase.filter.FilterAllFilter;
-075import 
org.apache.hadoop.hbase.filter.FilterList;
-076import 
org.apache.hadoop.hbase.filter.PageFilter;
-077import 
org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
-078import 
org.apache.hadoop.hbase.filter.WhileMatchFilter;
-079import 
org.apache.hadoop.hbase.io.compress.Compression;
-080import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-081import 
org.apache.hadoop.hbase.io.hfile.RandomDistribution;
-082import 
org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
-083import 
org.apache.hadoop.hbase.regionserver.BloomType;
-084import 
org.apache.hadoop.hbase.regionserver.CompactingMemStore;
-085import 
org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
-086import 
org.apache.hadoop.hbase.trace.SpanReceiverHost;
-087import 
org.apache.hadoop.hbase.trace.TraceUtil;
-088import 
org.apache.hadoop.hbase.util.ByteArrayHashKey;
-089import 
org.apache.hadoop.hbase.util.Bytes;
-090import 
org.apache.hadoop.hbase.util.Hash;
-091import 
org.apache.hadoop.hbase.util.MurmurHash;
-092import 
org.apache.hadoop.hbase.util.Pair;
-093import 
org.apache.hadoop.hbase.util.YammerHistogramUtils;
-094import 
org.apache.hadoop.io.LongWritable;
-095import org.apache.hadoop.io.Text;
-096import org.apache.hadoop.mapreduce.Job;
-097import 
org.apache.hadoop.mapreduce.Mapper;
-098import 
org.apache.hadoop.mapreduce.lib.input.NLineInputFormat;
-099import 
org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-100import 
org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
-101import org.apache.hadoop.util.Tool;
-102import 
org.apache.hadoop.util.ToolRunner;
-103import 
org.apache.htrace.core.ProbabilitySampler;
-104import org.apache.htrace.core.Sampler;
-105import 
org.apache.htrace.core.TraceScope;
-106import 
org.apache.yetus.audience.InterfaceAudience;
-107import org.slf4j.Logger;
-108import org.slf4j.LoggerFactory;
-109import 
org.apache.hbase.thirdparty.com.google.common.base.MoreObjects;
-110import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-111
-112/**
-113 * Script used evaluating HBase 
performance and scalability.  Runs a HBase
-114 * client that steps through one of a set 
of hardcoded tests or 'experiments'
-115 * (e.g. a random reads test, a random 
writes test, etc.). Pass on the
-116 * command-line which test to run and how 
many clients are participating in
-117 * this experiment. Run {@code 
PerformanceEvaluation --help} to obtain usage.
-118 *
-119 * pThis class sets up and runs 
the evaluation programs described in
-120 * Section 7, iPerformance 
Evaluation/i, of the a
-121 * 
href="http://labs.google.com/papers/bigtable.html"Bigtable/a;
-122 * paper, pages 8-10.
-123 *
-124 * pBy default, runs as a 
mapreduce job where each mapper runs a single test
-125 * client. Can also run as a 
non-mapreduce, multithreaded application by
-126 * specifying {@code --nomapred}. Each 
client does about 1GB of data, unless
-127 * specified otherwise.
-128 */
-129@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-130public class PerformanceEvaluation 
extends Configured implements Tool {
-131  static final String RANDOM_SEEK_SCAN = 
"randomSeekScan";
-132  static final String RANDOM_READ = 
"randomRead";
-133  private static final Logger LOG = 
LoggerFactory.getLogger(PerformanceEvaluation.class.getName());
-134  private static final ObjectMapper 
MAPPER = new ObjectMapper();
-135  static {
-136
MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
-137  }
-138
-139  public static final String TABLE_NAME = 
"TestTable";
-140  public static final byte[] FAMILY_NAME 
= Bytes.toBytes("info");
-141  public static final byte [] COLUMN_ZERO 
= Bytes.toBytes("" + 0);
-142  public static final byte [] 
QUALIFIER_NAME = COLUMN_ZERO;
+072import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+073import 
org.apache.hadoop.hbase.filter.BinaryComparator;
+074import 
org.apache.hadoop.hbase.filter.Filter;
+075import 
org.apache.hadoop.hbase.filter.FilterAllFilter;

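The javadoc above says to run PerformanceEvaluation --help for usage; one way to drive it programmatically is through ToolRunner. This is a sketch: --nomapred and the randomRead test name come from the excerpt, while the row count and single client are arbitrary assumptions.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.PerformanceEvaluation;
import org.apache.hadoop.util.ToolRunner;

final class PeLauncherSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Runs the tool in-process (no MapReduce) with one client thread.
    int rc = ToolRunner.run(conf, new PerformanceEvaluation(conf),
        new String[] { "--nomapred", "--rows=100000", "randomRead", "1" });
    System.exit(rc);
  }
}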
[32/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
index e1bc325..63e7421 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 
org.apache.hadoop.ipc.RemoteException;
-135import 

[32/51] [partial] hbase-site git commit: Published site at 2912c953551bedbfbf30c32c156ed7bb187d54c3.

2018-04-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d220bc5e/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html 
b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
index 9573c5f..392306b 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
@@ -2068,7 +2068,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 
 
 cmp
-static final Comparator<HBaseFsck.HbckInfo> cmp
+static final Comparator<HBaseFsck.HbckInfo> cmp
 
 
 
@@ -2979,7 +2979,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 
 
 preCheckPermission
-private void preCheckPermission()
+private void preCheckPermission()
  throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException,
 AccessDeniedException
 
@@ -2995,7 +2995,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 
 
 deleteMetaRegion
-private void deleteMetaRegion(HBaseFsck.HbckInfo hi)
+private void deleteMetaRegion(HBaseFsck.HbckInfo hi)
throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Deletes region from meta table
 
@@ -3010,7 +3010,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 
 
 deleteMetaRegion
-private void deleteMetaRegion(byte[] metaKey)
+private void deleteMetaRegion(byte[] metaKey)
throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Deletes region from meta table
 
@@ -3025,7 +3025,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 
 
 resetSplitParent
-private void resetSplitParent(HBaseFsck.HbckInfo hi)
+private void resetSplitParent(HBaseFsck.HbckInfo hi)
throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Reset the split parent region info in meta table
 
@@ -3040,7 +3040,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 
 
 offline
-private void offline(byte[] regionName)
+private void offline(byte[] regionName)
   throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 This backwards-compatibility wrapper for permanently 
offlining a region
  that should not be alive.  If the region server does not support the
@@ -3060,7 +3060,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 
 
 undeployRegions
-private void undeployRegions(HBaseFsck.HbckInfo hi)
+private void undeployRegions(HBaseFsck.HbckInfo hi)
   throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException,
  https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true;
 title="class or interface in java.lang">InterruptedException
 
@@ -3076,7 +3076,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 
 
 undeployRegionsForHbi
-private void undeployRegionsForHbi(HBaseFsck.HbckInfo hi)
+private void undeployRegionsForHbi(HBaseFsck.HbckInfo hi)
 throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException,
https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true;
 title="class or interface in java.lang">InterruptedException
 
@@ -3092,7 +3092,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 
 
 closeRegion
-private void closeRegion(HBaseFsck.HbckInfo hi)
+private void closeRegion(HBaseFsck.HbckInfo hi)
   throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException,
  https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true;
 title="class or interface in java.lang">InterruptedException
 Attempts to undeploy a region from a region server based in 
information in
@@ -3118,7 +3118,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 
 
 

[32/51] [partial] hbase-site git commit: Published site at 2a2258656b2fcd92b967131b6c1f037363553bc4.

2018-03-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e0fb1fde/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
index ad31b71..8ffb7dd 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
@@ -115,7 +115,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class RegionCoprocessorHost
+public class RegionCoprocessorHost
 extends CoprocessorHost<RegionCoprocessor,RegionCoprocessorEnvironment>
 Implements the coprocessor environment and runtime support 
for coprocessors
  loaded within a Region.
@@ -213,7 +213,7 @@ extends 
-private static org.apache.commons.collections4.map.ReferenceMap<String,ConcurrentMap<String,Object>>
+private static org.apache.hbase.thirdparty.org.apache.commons.collections4.map.ReferenceMap<String,ConcurrentMap<String,Object>>
 SHARED_DATA_MAP
 
 
@@ -796,7 +796,7 @@ extends 
 
 LOG
-private static finalorg.slf4j.Logger LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -805,7 +805,7 @@ extends 
 
 SHARED_DATA_MAP
-private static final org.apache.commons.collections4.map.ReferenceMap<String,ConcurrentMap<String,Object>> SHARED_DATA_MAP
+private static final org.apache.hbase.thirdparty.org.apache.commons.collections4.map.ReferenceMap<String,ConcurrentMap<String,Object>> SHARED_DATA_MAP
 
 
 
@@ -814,7 +814,7 @@ extends 
 
 hasCustomPostScannerFilterRow
-private finalboolean hasCustomPostScannerFilterRow
+private finalboolean hasCustomPostScannerFilterRow
 
 
 
@@ -823,7 +823,7 @@ extends 
 
 rsServices
-RegionServerServices rsServices
+RegionServerServices rsServices
 The region server services
 
 
@@ -833,7 +833,7 @@ extends 
 
 region
-HRegion region
+HRegion region
 The region
 
 
@@ -843,7 +843,7 @@ extends 
 
 regionObserverGetter
-private CoprocessorHost.ObserverGetter<RegionCoprocessor,RegionObserver> regionObserverGetter
+private CoprocessorHost.ObserverGetter<RegionCoprocessor,RegionObserver> regionObserverGetter
 
 
 
@@ -852,7 +852,7 @@ extends 
 
 endpointObserverGetter
-private CoprocessorHost.ObserverGetter<RegionCoprocessor,EndpointObserver> endpointObserverGetter
+private CoprocessorHost.ObserverGetter<RegionCoprocessor,EndpointObserver> endpointObserverGetter
 
 
 
@@ -869,7 +869,7 @@ extends 
 
 RegionCoprocessorHost
-public RegionCoprocessorHost(HRegion region,
+public RegionCoprocessorHost(HRegion region,
  RegionServerServices rsServices,
  
org.apache.hadoop.conf.Configurationconf)
 Constructor
@@ -895,7 +895,7 @@ extends 
 
 getTableCoprocessorAttrsFromSchema

[32/51] [partial] hbase-site git commit: Published site at e468b4022f76688851b3e0c34722f01a56bd624f.

2018-03-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16541468/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncTable.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncTable.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncTable.html
index 9bf7a7c..8103475 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncTable.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncTable.html
@@ -30,601 +30,605 @@
 022import static 
org.apache.hadoop.hbase.client.ConnectionUtils.toCheckExistenceOnly;
 023
 024import com.google.protobuf.RpcChannel;
-025
-026import java.util.List;
-027import 
java.util.concurrent.CompletableFuture;
-028import java.util.concurrent.TimeUnit;
-029import java.util.function.Function;
-030
-031import 
org.apache.hadoop.conf.Configuration;
-032import 
org.apache.hadoop.hbase.CompareOperator;
-033import 
org.apache.hadoop.hbase.TableName;
-034import 
org.apache.hadoop.hbase.util.Bytes;
-035import 
org.apache.yetus.audience.InterfaceAudience;
-036
-037import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-038
-039/**
-040 * The interface for asynchronous version 
of Table. Obtain an instance from a
-041 * {@link AsyncConnection}.
-042 * p
-043 * The implementation is required to be 
thread safe.
-044 * p
-045 * Usually the implementation will not 
throw any exception directly. You need to get the exception
-046 * from the returned {@link 
CompletableFuture}.
-047 * @since 2.0.0
-048 */
-049@InterfaceAudience.Public
-050public interface AsyncTableC extends 
ScanResultConsumerBase {
-051
-052  /**
-053   * Gets the fully qualified table name 
instance of this table.
-054   */
-055  TableName getName();
-056
-057  /**
-058   * Returns the {@link 
org.apache.hadoop.conf.Configuration} object used by this instance.
-059   * p
-060   * The reference returned is not a 
copy, so any change made to it will affect this instance.
-061   */
-062  Configuration getConfiguration();
-063
-064  /**
-065   * Get timeout of each rpc request in 
this Table instance. It will be overridden by a more
-066   * specific rpc timeout config such as 
readRpcTimeout or writeRpcTimeout.
-067   * @see #getReadRpcTimeout(TimeUnit)
-068   * @see #getWriteRpcTimeout(TimeUnit)
-069   * @param unit the unit of time the 
timeout to be represented in
-070   * @return rpc timeout in the specified 
time unit
-071   */
-072  long getRpcTimeout(TimeUnit unit);
-073
-074  /**
-075   * Get timeout of each rpc read request 
in this Table instance.
-076   * @param unit the unit of time the 
timeout to be represented in
-077   * @return read rpc timeout in the 
specified time unit
-078   */
-079  long getReadRpcTimeout(TimeUnit 
unit);
-080
-081  /**
-082   * Get timeout of each rpc write 
request in this Table instance.
-083   * @param unit the unit of time the 
timeout to be represented in
-084   * @return write rpc timeout in the 
specified time unit
-085   */
-086  long getWriteRpcTimeout(TimeUnit 
unit);
-087
-088  /**
-089   * Get timeout of each operation in 
Table instance.
-090   * @param unit the unit of time the 
timeout to be represented in
-091   * @return operation rpc timeout in the 
specified time unit
-092   */
-093  long getOperationTimeout(TimeUnit 
unit);
-094
-095  /**
-096   * Get the timeout of a single 
operation in a scan. It works like operation timeout for other
-097   * operations.
-098   * @param unit the unit of time the 
timeout to be represented in
-099   * @return scan rpc timeout in the 
specified time unit
-100   */
-101  long getScanTimeout(TimeUnit unit);
-102
-103  /**
-104   * Test for the existence of columns in 
the table, as specified by the Get.
-105   * p
-106   * This will return true if the Get 
matches one or more keys, false if not.
-107   * p
-108   * This is a server-side call so it 
prevents any data from being transfered to the client.
-109   * @return true if the specified Get 
matches one or more keys, false if not. The return value will
-110   * be wrapped by a {@link 
CompletableFuture}.
-111   */
-112  default CompletableFuture<Boolean> exists(Get get) {
-113    return get(toCheckExistenceOnly(get)).thenApply(r -> r.getExists());
-114  }
-115
-116  /**
-117   * Extracts certain cells from a given 
row.
-118   * @param get The object that specifies 
what data to fetch and from which row.
-119   * @return The data coming from the 
specified row, if it exists. If the row specified doesn't
-120   * exist, the {@link Result} 
instance returned won't contain any
-121   * {@link 
org.apache.hadoop.hbase.KeyValue}, as indicated by {@link Result#isEmpty()}. 
The
-122   * return value will be wrapped 
by a {@link CompletableFuture}.
-123   */
-124  CompletableFuture<Result> get(Get get);
-125
-126  /**
-127   * Puts some data to the table.
-128   * @param put The data to put.
-129   * @return A {@link 

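The interface excerpt above stresses that implementations rarely throw directly and that errors surface through the returned CompletableFuture; a short sketch of that pattern, with a placeholder table and row.

import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.AsyncTable;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

final class AsyncGetSketch {
  public static void main(String[] args) throws Exception {
    try (AsyncConnection conn =
        ConnectionFactory.createAsyncConnection(HBaseConfiguration.create()).get()) {
      AsyncTable<?> table = conn.getTable(TableName.valueOf("TestTable"));
      CompletableFuture<Result> future = table.get(new Get(Bytes.toBytes("row-1")));
      // Nothing is thrown here directly; failures arrive through the future.
      future.whenComplete((result, err) -> {
        if (err != null) {
          err.printStackTrace();
        } else {
          System.out.println("row found: " + !result.isEmpty());
        }
      }).join();
    }
  }
}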
[32/51] [partial] hbase-site git commit: Published site at 64061f896fe21512504e3886a400759e88b519da.

2018-03-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f1ebf5b6/devapidocs/org/apache/hadoop/hbase/class-use/ClusterMetrics.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ClusterMetrics.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ClusterMetrics.html
index 11d7754..1621237 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ClusterMetrics.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ClusterMetrics.html
@@ -242,27 +242,27 @@
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-AsyncAdmin.getClusterMetrics()
+AsyncHBaseAdmin.getClusterMetrics()
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-RawAsyncHBaseAdmin.getClusterMetrics()
+AsyncAdmin.getClusterMetrics()
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-AsyncHBaseAdmin.getClusterMetrics()
+RawAsyncHBaseAdmin.getClusterMetrics()
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-AsyncAdmin.getClusterMetrics(https://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
+AsyncHBaseAdmin.getClusterMetrics(https://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-RawAsyncHBaseAdmin.getClusterMetrics(https://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
+AsyncAdmin.getClusterMetrics(https://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-AsyncHBaseAdmin.getClusterMetrics(https://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
+RawAsyncHBaseAdmin.getClusterMetrics(https://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
 
 
 
@@ -408,11 +408,11 @@
 
 
 void
-RegionLocationFinder.setClusterMetrics(ClusterMetricsstatus)
+BaseLoadBalancer.setClusterMetrics(ClusterMetricsst)
 
 
 void
-BaseLoadBalancer.setClusterMetrics(ClusterMetricsst)
+RegionLocationFinder.setClusterMetrics(ClusterMetricsstatus)
 
 
 

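The getClusterMetrics()/setClusterMetrics() rows above only changed Javadoc row ordering; the AsyncAdmin signatures themselves are unchanged. A hedged sketch of calling them, assuming an AsyncAdmin obtained from an open AsyncConnection:

import java.util.EnumSet;
import org.apache.hadoop.hbase.ClusterMetrics;
import org.apache.hadoop.hbase.client.AsyncAdmin;

public final class ClusterMetricsSketch {
  // "admin" is assumed to come from AsyncConnection.getAdmin() elsewhere.
  static void demo(AsyncAdmin admin) {
    // Full metrics payload.
    admin.getClusterMetrics()
        .thenAccept(m -> System.out.println("HBase " + m.getHBaseVersion()
            + ", live servers: " + m.getLiveServerMetrics().size()));
    // Restrict the payload to selected options.
    admin.getClusterMetrics(EnumSet.of(ClusterMetrics.Option.LIVE_SERVERS))
        .thenAccept(m -> System.out.println("live-only view: " + m.getLiveServerMetrics().keySet()));
  }
}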
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f1ebf5b6/devapidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
index a8beae7..804b5df 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
@@ -186,94 +186,94 @@ the order they are declared.
 
 
 boolean
-HTable.checkAndDelete(byte[]row,
+Table.checkAndDelete(byte[]row,
   byte[]family,
   byte[]qualifier,
   CompareOperatorop,
   byte[]value,
   Deletedelete)
-Deprecated.
+Deprecated.
+Since 2.0.0. Will be 
removed in 3.0.0. Use Table.checkAndMutate(byte[],
 byte[])
+
 
 
 
 boolean
-Table.checkAndDelete(byte[]row,
+HTable.checkAndDelete(byte[]row,
   byte[]family,
   byte[]qualifier,
   CompareOperatorop,
   byte[]value,
   Deletedelete)
-Deprecated.
-Since 2.0.0. Will be 
removed in 3.0.0. Use Table.checkAndMutate(byte[],
 byte[])
-
+Deprecated.
 
 
 
 boolean
-HTable.checkAndMutate(byte[]row,
+Table.checkAndMutate(byte[]row,
   byte[]family,
   byte[]qualifier,
   CompareOperatorop,
   byte[]value,
-  RowMutationsrm)
-Deprecated.
+  RowMutationsmutation)
+Deprecated.
+Since 2.0.0. Will be 
removed in 3.0.0. Use 

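The deprecation notes above point to the builder-style Table.checkAndMutate(byte[], byte[]) as the replacement for the CompareOperator-based checkAndDelete/checkAndMutate overloads. A hedged sketch of that replacement, assuming an HBase 2.x Table handle and illustrative column names:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public final class CheckAndMutateSketch {
  static void demo(Table table) throws IOException {
    byte[] row = Bytes.toBytes("row-1");
    byte[] cf = Bytes.toBytes("cf");
    byte[] q = Bytes.toBytes("q");
    // Replacement for checkAndDelete(row, cf, q, CompareOperator.EQUAL, value, delete):
    boolean deleted = table.checkAndMutate(row, cf).qualifier(q)
        .ifEquals(Bytes.toBytes("expected"))
        .thenDelete(new Delete(row));
    // Guarded put, conditioned on the column being absent:
    boolean stored = table.checkAndMutate(row, cf).qualifier(q)
        .ifNotExists()
        .thenPut(new Put(row).addColumn(cf, q, Bytes.toBytes("v")));
    System.out.println("deleted=" + deleted + " stored=" + stored);
  }
}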
[32/51] [partial] hbase-site git commit: Published site at 4cb40e6d846ce1f28ffb40d388c9efb753197813.

2018-03-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4dc2a2e8/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
index 8fcef9c..24b3c9b 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
@@ -172,27 +172,27 @@
 
 
 static int
-CellUtil.compare(CellComparatorcomparator,
+PrivateCellUtil.compare(CellComparatorcomparator,
Cellleft,
byte[]key,
intoffset,
intlength)
-Deprecated.
-As of HBase-2.0. Will be 
removed in HBase-3.0
-
+Used when a cell needs to be compared with a key byte[], such as when finding the index from
+ the index block or bloom keys from the bloom blocks. This byte[] is expected to be serialized in
+ the KeyValue serialization format. If the KeyValue (Cell's) serialization format changes, this
+ method cannot be used.
 
 
 
 static int
-PrivateCellUtil.compare(CellComparatorcomparator,
+CellUtil.compare(CellComparatorcomparator,
Cellleft,
byte[]key,
intoffset,
intlength)
-Used when a cell needs to be compared with a key byte[], such as when finding the index from
- the index block or bloom keys from the bloom blocks. This byte[] is expected to be serialized in
- the KeyValue serialization format. If the KeyValue (Cell's) serialization format changes, this
- method cannot be used.
+Deprecated.
+As of HBase-2.0. Will be 
removed in HBase-3.0
+
 
 
 
@@ -265,12 +265,12 @@
 
 
 int
-BufferedDataBlockEncoder.BufferedEncodedSeeker.compareKey(CellComparatorcomparator,
+RowIndexSeekerV1.compareKey(CellComparatorcomparator,
   Cellkey)
 
 
 int
-RowIndexSeekerV1.compareKey(CellComparatorcomparator,
+BufferedDataBlockEncoder.BufferedEncodedSeeker.compareKey(CellComparatorcomparator,
   Cellkey)
 
 
@@ -282,27 +282,27 @@
 
 
 DataBlockEncoder.EncodedSeeker
-CopyKeyDataBlockEncoder.createSeeker(CellComparatorcomparator,
+RowIndexCodecV1.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-PrefixKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
+CopyKeyDataBlockEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-FastDiffDeltaEncoder.createSeeker(CellComparatorcomparator,
+DiffKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-DiffKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
+FastDiffDeltaEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-RowIndexCodecV1.createSeeker(CellComparatorcomparator,
+PrefixKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
@@ -340,9 +340,9 @@
 
 
 
-private CellComparator
-HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
-Needed doing lookup on blocks.
+protected CellComparator
+HFileWriterImpl.comparator
+Key comparator.
 
 
 
@@ -356,9 +356,9 @@
 
 
 
-protected CellComparator
-HFileWriterImpl.comparator
-Key comparator.
+private CellComparator
+HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
+Needed doing lookup on blocks.
 
 
 
@@ -539,15 +539,15 @@
 
 
 private CellComparator
-DefaultStoreFileManager.cellComparator
+StripeStoreFileManager.cellComparator
 
 
 private CellComparator
-StripeStoreFileManager.cellComparator
+DefaultStoreFileManager.cellComparator
 
 
-private CellComparator
-StoreFileWriter.Builder.comparator
+protected CellComparator
+StripeMultiFileWriter.comparator
 
 
 protected CellComparator
@@ -555,31 +555,31 @@
 
 
 private CellComparator
-StoreScanner.comparator
+Segment.comparator
 
 
 private CellComparator
-AbstractMemStore.comparator
+ScanInfo.comparator
 
 
 private CellComparator
-HStoreFile.comparator
+StoreFileWriter.Builder.comparator
 
 
 private CellComparator
-Segment.comparator
+HStoreFile.comparator
 
 
 protected CellComparator
 HRegion.RegionScannerImpl.comparator
 
 
-protected CellComparator
-StripeMultiFileWriter.comparator
+private CellComparator
+AbstractMemStore.comparator
 
 
 private CellComparator
-ScanInfo.comparator
+StoreScanner.comparator
 
 
 protected CellComparator
@@ -609,48 +609,48 @@
 HRegion.getCellComparator()
 
 
-(package private) CellComparator
-StoreFileScanner.getComparator()
+CellComparator
+StoreFileReader.getComparator()
 
 
 protected CellComparator
-AbstractMemStore.getComparator()
+Segment.getComparator()
+Returns the Cell comparator used by this segment
+
 
 
 CellComparator
-StoreFileReader.getComparator()
+ScanInfo.getComparator()
 
 
 CellComparator
-StoreFile.getComparator()
-Get the 

[32/51] [partial] hbase-site git commit: Published site at 8ab7b20f48951d77945181024f5e15842bc253c4.

2018-03-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6eb695c8/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
index ecf500c..0cd5a4e 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
@@ -238,8355 +238,8368 @@
 230  public static final String 
HBASE_MAX_CELL_SIZE_KEY = "hbase.server.keyvalue.maxsize";
 231  public static final int 
DEFAULT_MAX_CELL_SIZE = 10485760;
 232
-233  public static final String 
HBASE_REGIONSERVER_MINIBATCH_SIZE =
-234  
"hbase.regionserver.minibatch.size";
-235  public static final int 
DEFAULT_HBASE_REGIONSERVER_MINIBATCH_SIZE = 2;
-236
-237  /**
-238   * This is the global default value for 
durability. All tables/mutations not
-239   * defining a durability or using 
USE_DEFAULT will default to this value.
-240   */
-241  private static final Durability 
DEFAULT_DURABILITY = Durability.SYNC_WAL;
+233  /**
+234   * This is the global default value for 
durability. All tables/mutations not
+235   * defining a durability or using 
USE_DEFAULT will default to this value.
+236   */
+237  private static final Durability 
DEFAULT_DURABILITY = Durability.SYNC_WAL;
+238
+239  public static final String 
HBASE_REGIONSERVER_MINIBATCH_SIZE =
+240  
"hbase.regionserver.minibatch.size";
+241  public static final int 
DEFAULT_HBASE_REGIONSERVER_MINIBATCH_SIZE = 2;
 242
-243  final AtomicBoolean closed = new 
AtomicBoolean(false);
-244
-245  /* Closing can take some time; use the 
closing flag if there is stuff we don't
-246   * want to do while in closing state; 
e.g. like offer this region up to the
-247   * master as a region to close if the 
carrying regionserver is overloaded.
-248   * Once set, it is never cleared.
-249   */
-250  final AtomicBoolean closing = new 
AtomicBoolean(false);
-251
-252  /**
-253   * The max sequence id of flushed data 
on this region. There is no edit in memory that is
-254   * less that this sequence id.
-255   */
-256  private volatile long maxFlushedSeqId = 
HConstants.NO_SEQNUM;
-257
-258  /**
-259   * Record the sequence id of last flush 
operation. Can be in advance of
-260   * {@link #maxFlushedSeqId} when 
flushing a single column family. In this case,
-261   * {@link #maxFlushedSeqId} will be 
older than the oldest edit in memory.
-262   */
-263  private volatile long lastFlushOpSeqId 
= HConstants.NO_SEQNUM;
-264
-265  /**
-266   * The sequence id of the last replayed 
open region event from the primary region. This is used
-267   * to skip entries before this due to 
the possibility of replay edits coming out of order from
-268   * replication.
-269   */
-270  protected volatile long 
lastReplayedOpenRegionSeqId = -1L;
-271  protected volatile long 
lastReplayedCompactionSeqId = -1L;
-272
-273  
//
-274  // Members
-275  
//
-276
-277  // map from a locked row to the context 
for that lock including:
-278  // - CountDownLatch for threads waiting 
on that row
-279  // - the thread that owns the lock 
(allow reentrancy)
-280  // - reference count of (reentrant) 
locks held by the thread
-281  // - the row itself
-282  private final ConcurrentHashMap<HashedBytes, RowLockContext> lockedRows =
-283      new ConcurrentHashMap<>();
-284
-285  protected final Map<byte[], HStore> stores =
-286      new ConcurrentSkipListMap<>(Bytes.BYTES_RAWCOMPARATOR);
+243  public static final String 
WAL_HSYNC_CONF_KEY = "hbase.wal.hsync";
+244  public static final boolean 
DEFAULT_WAL_HSYNC = false;
+245
+246  final AtomicBoolean closed = new 
AtomicBoolean(false);
+247
+248  /* Closing can take some time; use the 
closing flag if there is stuff we don't
+249   * want to do while in closing state; 
e.g. like offer this region up to the
+250   * master as a region to close if the 
carrying regionserver is overloaded.
+251   * Once set, it is never cleared.
+252   */
+253  final AtomicBoolean closing = new 
AtomicBoolean(false);
+254
+255  /**
+256   * The max sequence id of flushed data 
on this region. There is no edit in memory that is
+257   * less that this sequence id.
+258   */
+259  private volatile long maxFlushedSeqId = 
HConstants.NO_SEQNUM;
+260
+261  /**
+262   * Record the sequence id of last flush 
operation. Can be in advance of
+263   * {@link #maxFlushedSeqId} when 
flushing a single column family. In this case,
+264   * {@link #maxFlushedSeqId} will be 
older than the oldest edit in memory.
+265   */
+266  private volatile long lastFlushOpSeqId 
= 

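The reordered HRegion block above carries the hbase.wal.hsync switch next to the existing hbase.regionserver.minibatch.size key. A hedged sketch of setting both through the standard Configuration API (the values are illustrative, not recommendations):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public final class RegionServerConfSketch {
  static Configuration demo() {
    Configuration conf = HBaseConfiguration.create();
    // Illustrative only: ask the WAL to hsync rather than hflush (durability vs. latency trade-off).
    conf.setBoolean("hbase.wal.hsync", true);
    // Illustrative only: cap the number of mutations handled per mini-batch.
    conf.setInt("hbase.regionserver.minibatch.size", 10000);
    return conf;
  }
}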
[32/51] [partial] hbase-site git commit: Published site at 00095a2ef9442e3fd86c04876c9d91f2f8b23ad8.

2018-03-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bd675fa3/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html
index f47d627..c3d225c 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html
@@ -117,219 +117,219 @@
 109   */
 110  public static boolean 
archiveRegion(FileSystem fs, Path rootdir, Path tableDir, Path regionDir)
 111  throws IOException {
-112if (LOG.isDebugEnabled()) {
-113  LOG.debug("ARCHIVING " + 
regionDir.toString());
-114}
-115
-116// otherwise, we archive the files
-117// make sure we can archive
-118if (tableDir == null || regionDir == 
null) {
-119  LOG.error("No archive directory 
could be found because tabledir (" + tableDir
-120  + ") or regiondir (" + 
regionDir + "was null. Deleting files instead.");
-121  deleteRegionWithoutArchiving(fs, 
regionDir);
-122  // we should have archived, but 
failed to. Doesn't matter if we deleted
-123  // the archived files correctly or 
not.
-124  return false;
-125}
-126
-127// make sure the regiondir lives 
under the tabledir
-128
Preconditions.checkArgument(regionDir.toString().startsWith(tableDir.toString()));
-129Path regionArchiveDir = 
HFileArchiveUtil.getRegionArchiveDir(rootdir,
-130FSUtils.getTableName(tableDir),
-131regionDir.getName());
-132
-133FileStatusConverter getAsFile = new 
FileStatusConverter(fs);
-134// otherwise, we attempt to archive 
the store files
-135
-136    // build collection of just the store directories to archive
-137    Collection<File> toArchive = new ArrayList<>();
-138    final PathFilter dirFilter = new FSUtils.DirFilter(fs);
-139    PathFilter nonHidden = new PathFilter() {
-140      @Override
-141      public boolean accept(Path file) {
-142        return dirFilter.accept(file) && !file.getName().toString().startsWith(".");
-143      }
-144    };
-145FileStatus[] storeDirs = 
FSUtils.listStatus(fs, regionDir, nonHidden);
-146// if there no files, we can just 
delete the directory and return;
-147if (storeDirs == null) {
-148  LOG.debug("Region directory " + 
regionDir + " empty.");
-149  return 
deleteRegionWithoutArchiving(fs, regionDir);
-150}
-151
-152// convert the files in the region to 
a File
-153
toArchive.addAll(Lists.transform(Arrays.asList(storeDirs), getAsFile));
-154LOG.debug("Archiving " + 
toArchive);
-155    List<File> failedArchive = resolveAndArchive(fs, regionArchiveDir, toArchive,
-156        EnvironmentEdgeManager.currentTime());
-157if (!failedArchive.isEmpty()) {
-158  throw new 
FailedArchiveException("Failed to archive/delete all the files for region:"
-159  + regionDir.getName() + " into 
" + regionArchiveDir
-160  + ". Something is probably awry 
on the filesystem.",
-161  
Collections2.transform(failedArchive, FUNC_FILE_TO_PATH));
-162}
-163// if that was successful, then we 
delete the region
-164return 
deleteRegionWithoutArchiving(fs, regionDir);
-165  }
-166
-167  /**
-168   * Remove from the specified region the 
store files of the specified column family,
-169   * either by archiving them or outright 
deletion
-170   * @param fs the filesystem where the 
store files live
-171   * @param conf {@link Configuration} to 
examine to determine the archive directory
-172   * @param parent Parent region hosting 
the store files
-173   * @param tableDir {@link Path} to 
where the table is being stored (for building the archive path)
-174   * @param family the family hosting the 
store files
-175   * @throws IOException if the files 
could not be correctly disposed.
-176   */
-177  public static void 
archiveFamily(FileSystem fs, Configuration conf,
-178  RegionInfo parent, Path tableDir, 
byte[] family) throws IOException {
-179Path familyDir = new Path(tableDir, 
new Path(parent.getEncodedName(), Bytes.toString(family)));
-180archiveFamilyByFamilyDir(fs, conf, 
parent, familyDir, family);
-181  }
-182
-183  /**
-184   * Removes from the specified region 
the store files of the specified column family,
-185   * either by archiving them or outright 
deletion
-186   * @param fs the filesystem where the 
store files live
-187   * @param conf {@link Configuration} to 
examine to determine the archive directory
-188   * @param parent Parent region hosting 
the store files
-189   * @param familyDir {@link Path} to 
where the family is being stored
-190   * @param family the family hosting the 
store files
-191   * @throws IOException if the files 
could not be correctly disposed.
-192   */
-193  public static void 

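archiveRegion() above selects the store directories to archive with a "directory and not hidden" PathFilter, and falls back to plain deletion when archiving is not possible. A simplified, hedged sketch of that filtering step using only the public Hadoop FileSystem API (FSUtils.DirFilter from the HBase source is replaced here by an explicit isDirectory() check):

import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;

public final class StoreDirFilterSketch {
  static FileStatus[] listStoreDirs(FileSystem fs, Path regionDir) throws IOException {
    // Skip hidden entries (names starting with "."), as the nonHidden filter above does.
    PathFilter nonHidden = p -> !p.getName().startsWith(".");
    FileStatus[] children = fs.listStatus(regionDir, nonHidden);
    // Keep only directories, mirroring the dirFilter.accept(file) half of the check.
    return Arrays.stream(children).filter(FileStatus::isDirectory).toArray(FileStatus[]::new);
  }
}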
[32/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
index 8d19f36..0c03788 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
@@ -207,7 +207,7 @@ extends 
 
 Methods inherited from classorg.apache.hadoop.hbase.HTableDescriptor
-addCoprocessor,
 addCoprocessor,
 addCoprocessorWithSpec,
 addFamily,
 compareTo,
 equals,
 getColumnFamilies,
 getColumnFamily, getColumnFamilyCount,
 getColumnFamilyNames,
 getConfiguration,
 getConfigurationValue,
 getCoprocessors,
 getDurability,
 getFamilies,
 getFamiliesKeys,
 getFamily,
 getFlushPolicyClassName,
 getMaxFileSize,
 getMemStoreFlushSize,
 getNameAsString,
 getOwnerString,
 getPriority,
 getRegionReplication,
 getRegionSplitPolic
 yClassName, getTableName,
 getValue,
 getValue,
 getValue,
 getValues,
 hasColumnFamily,
 hasCoprocessor,
 hasFamily,
 hashCode<
 /a>, hasRegionMemstoreReplication,
 hasRegionMemStoreReplication,
 isCompactionEnabled,
 isMetaRegion,
 isMetaTable,
 isNormalizationEnabled,
 isReadOnly,
 isRootRegion,
 modifyFamily, 
parseFrom,
 remove,
 remove,
 remove,
 removeConfiguration,
 removeCoprocessor,
 removeFamily,
 setCompac
 tionEnabled, setConfiguration,
 setDurability,
 setFlushPolicyClassName,
 setMaxFileSize,
 setMemStoreFlushSize,
 setNormalizationEnabled,
 setOwner,
 setOwnerString, 
setPriority,
 setReadOnly,
 setRegionMemstoreReplication,
 setRegionMemStoreReplication,
 setRegionReplication,
 setRegionSplitPolicyClassName,
 setValue,
 setValue,
 setValue,
 toByteArray,
 toString,
 toStringCustomizedValues,
 toStringTableAttributes
+addCoprocessor,
 addCoprocessor,
 addCoprocessorWithSpec,
 addFamily,
 compareTo,
 equals,
 getColumnFamilies,
 getColumnFamily, getColumnFamilyCount,
 getColumnFamilyNames,
 getConfiguration,
 getConfigurationValue,
 getCoprocessorDescriptors,
 getCoprocessors,
 getDurability,
 getFamilies,
 getFamiliesKeys,
 getFamily,
 getFlushPolicyClassName,
 getMaxFileSize,
 getMemStoreFlushSize,
 getNameAsString,
 getOwnerString,
 getPriority,
 getRegionReplicati
 on, getRegionSplitPolicyClassName,
 getTableName,
 getValue,
 getValue,
 getValue,
 getValues,
 hasColumnFamily,
 hasCoprocessor,
 hasFamily, hashCode,
 hasRegionMemstoreReplication,
 hasRegionMemStoreReplication,
 isCompactionEnabled,
 isMetaRegion,
 isMetaTable,
 isNormalizationEnabled,
 isReadOnly,
 isRootRegion, modifyFamily,
 parseFrom,
 remove,
 remove,
 remove,
 removeConfiguration,
 removeCoprocessor,
 removeFamily, setCompactionEnabled,
 setConfiguration,
 setDurability,
 setFlushPolicyClassName,
 setMaxFileSize,
 setMemStoreFlushSize,
 setNormalizationEnabled,
 setOwner,
 setOwnerString,
 setPriority,
 setReadOnly,
 setRegionMemstoreReplication,
 setRegionMemStoreReplication,
 setRegionReplication,
 setRegionSplitPolicyClassName,
 setValue,
 setValue,
 setValue,
 toByteArray,
 toString,
 toStringCustomizedValues,
 toStringTableAttributes
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/client/TableDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/TableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptor.html
index acdc691..373c742 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/TableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptor.html
@@ -183,10 +183,9 @@ public interface getComparator(https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">ComparatorColumnFamilyDescriptorcfComparator)
 
 
-https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">Collectionhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-getCoprocessors()
-Return the list of attached co-processor represented by 
their name
- className
+https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionCoprocessorDescriptor
+getCoprocessorDescriptors()
+Return the list of attached co-processor represented
 
 
 
@@ -391,18 +390,17 @@ static finalhttps://docs.oracle.com/javase/8/docs/api/java/util/C
 
 
 
-
+
 
 
 
 
-getCoprocessors
-https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">Collectionhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class 

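The TableDescriptor diff above swaps the String-based getCoprocessors() listing for getCoprocessorDescriptors(). A hedged sketch of reading the descriptors, assuming a TableDescriptor fetched elsewhere:

import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;

public final class CoprocessorListingSketch {
  static void demo(TableDescriptor td) {
    for (CoprocessorDescriptor cp : td.getCoprocessorDescriptors()) {
      // Print the coprocessor implementation class name for each attached coprocessor.
      System.out.println(cp.getClassName());
    }
  }
}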
[32/51] [partial] hbase-site git commit: Published site at 31da4d0bce69b3a47066a5df675756087ce4dc60.

2018-03-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a754d895/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
 
b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
index 48da12a..89ff005 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static enum MasterRpcServices.BalanceSwitchMode
+static enum MasterRpcServices.BalanceSwitchMode
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumMasterRpcServices.BalanceSwitchMode
 
 
@@ -210,7 +210,7 @@ the order they are declared.
 
 
 SYNC
-public static finalMasterRpcServices.BalanceSwitchMode SYNC
+public static finalMasterRpcServices.BalanceSwitchMode SYNC
 
 
 
@@ -219,7 +219,7 @@ the order they are declared.
 
 
 ASYNC
-public static finalMasterRpcServices.BalanceSwitchMode ASYNC
+public static finalMasterRpcServices.BalanceSwitchMode ASYNC
 
 
 



[32/51] [partial] hbase-site git commit: Published site at 6b77786dfc46d25ac5bb5f1c8a4a9eb47b52a604.

2018-03-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/81cde4ce/devapidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html
index cabc286..e959408 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html
@@ -128,7 +128,7 @@
 
 
 boolean
-ClusterConnection.isMasterRunning()
+ConnectionImplementation.isMasterRunning()
 Deprecated.
 this has been deprecated 
without a replacement
 
@@ -136,7 +136,7 @@
 
 
 boolean
-ConnectionImplementation.isMasterRunning()
+ClusterConnection.isMasterRunning()
 Deprecated.
 this has been deprecated 
without a replacement
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/81cde4ce/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
index 26611ed..bba209a 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
@@ -270,31 +270,31 @@
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureNamespaceDescriptor
-AsyncAdmin.getNamespaceDescriptor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
-Get a namespace descriptor by name
-
+AsyncHBaseAdmin.getNamespaceDescriptor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureNamespaceDescriptor
-RawAsyncHBaseAdmin.getNamespaceDescriptor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+AsyncAdmin.getNamespaceDescriptor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+Get a namespace descriptor by name
+
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureNamespaceDescriptor
-AsyncHBaseAdmin.getNamespaceDescriptor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+RawAsyncHBaseAdmin.getNamespaceDescriptor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListNamespaceDescriptor
-AsyncAdmin.listNamespaceDescriptors()
-List available namespace descriptors
-
+AsyncHBaseAdmin.listNamespaceDescriptors()
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListNamespaceDescriptor
-RawAsyncHBaseAdmin.listNamespaceDescriptors()
+AsyncAdmin.listNamespaceDescriptors()
+List available namespace descriptors
+
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListNamespaceDescriptor
-AsyncHBaseAdmin.listNamespaceDescriptors()
+RawAsyncHBaseAdmin.listNamespaceDescriptors()
 
 
 
@@ -307,9 +307,7 @@
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
-AsyncAdmin.createNamespace(NamespaceDescriptordescriptor)
-Create a new namespace.
-

[32/51] [partial] hbase-site git commit: Published site at 1384da71375427b522b09f06862bb5d629cef52f.

2018-03-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d347bde8/devapidocs/org/apache/hadoop/hbase/class-use/ClusterMetrics.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ClusterMetrics.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ClusterMetrics.html
index 1621237..11d7754 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ClusterMetrics.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ClusterMetrics.html
@@ -242,27 +242,27 @@
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-AsyncHBaseAdmin.getClusterMetrics()
+AsyncAdmin.getClusterMetrics()
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-AsyncAdmin.getClusterMetrics()
+RawAsyncHBaseAdmin.getClusterMetrics()
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-RawAsyncHBaseAdmin.getClusterMetrics()
+AsyncHBaseAdmin.getClusterMetrics()
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-AsyncHBaseAdmin.getClusterMetrics(https://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
+AsyncAdmin.getClusterMetrics(https://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-AsyncAdmin.getClusterMetrics(https://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
+RawAsyncHBaseAdmin.getClusterMetrics(https://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
-RawAsyncHBaseAdmin.getClusterMetrics(https://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
+AsyncHBaseAdmin.getClusterMetrics(https://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
 
 
 
@@ -408,11 +408,11 @@
 
 
 void
-BaseLoadBalancer.setClusterMetrics(ClusterMetricsst)
+RegionLocationFinder.setClusterMetrics(ClusterMetricsstatus)
 
 
 void
-RegionLocationFinder.setClusterMetrics(ClusterMetricsstatus)
+BaseLoadBalancer.setClusterMetrics(ClusterMetricsst)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d347bde8/devapidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
index 804b5df..a8beae7 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CompareOperator.html
@@ -186,94 +186,94 @@ the order they are declared.
 
 
 boolean
-Table.checkAndDelete(byte[]row,
+HTable.checkAndDelete(byte[]row,
   byte[]family,
   byte[]qualifier,
   CompareOperatorop,
   byte[]value,
   Deletedelete)
-Deprecated.
-Since 2.0.0. Will be 
removed in 3.0.0. Use Table.checkAndMutate(byte[],
 byte[])
-
+Deprecated.
 
 
 
 boolean
-HTable.checkAndDelete(byte[]row,
+Table.checkAndDelete(byte[]row,
   byte[]family,
   byte[]qualifier,
   CompareOperatorop,
   byte[]value,
   Deletedelete)
-Deprecated.
+Deprecated.
+Since 2.0.0. Will be 
removed in 3.0.0. Use Table.checkAndMutate(byte[],
 byte[])
+
 
 
 
 boolean
-Table.checkAndMutate(byte[]row,
+HTable.checkAndMutate(byte[]row,
   byte[]family,
   byte[]qualifier,
   CompareOperatorop,
   byte[]value,
-  RowMutationsmutation)
-Deprecated.
-Since 2.0.0. Will be 
removed in 3.0.0. Use Table.checkAndMutate(byte[],
 byte[])
-
+  

[32/51] [partial] hbase-site git commit: Published site at b7b86839250bf9b295ebc1948826f43a88736d6c.

2018-03-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6b94a2f2/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStateStore.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStateStore.html 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStateStore.html
index 7426697..c0243ed 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStateStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStateStore.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":9,"i4":9,"i5":9,"i6":9,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":9,"i5":9,"i6":9,"i7":9,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -205,10 +205,14 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 deleteRegions(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInforegions)
 
 
+private long
+getOpenSeqNumForParentRegion(RegionInforegion)
+
+
 private int
 getRegionReplication(TableDescriptorhtd)
 
-
+
 (package private) static ServerName
 getRegionServer(Resultr,
intreplicaId)
@@ -216,55 +220,63 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
  where the region is transitioning.
 
 
-
+
 static RegionState.State
 getRegionState(Resultr,
   intreplicaId)
 Pull the region state from a catalog table Result.
 
 
-
+
 private static byte[]
 getServerNameColumn(intreplicaId)
 
-
+
 private static byte[]
 getStateColumn(intreplicaId)
 
-
+
 private TableDescriptor
 getTableDescriptor(TableNametableName)
 
-
+
+private boolean
+hasSerialReplicationScope(TableDescriptorhtd)
+
+
+private boolean
+hasSerialReplicationScope(TableNametableName)
+
+
 void
-mergeRegions(RegionInfoparent,
+mergeRegions(RegionInfochild,
 RegionInfohriA,
 RegionInfohriB,
 ServerNameserverName)
 
-
+
 void
 splitRegion(RegionInfoparent,
RegionInfohriA,
RegionInfohriB,
ServerNameserverName)
 
-
+
 private void
 updateMetaLocation(RegionInforegionInfo,
   ServerNameserverName)
 
-
+
 private void
 updateRegionLocation(RegionInforegionInfo,
 RegionState.Statestate,
 Putput)
 
-
+
 void
 updateRegionLocation(RegionStates.RegionStateNoderegionStateNode)
 
-
+
 private void
 updateUserRegionLocation(RegionInforegionInfo,
 RegionState.Statestate,
@@ -273,11 +285,11 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 longopenSeqNum,
 longpid)
 
-
+
 void
 visitMeta(RegionStateStore.RegionStateVisitorvisitor)
 
-
+
 private void
 visitMetaEntry(RegionStateStore.RegionStateVisitorvisitor,
   Resultresult)
@@ -444,7 +456,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 updateRegionLocation
-privatevoidupdateRegionLocation(RegionInforegionInfo,
+privatevoidupdateRegionLocation(RegionInforegionInfo,
   RegionState.Statestate,
   Putput)
throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
@@ -454,13 +466,27 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 
+
+
+
+
+
+getOpenSeqNumForParentRegion
+privatelonggetOpenSeqNumForParentRegion(RegionInforegion)
+   throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
+
+Throws:
+https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
+
+
+
 
 
 
 
 
 splitRegion
-publicvoidsplitRegion(RegionInfoparent,
+publicvoidsplitRegion(RegionInfoparent,
 RegionInfohriA,
 RegionInfohriB,
 ServerNameserverName)
@@ -477,7 +503,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 mergeRegions
-publicvoidmergeRegions(RegionInfoparent,
+publicvoidmergeRegions(RegionInfochild,
  RegionInfohriA,
  RegionInfohriB,
  ServerNameserverName)
@@ -494,7 +520,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 deleteRegion

[32/51] [partial] hbase-site git commit: Published site at 1d25b60831b8cc8f7ad5fd366f1867de5c20d2f3.

2018-03-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb05e3e3/apidocs/org/apache/hadoop/hbase/ServerMetrics.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/ServerMetrics.html 
b/apidocs/org/apache/hadoop/hbase/ServerMetrics.html
index 3b46b9c..92f46bb 100644
--- a/apidocs/org/apache/hadoop/hbase/ServerMetrics.html
+++ b/apidocs/org/apache/hadoop/hbase/ServerMetrics.html
@@ -127,7 +127,7 @@ public interface Method and Description
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 getCoprocessorNames()
 Return the RegionServer-level and Region-level 
coprocessors
 
@@ -145,7 +145,7 @@ public interface getMaxHeapSize()
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],RegionMetrics
+https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],RegionMetrics
 getRegionMetrics()
 
 
@@ -155,7 +155,7 @@ public interface 
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListReplicationLoadSource
+https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListReplicationLoadSource
 getReplicationLoadSourceList()
 Call directly from client such as hbase shell
 
@@ -271,7 +271,7 @@ public interface 
 
 getReplicationLoadSourceList
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListReplicationLoadSourcegetReplicationLoadSourceList()
+https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListReplicationLoadSourcegetReplicationLoadSourceList()
 Call directly from client such as hbase shell
 
 Returns:
@@ -300,7 +300,7 @@ public interface 
 
 getRegionMetrics
-http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],RegionMetricsgetRegionMetrics()
+https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],RegionMetricsgetRegionMetrics()
 
 Returns:
 region load metrics
@@ -313,7 +313,7 @@ public interface 
 
 getCoprocessorNames
-http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetCoprocessorNames()
+https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetCoprocessorNames()
 Return the RegionServer-level and Region-level 
coprocessors
 
 Returns:

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb05e3e3/apidocs/org/apache/hadoop/hbase/ServerName.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/ServerName.html 
b/apidocs/org/apache/hadoop/hbase/ServerName.html
index 565ca9a..e64412a 100644
--- a/apidocs/org/apache/hadoop/hbase/ServerName.html
+++ b/apidocs/org/apache/hadoop/hbase/ServerName.html
@@ -97,7 +97,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
 
 
 org.apache.hadoop.hbase.ServerName
@@ -109,14 +109,14 @@ var activeTableTab = "activeTableTab";
 
 
 All Implemented Interfaces:
-http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable, http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableServerName
+https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable, https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableServerName
 
 
 
 @InterfaceAudience.Public
 

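The ServerMetrics accessors above (getRegionMetrics(), getCoprocessorNames(), getReplicationLoadSourceList()) are reached per server through ClusterMetrics.getLiveServerMetrics(). A hedged sketch using a synchronous Admin handle:

import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.hbase.ClusterMetrics;
import org.apache.hadoop.hbase.ServerMetrics;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.Admin;

public final class ServerMetricsSketch {
  static void demo(Admin admin) throws IOException {
    ClusterMetrics cluster = admin.getClusterMetrics();
    for (Map.Entry<ServerName, ServerMetrics> e : cluster.getLiveServerMetrics().entrySet()) {
      ServerMetrics sm = e.getValue();
      // Region-level metrics are keyed by encoded region name (byte[]).
      System.out.println(e.getKey() + " regions=" + sm.getRegionMetrics().size()
          + " coprocessors=" + sm.getCoprocessorNames());
    }
  }
}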
[32/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/client/HTable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/HTable.html 
b/devapidocs/org/apache/hadoop/hbase/client/HTable.html
index f06bd9c..3341a8c 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/HTable.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/HTable.html
@@ -1055,7 +1055,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/c
 
 
 getMaxKeyValueSize
-public staticintgetMaxKeyValueSize(org.apache.hadoop.conf.Configurationconf)
+public staticintgetMaxKeyValueSize(org.apache.hadoop.conf.Configurationconf)
 
 Returns:
 maxKeyValueSize from configuration.
@@ -1068,7 +1068,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/c
 
 
 getConfiguration
-publicorg.apache.hadoop.conf.ConfigurationgetConfiguration()
+publicorg.apache.hadoop.conf.ConfigurationgetConfiguration()
 Description copied from 
interface:Table
 Returns the Configuration object used by this 
instance.
  
@@ -1086,7 +1086,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/c
 
 
 getName
-publicTableNamegetName()
+publicTableNamegetName()
 Description copied from 
interface:Table
 Gets the fully qualified table name instance of this 
table.
 
@@ -1101,7 +1101,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/c
 
 
 getConnection
-protectedConnectiongetConnection()
+protectedConnectiongetConnection()
 INTERNAL Used by unit tests and tools to do 
low-level
  manipulations.
 
@@ -1117,7 +1117,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/c
 
 getTableDescriptor
 http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
-publicHTableDescriptorgetTableDescriptor()
+publicHTableDescriptorgetTableDescriptor()
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Deprecated.
 Description copied from 
interface:Table
@@ -1136,7 +1136,7 @@ public
 
 getDescriptor
-publicTableDescriptorgetDescriptor()
+publicTableDescriptorgetDescriptor()
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
interface:Table
 Gets the table 
descriptor for this table.
@@ -1154,7 +1154,7 @@ public
 
 getKeysAndRegionsInRange
-privatePairhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionLocationgetKeysAndRegionsInRange(byte[]startKey,
+privatePairhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionLocationgetKeysAndRegionsInRange(byte[]startKey,
   
byte[]endKey,
   
booleanincludeEndKey)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
@@ -1180,7 +1180,7 @@ public
 
 getKeysAndRegionsInRange
-privatePairhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionLocationgetKeysAndRegionsInRange(byte[]startKey,
+privatePairhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionLocationgetKeysAndRegionsInRange(byte[]startKey,
   
byte[]endKey,
   
booleanincludeEndKey,
   
booleanreload)
@@ -1208,7 +1208,7 @@ public
 
 getScanner
-publicResultScannergetScanner(Scanscan)
+publicResultScannergetScanner(Scanscan)
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 The underlying HTable must 

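getScanner(Scan) above returns a ResultScanner that must be closed by the caller. A hedged usage sketch with try-with-resources, assuming a Table handle and an illustrative column family:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public final class ScanSketch {
  static void demo(Table table) throws IOException {
    Scan scan = new Scan();
    scan.addFamily(Bytes.toBytes("cf"));   // illustrative family
    scan.setCaching(100);                  // rows fetched per RPC, illustrative value
    try (ResultScanner scanner = table.getScanner(scan)) {
      for (Result r : scanner) {
        System.out.println(Bytes.toString(r.getRow()));
      }
    } // scanner closed here, releasing server-side resources
  }
}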
[32/51] [partial] hbase-site git commit: Published site at .

2018-02-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/991224b9/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferExtendedCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferExtendedCell.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferExtendedCell.html
index d143ef8..4583895 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferExtendedCell.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferExtendedCell.html
@@ -258,7 +258,7 @@
 250
 251@Override
 252public long heapSize() {
-253  long sum = HEAP_SIZE_OVERHEAD + 
estimatedHeapSizeOf(cell);
+253  long sum = HEAP_SIZE_OVERHEAD + 
estimatedSizeOfCell(cell);
 254  if (this.tags != null) {
 255sum += 
ClassSize.sizeOf(this.tags);
 256  }
@@ -454,7 +454,7 @@
 446
 447@Override
 448public long heapSize() {
-449  long sum = HEAP_SIZE_OVERHEAD + 
estimatedHeapSizeOf(cell);
+449  long sum = HEAP_SIZE_OVERHEAD + 
estimatedSizeOfCell(cell);
 450  // this.tags is on heap byte[]
 451  if (this.tags != null) {
 452sum += 
ClassSize.sizeOf(this.tags);
@@ -2791,192 +2791,193 @@
 2783   * {@link HeapSize} we call {@link 
HeapSize#heapSize()} so cell can give a correct value. In other
 2784   * cases we just consider the bytes 
occupied by the cell components ie. row, CF, qualifier,
 2785   * timestamp, type, value and tags.
-2786   * @param cell
-2787   * @return estimate of the heap 
space
-2788   */
-2789  public static long 
estimatedHeapSizeOf(final Cell cell) {
-2790if (cell instanceof HeapSize) {
-2791  return ((HeapSize) 
cell).heapSize();
-2792}
-2793// TODO: Add sizing of references 
that hold the row, family, etc., arrays.
-2794return 
estimatedSerializedSizeOf(cell);
-2795  }
-2796
-2797  /**
-2798   * This method exists just to 
encapsulate how we serialize keys. To be replaced by a factory that
-2799   * we query to figure what the Cell 
implementation is and then, what serialization engine to use
-2800   * and further, how to serialize the 
key for inclusion in hfile index. TODO.
-2801   * @param cell
-2802   * @return The key portion of the Cell 
serialized in the old-school KeyValue way or null if passed
-2803   * a null 
codecell/code
-2804   */
-2805  public static byte[] 
getCellKeySerializedAsKeyValueKey(final Cell cell) {
-2806if (cell == null) return null;
-2807byte[] b = new 
byte[KeyValueUtil.keyLength(cell)];
-2808KeyValueUtil.appendKeyTo(cell, b, 
0);
-2809return b;
-2810  }
-2811
-2812  /**
-2813   * Create a Cell that is smaller than 
all other possible Cells for the given Cell's row.
-2814   * @param cell
-2815   * @return First possible Cell on 
passed Cell's row.
-2816   */
-2817  public static Cell 
createFirstOnRow(final Cell cell) {
-2818if (cell instanceof 
ByteBufferExtendedCell) {
-2819  return new 
FirstOnRowByteBufferExtendedCell(
-2820  ((ByteBufferExtendedCell) 
cell).getRowByteBuffer(),
-2821  ((ByteBufferExtendedCell) 
cell).getRowPosition(), cell.getRowLength());
-2822}
-2823return new 
FirstOnRowCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
-2824  }
-2825
-2826  public static Cell 
createFirstOnRow(final byte[] row, int roffset, short rlength) {
-2827return new FirstOnRowCell(row, 
roffset, rlength);
-2828  }
-2829
-2830  public static Cell 
createFirstOnRow(final byte[] row, final byte[] family, final byte[] col) {
-2831return createFirstOnRow(row, 0, 
(short) row.length, family, 0, (byte) family.length, col, 0,
-2832col.length);
-2833  }
-2834
-2835  public static Cell 
createFirstOnRow(final byte[] row, int roffset, short rlength,
-2836  final byte[] family, int foffset, 
byte flength, final byte[] col, int coffset, int clength) {
-2837return new FirstOnRowColCell(row, 
roffset, rlength, family, foffset, flength, col, coffset,
-2838clength);
-2839  }
-2840
-2841  public static Cell 
createFirstOnRow(final byte[] row) {
-2842return createFirstOnRow(row, 0, 
(short) row.length);
-2843  }
-2844
-2845  public static Cell 
createFirstOnRowFamily(Cell cell, byte[] fArray, int foff, int flen) {
-2846if (cell instanceof 
ByteBufferExtendedCell) {
-2847  return new 
FirstOnRowColByteBufferExtendedCell(
-2848  ((ByteBufferExtendedCell) 
cell).getRowByteBuffer(),
-2849  ((ByteBufferExtendedCell) 
cell).getRowPosition(), cell.getRowLength(),
-2850  ByteBuffer.wrap(fArray), foff, 
(byte) flen, HConstants.EMPTY_BYTE_BUFFER, 0, 0);
-2851}
-2852return new 
FirstOnRowColCell(cell.getRowArray(), cell.getRowOffset(), 
cell.getRowLength(),
-2853fArray, foff, (byte) flen, 
HConstants.EMPTY_BYTE_ARRAY, 0, 0);
-2854  }
-2855
-2856  

[32/51] [partial] hbase-site git commit: Published site at .

2018-02-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/193b4259/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
index 00483be..c27b109 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
@@ -396,888 +396,887 @@
 388new 
ConcurrentHashMapCancellableRegionServerCallable, Boolean());
 389this.asyncProcess = asyncProcess;
 390this.errorsByServer = 
createServerErrorTracker();
-391this.errors = 
(asyncProcess.globalErrors != null)
-392? asyncProcess.globalErrors : new 
BatchErrors();
-393this.operationTimeout = 
task.getOperationTimeout();
-394this.rpcTimeout = 
task.getRpcTimeout();
-395this.currentCallable = 
task.getCallable();
-396if (task.getCallable() == null) {
-397  tracker = new 
RetryingTimeTracker().start();
-398}
-399  }
-400
-401  @VisibleForTesting
-402  protected Set<CancellableRegionServerCallable> getCallsInProgress() {
-403return callsInProgress;
-404  }
-405
-406  @VisibleForTesting
-407  SingleServerRequestRunnable createSingleServerRequest(MultiAction multiAction, int numAttempt, ServerName server,
-408      Set<CancellableRegionServerCallable> callsInProgress) {
-409    return new SingleServerRequestRunnable(multiAction, numAttempt, server, callsInProgress);
-410  }
-411
-412  /**
-413   * Group a list of actions per region 
servers, and send them.
-414   *
-415   * @param currentActions - the list of 
row to submit
-416   * @param numAttempt - the current 
numAttempt (first attempt is 1)
-417   */
-418  void groupAndSendMultiAction(List<Action> currentActions, int numAttempt) {
-419    Map<ServerName, MultiAction> actionsByServer = new HashMap<>();
-420
-421    boolean isReplica = false;
-422    List<Action> unknownReplicaActions = null;
-423for (Action action : currentActions) 
{
-424  RegionLocations locs = 
findAllLocationsOrFail(action, true);
-425  if (locs == null) continue;
-426  boolean isReplicaAction = 
!RegionReplicaUtil.isDefaultReplica(action.getReplicaId());
-427      if (isReplica && !isReplicaAction) {
-428// This is the property of the 
current implementation, not a requirement.
-429throw new AssertionError("Replica 
and non-replica actions in the same retry");
-430  }
-431  isReplica = isReplicaAction;
-432  HRegionLocation loc = 
locs.getRegionLocation(action.getReplicaId());
-433  if (loc == null || 
loc.getServerName() == null) {
-434if (isReplica) {
-435  if (unknownReplicaActions == 
null) {
-436unknownReplicaActions = new 
ArrayList(1);
-437  }
-438  
unknownReplicaActions.add(action);
-439} else {
-440  // TODO: relies on primary 
location always being fetched
-441  manageLocationError(action, 
null);
-442}
-443  } else {
-444byte[] regionName = 
loc.getRegionInfo().getRegionName();
-445
AsyncProcess.addAction(loc.getServerName(), regionName, action, 
actionsByServer, nonceGroup);
-446  }
-447}
-448    boolean doStartReplica = (numAttempt == 1 && !isReplica && hasAnyReplicaGets);
-449    boolean hasUnknown = unknownReplicaActions != null && !unknownReplicaActions.isEmpty();
-450
-451    if (!actionsByServer.isEmpty()) {
-452      // If this is a first attempt to group and send, no replicas, we need replica thread.
-453      sendMultiAction(actionsByServer, numAttempt, (doStartReplica && !hasUnknown)
-454          ? currentActions : null, numAttempt > 1 && !hasUnknown);
-455    }
-456
-457if (hasUnknown) {
-458      actionsByServer = new HashMap<>();
-459  for (Action action : 
unknownReplicaActions) {
-460HRegionLocation loc = 
getReplicaLocationOrFail(action);
-461if (loc == null) continue;
-462byte[] regionName = 
loc.getRegionInfo().getRegionName();
-463
AsyncProcess.addAction(loc.getServerName(), regionName, action, 
actionsByServer, nonceGroup);
-464  }
-465  if (!actionsByServer.isEmpty()) {
-466sendMultiAction(
-467actionsByServer, numAttempt, 
doStartReplica ? currentActions : null, true);
-468  }
-469}
-470  }
-471
-472  private HRegionLocation 
getReplicaLocationOrFail(Action action) {
-473// We are going to try get location 
once again. For each action, we'll do it once
-474// from cache, because the previous 
calls in the loop might populate it.
-475int replicaId = 
action.getReplicaId();
-476RegionLocations locs = 
findAllLocationsOrFail(action, true);
-477if (locs == null) return null; // 
manageError already called
-478
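
groupAndSendMultiAction() above buckets the submitted actions by the server currently hosting each region before dispatching one MultiAction per server. A deliberately simplified, hedged illustration of that bucketing step using plain collections (the location lookup is a stub; the real code resolves RegionLocations as shown):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public final class GroupBySketch {
  // Stand-in for "which server hosts this action's region"; purely illustrative.
  static String locate(String action) {
    return "server-" + (Math.abs(action.hashCode()) % 3);
  }

  static Map<String, List<String>> groupByServer(List<String> actions) {
    Map<String, List<String>> actionsByServer = new HashMap<>();
    for (String action : actions) {
      actionsByServer.computeIfAbsent(locate(action), s -> new ArrayList<>()).add(action);
    }
    return actionsByServer; // each bucket would become one per-server MultiAction RPC
  }
}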

[32/51] [partial] hbase-site git commit: Published site at .

2018-02-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/94208cfe/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
index 5ba2deb..024eca4 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
@@ -106,11 +106,11 @@
 
 
 private AsyncConnectionImpl
-RawAsyncTableImpl.conn
+AsyncClientScanner.conn
 
 
 private AsyncConnectionImpl
-AsyncBatchRpcRetryingCaller.conn
+AsyncRpcRetryingCallerFactory.conn
 
 
 private AsyncConnectionImpl
@@ -118,19 +118,19 @@
 
 
 private AsyncConnectionImpl
-RegionCoprocessorRpcChannelImpl.conn
+RawAsyncTableImpl.conn
 
 
-protected AsyncConnectionImpl
-AsyncRpcRetryingCaller.conn
+private AsyncConnectionImpl
+RegionCoprocessorRpcChannelImpl.conn
 
 
 private AsyncConnectionImpl
-AsyncClientScanner.conn
+AsyncBatchRpcRetryingCaller.conn
 
 
-private AsyncConnectionImpl
-AsyncRpcRetryingCallerFactory.conn
+protected AsyncConnectionImpl
+AsyncRpcRetryingCaller.conn
 
 
 private AsyncConnectionImpl

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/94208cfe/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
index e71ca45..d6b1759 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
@@ -105,13 +105,13 @@
 
 
 
-private AsyncMasterRequestRpcRetryingCaller.CallableT
-AsyncMasterRequestRpcRetryingCaller.callable
-
-
 private AsyncMasterRequestRpcRetryingCaller.CallableT
 AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder.callable
 
+
+private AsyncMasterRequestRpcRetryingCaller.CallableT
+AsyncMasterRequestRpcRetryingCaller.callable
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/94208cfe/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
index 60fbcff..f31564e 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
@@ -106,11 +106,11 @@
 
 
 private AsyncProcess
-BufferedMutatorImpl.ap
+HTableMultiplexer.FlushWorker.ap
 
 
 private AsyncProcess
-HTableMultiplexer.FlushWorker.ap
+BufferedMutatorImpl.ap
 
 
 private AsyncProcess
@@ -137,11 +137,11 @@
 
 
 AsyncProcess
-ClusterConnection.getAsyncProcess()
+ConnectionImplementation.getAsyncProcess()
 
 
 AsyncProcess
-ConnectionImplementation.getAsyncProcess()
+ClusterConnection.getAsyncProcess()
 
 
 (package private) AsyncProcess

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/94208cfe/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html
index c610e19..9a8d746 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html
@@ -106,11 +106,11 @@
 
 
 private AsyncRegionLocator
-AsyncConnectionImpl.locator
+AsyncTableRegionLocatorImpl.locator
 
 
 private AsyncRegionLocator
-AsyncTableRegionLocatorImpl.locator
+AsyncConnectionImpl.locator
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/94208cfe/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html
index a970ce5..06fd193 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html
@@ -126,13 +126,13 @@
 
 
 
-(package private) AsyncRegistry
-AsyncConnectionImpl.registry
-
-
 private AsyncRegistry
 AsyncMetaRegionLocator.registry
 
+
+(package private) AsyncRegistry
+AsyncConnectionImpl.registry
+
 
 
 


[32/51] [partial] hbase-site git commit: Published site at .

2018-02-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cd17dc5/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index 6410159..46f185a 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -239,15 +239,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ServerName
-ServerMetricsBuilder.serverName
+HRegionLocation.serverName
 
 
 private ServerName
-ServerMetricsBuilder.ServerMetricsImpl.serverName
+ServerMetricsBuilder.serverName
 
 
 private ServerName
-HRegionLocation.serverName
+ServerMetricsBuilder.ServerMetricsImpl.serverName
 
 
 
@@ -306,7 +306,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ServerName
-ClusterMetricsBuilder.ClusterMetricsImpl.getMasterName()
+ClusterMetrics.getMasterName()
+Returns detailed information about the current master ServerName.
+
 
 
 ServerName
@@ -316,15 +318,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ServerName
-ClusterMetrics.getMasterName()
-Returns detailed information about the current master ServerName.
-
+ClusterMetricsBuilder.ClusterMetricsImpl.getMasterName()
 
 
 ServerName
-ServerLoad.getServerName()
-Deprecated.
-
+HRegionLocation.getServerName()
 
 
 ServerName
@@ -332,11 +330,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ServerName
-ServerMetricsBuilder.ServerMetricsImpl.getServerName()
+ServerLoad.getServerName()
+Deprecated.
+
 
 
 ServerName
-HRegionLocation.getServerName()
+ServerMetricsBuilder.ServerMetricsImpl.getServerName()
 
 
 ServerName
@@ -405,7 +405,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterMetricsBuilder.ClusterMetricsImpl.getBackupMasterNames()
+ClusterMetrics.getBackupMasterNames()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
@@ -415,7 +415,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterMetrics.getBackupMasterNames()
+ClusterMetricsBuilder.ClusterMetricsImpl.getBackupMasterNames()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
@@ -428,7 +428,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterMetricsBuilder.ClusterMetricsImpl.getDeadServerNames()
+ClusterMetrics.getDeadServerNames()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
@@ -438,7 +438,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterMetrics.getDeadServerNames()
+ClusterMetricsBuilder.ClusterMetricsImpl.getDeadServerNames()
 
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerLoad
@@ -448,7 +448,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerMetrics
-ClusterMetricsBuilder.ClusterMetricsImpl.getLiveServerMetrics()
+ClusterMetrics.getLiveServerMetrics()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerMetrics
@@ -458,7 +458,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerMetrics
-ClusterMetrics.getLiveServerMetrics()
+ClusterMetricsBuilder.ClusterMetricsImpl.getLiveServerMetrics()
 
 
 static PairRegionInfo,ServerName
@@ -858,31 +858,31 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ServerName
-FastFailInterceptorContext.server
+AsyncRequestFutureImpl.SingleServerRequestRunnable.server
 
 
 private ServerName

[32/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 84b554e..0c9079d 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -2055,119 +2055,119 @@ service.
 
 
 private TableName
-SnapshotDescription.table
+RegionCoprocessorRpcChannel.table
 
 
 private TableName
-RegionCoprocessorRpcChannel.table
+SnapshotDescription.table
 
 
 private TableName
-RawAsyncTableImpl.tableName
+HRegionLocator.tableName
 
 
 private TableName
-RegionServerCallable.tableName
+ScannerCallableWithReplicas.tableName
 
 
 protected TableName
-RegionAdminServiceCallable.tableName
+ClientScanner.tableName
 
 
 private TableName
-BufferedMutatorImpl.tableName
+AsyncClientScanner.tableName
 
 
 private TableName
-AsyncProcessTask.tableName
+AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName
 
 
 private TableName
-AsyncProcessTask.Builder.tableName
+AsyncRpcRetryingCallerFactory.BatchCallerBuilder.tableName
 
 
 private TableName
-AsyncRequestFutureImpl.tableName
+RegionInfoBuilder.tableName
 
 
-protected TableName
-TableBuilderBase.tableName
+private TableName
+RegionInfoBuilder.MutableRegionInfo.tableName
 
 
 private TableName
-AsyncBatchRpcRetryingCaller.tableName
+RawAsyncTableImpl.tableName
 
 
 private TableName
-RegionInfoBuilder.tableName
+RegionCoprocessorRpcChannelImpl.tableName
 
 
 private TableName
-RegionInfoBuilder.MutableRegionInfo.tableName
+AsyncTableRegionLocatorImpl.tableName
 
 
-private TableName
-HTable.tableName
+protected TableName
+RegionAdminServiceCallable.tableName
 
 
 private TableName
-TableState.tableName
+HTable.tableName
 
 
-protected TableName
-RpcRetryingCallerWithReadReplicas.tableName
+private TableName
+BufferedMutatorImpl.tableName
 
 
-protected TableName
-AsyncTableBuilderBase.tableName
+private TableName
+AsyncBatchRpcRetryingCaller.tableName
 
 
 private TableName
-AsyncSingleRequestRpcRetryingCaller.tableName
+BufferedMutatorParams.tableName
 
 
 private TableName
-ScannerCallableWithReplicas.tableName
+HBaseAdmin.TableFuture.tableName
 
 
-protected TableName
-RawAsyncHBaseAdmin.TableProcedureBiConsumer.tableName
+private TableName
+AsyncRequestFutureImpl.tableName
 
 
 private TableName
-AsyncTableRegionLocatorImpl.tableName
+AsyncProcessTask.tableName
 
 
 private TableName
-HBaseAdmin.TableFuture.tableName
+AsyncProcessTask.Builder.tableName
 
 
-private TableName
-RegionCoprocessorRpcChannelImpl.tableName
+protected TableName
+RawAsyncHBaseAdmin.TableProcedureBiConsumer.tableName
 
 
-protected TableName
-ClientScanner.tableName
+private TableName
+RegionServerCallable.tableName
 
 
 private TableName
-BufferedMutatorParams.tableName
+AsyncSingleRequestRpcRetryingCaller.tableName
 
 
-private TableName
-AsyncClientScanner.tableName
+protected TableName
+TableBuilderBase.tableName
 
 
-private TableName
-AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName
+protected TableName
+RpcRetryingCallerWithReadReplicas.tableName
 
 
-private TableName
-AsyncRpcRetryingCallerFactory.BatchCallerBuilder.tableName
+protected TableName
+AsyncTableBuilderBase.tableName
 
 
 private TableName
-HRegionLocator.tableName
+TableState.tableName
 
 
 
@@ -2209,83 +2209,83 @@ service.
 
 
 TableName
-RawAsyncTableImpl.getName()
+AsyncTable.getName()
+Gets the fully qualified table name instance of this 
table.
+
 
 
 TableName
-RegionLocator.getName()
+Table.getName()
 Gets the fully qualified table name instance of this 
table.
 
 
 
 TableName
-BufferedMutatorImpl.getName()
+HRegionLocator.getName()
 
 
 TableName
-BufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
+AsyncTableRegionLocator.getName()
+Gets the fully qualified table name instance of the table 
whose region we want to locate.
 
 
 
 TableName
-HTable.getName()
+AsyncTableImpl.getName()
 
 
 TableName
-AsyncBufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this
- AsyncBufferedMutator writes to.
-
+RawAsyncTableImpl.getName()
 
 
 TableName
-Table.getName()
-Gets the fully qualified table name instance of this 
table.
-
+AsyncTableRegionLocatorImpl.getName()
 
 
 TableName
-AsyncTableImpl.getName()
+BufferedMutator.getName()
+Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
+
 
 
 TableName
-AsyncTable.getName()
+RegionLocator.getName()
 Gets the fully qualified table name instance of this 
table.
 
 
 
 TableName
-AsyncTableRegionLocatorImpl.getName()
+AsyncBufferedMutatorImpl.getName()
 
 
 TableName
-AsyncTableRegionLocator.getName()
-Gets the fully qualified table name instance of the 

[32/51] [partial] hbase-site git commit: Published site at .

2018-02-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f272b0e8/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
index bb30224..5fb5aab 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
@@ -162,7 +162,7 @@ service.
 
 
 private static HRegionLocation
-AsyncMetaTableAccessor.getRegionLocation(Resultr,
+MetaTableAccessor.getRegionLocation(Resultr,
  RegionInforegionInfo,
  intreplicaId)
 Returns the HRegionLocation parsed from the given meta row 
Result
@@ -171,7 +171,7 @@ service.
 
 
 private static HRegionLocation
-MetaTableAccessor.getRegionLocation(Resultr,
+AsyncMetaTableAccessor.getRegionLocation(Resultr,
  RegionInforegionInfo,
  intreplicaId)
 Returns the HRegionLocation parsed from the given meta row 
Result
@@ -304,14 +304,6 @@ service.
 HTableMultiplexer.FlushWorker.addr
 
 
-HRegionLocation
-AsyncClientScanner.OpenScannerResponse.loc
-
-
-private HRegionLocation
-AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.loc
-
-
 private HRegionLocation
 AsyncScanSingleRegionRpcRetryingCaller.loc
 
@@ -320,15 +312,23 @@ service.
 AsyncBatchRpcRetryingCaller.RegionRequest.loc
 
 
-protected HRegionLocation
-RegionAdminServiceCallable.location
+HRegionLocation
+AsyncClientScanner.OpenScannerResponse.loc
 
 
+private HRegionLocation
+AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.loc
+
+
 protected HRegionLocation
 RegionServerCallable.location
 Some subclasses want to set their own location.
 
 
+
+protected HRegionLocation
+RegionAdminServiceCallable.location
+
 
 
 
@@ -371,11 +371,11 @@ service.
 
 
 protected HRegionLocation
-MultiServerCallable.getLocation()
+RegionServerCallable.getLocation()
 
 
 protected HRegionLocation
-RegionServerCallable.getLocation()
+MultiServerCallable.getLocation()
 
 
 HRegionLocation
@@ -383,44 +383,44 @@ service.
 
 
 HRegionLocation
-HRegionLocator.getRegionLocation(byte[]row)
+RegionLocator.getRegionLocation(byte[]row)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-RegionLocator.getRegionLocation(byte[]row)
+HRegionLocator.getRegionLocation(byte[]row)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-HRegionLocator.getRegionLocation(byte[]row,
+RegionLocator.getRegionLocation(byte[]row,
  booleanreload)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-RegionLocator.getRegionLocation(byte[]row,
+HRegionLocator.getRegionLocation(byte[]row,
  booleanreload)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-ConnectionImplementation.getRegionLocation(TableNametableName,
- byte[]row,
- booleanreload)
-
-
-HRegionLocation
 ClusterConnection.getRegionLocation(TableNametableName,
  byte[]row,
  booleanreload)
 Find region location hosting passed row
 
 
+
+HRegionLocation
+ConnectionImplementation.getRegionLocation(TableNametableName,
+ byte[]row,
+ booleanreload)
+
 
 private HRegionLocation
 AsyncRequestFutureImpl.getReplicaLocationOrFail(Actionaction)
@@ -434,20 +434,15 @@ service.
 
 
 HRegionLocation
-ConnectionImplementation.locateRegion(byte[]regionName)
-
-
-HRegionLocation
 ClusterConnection.locateRegion(byte[]regionName)
 Gets the location of the region of regionName.
 
 
-
+
 HRegionLocation
-ConnectionImplementation.locateRegion(TableNametableName,
-byte[]row)
+ConnectionImplementation.locateRegion(byte[]regionName)
 
-
+
 HRegionLocation
 ClusterConnection.locateRegion(TableNametableName,
 byte[]row)
@@ -455,6 +450,11 @@ service.
  lives in.
 
 
+
+HRegionLocation
+ConnectionImplementation.locateRegion(TableNametableName,
+byte[]row)
+
 
 private HRegionLocation
 AsyncNonMetaRegionLocator.locateRowBeforeInCache(AsyncNonMetaRegionLocator.TableCachetableCache,
@@ -469,17 +469,17 @@ service.
 
 
 HRegionLocation
-ConnectionImplementation.relocateRegion(TableNametableName,
-  byte[]row)
-
-
-HRegionLocation
 ClusterConnection.relocateRegion(TableNametableName,
   byte[]row)
 Find the location of the region of tableName that 
row
  lives in, ignoring any value that might be in the cache.
 
 
+
+HRegionLocation
+ConnectionImplementation.relocateRegion(TableNametableName,
+  byte[]row)
+
 
 
 
@@ -491,14 +491,14 @@ service.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionLocation
-HRegionLocator.getAllRegionLocations()
-
-

[32/51] [partial] hbase-site git commit: Published site at .

2018-02-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c83a37c8/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 3d3a67a..115dbc0 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -2055,119 +2055,119 @@ service.
 
 
 private TableName
-SnapshotDescription.table
+RegionCoprocessorRpcChannel.table
 
 
 private TableName
-RegionCoprocessorRpcChannel.table
+SnapshotDescription.table
 
 
 private TableName
-RawAsyncTableImpl.tableName
+HRegionLocator.tableName
 
 
 private TableName
-RegionServerCallable.tableName
+ScannerCallableWithReplicas.tableName
 
 
 protected TableName
-RegionAdminServiceCallable.tableName
+ClientScanner.tableName
 
 
 private TableName
-BufferedMutatorImpl.tableName
+AsyncClientScanner.tableName
 
 
 private TableName
-AsyncProcessTask.tableName
+AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName
 
 
 private TableName
-AsyncProcessTask.Builder.tableName
+AsyncRpcRetryingCallerFactory.BatchCallerBuilder.tableName
 
 
 private TableName
-AsyncRequestFutureImpl.tableName
+RegionInfoBuilder.tableName
 
 
-protected TableName
-TableBuilderBase.tableName
+private TableName
+RegionInfoBuilder.MutableRegionInfo.tableName
 
 
 private TableName
-AsyncBatchRpcRetryingCaller.tableName
+RawAsyncTableImpl.tableName
 
 
 private TableName
-RegionInfoBuilder.tableName
+RegionCoprocessorRpcChannelImpl.tableName
 
 
 private TableName
-RegionInfoBuilder.MutableRegionInfo.tableName
+AsyncTableRegionLocatorImpl.tableName
 
 
-private TableName
-HTable.tableName
+protected TableName
+RegionAdminServiceCallable.tableName
 
 
 private TableName
-TableState.tableName
+HTable.tableName
 
 
-protected TableName
-RpcRetryingCallerWithReadReplicas.tableName
+private TableName
+BufferedMutatorImpl.tableName
 
 
-protected TableName
-AsyncTableBuilderBase.tableName
+private TableName
+AsyncBatchRpcRetryingCaller.tableName
 
 
 private TableName
-AsyncSingleRequestRpcRetryingCaller.tableName
+BufferedMutatorParams.tableName
 
 
 private TableName
-ScannerCallableWithReplicas.tableName
+HBaseAdmin.TableFuture.tableName
 
 
-protected TableName
-RawAsyncHBaseAdmin.TableProcedureBiConsumer.tableName
+private TableName
+AsyncRequestFutureImpl.tableName
 
 
 private TableName
-AsyncTableRegionLocatorImpl.tableName
+AsyncProcessTask.tableName
 
 
 private TableName
-HBaseAdmin.TableFuture.tableName
+AsyncProcessTask.Builder.tableName
 
 
-private TableName
-RegionCoprocessorRpcChannelImpl.tableName
+protected TableName
+RawAsyncHBaseAdmin.TableProcedureBiConsumer.tableName
 
 
-protected TableName
-ClientScanner.tableName
+private TableName
+RegionServerCallable.tableName
 
 
 private TableName
-BufferedMutatorParams.tableName
+AsyncSingleRequestRpcRetryingCaller.tableName
 
 
-private TableName
-AsyncClientScanner.tableName
+protected TableName
+TableBuilderBase.tableName
 
 
-private TableName
-AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName
+protected TableName
+RpcRetryingCallerWithReadReplicas.tableName
 
 
-private TableName
-AsyncRpcRetryingCallerFactory.BatchCallerBuilder.tableName
+protected TableName
+AsyncTableBuilderBase.tableName
 
 
 private TableName
-HRegionLocator.tableName
+TableState.tableName
 
 
 
@@ -2209,83 +2209,83 @@ service.
 
 
 TableName
-RawAsyncTableImpl.getName()
+AsyncTable.getName()
+Gets the fully qualified table name instance of this 
table.
+
 
 
 TableName
-RegionLocator.getName()
+Table.getName()
 Gets the fully qualified table name instance of this 
table.
 
 
 
 TableName
-BufferedMutatorImpl.getName()
+HRegionLocator.getName()
 
 
 TableName
-BufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
+AsyncTableRegionLocator.getName()
+Gets the fully qualified table name instance of the table 
whose region we want to locate.
 
 
 
 TableName
-HTable.getName()
+AsyncTableImpl.getName()
 
 
 TableName
-AsyncBufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this
- AsyncBufferedMutator writes to.
-
+RawAsyncTableImpl.getName()
 
 
 TableName
-Table.getName()
-Gets the fully qualified table name instance of this 
table.
-
+AsyncTableRegionLocatorImpl.getName()
 
 
 TableName
-AsyncTableImpl.getName()
+BufferedMutator.getName()
+Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
+
 
 
 TableName
-AsyncTable.getName()
+RegionLocator.getName()
 Gets the fully qualified table name instance of this 
table.
 
 
 
 TableName
-AsyncTableRegionLocatorImpl.getName()
+AsyncBufferedMutatorImpl.getName()
 
 
 TableName
-AsyncTableRegionLocator.getName()
-Gets the fully qualified table name instance of the 

[32/51] [partial] hbase-site git commit: Published site at .

2018-02-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncBufferedMutatorBuilder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncBufferedMutatorBuilder.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncBufferedMutatorBuilder.html
index 9f3035a..f2c8f10 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncBufferedMutatorBuilder.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncBufferedMutatorBuilder.html
@@ -121,26 +121,26 @@
 
 
 AsyncBufferedMutatorBuilder
+AsyncConnectionImpl.getBufferedMutatorBuilder(TableNametableName)
+
+
+AsyncBufferedMutatorBuilder
 AsyncConnection.getBufferedMutatorBuilder(TableNametableName)
 Returns an AsyncBufferedMutatorBuilder 
for creating AsyncBufferedMutator.
 
 
-
+
 AsyncBufferedMutatorBuilder
-AsyncConnectionImpl.getBufferedMutatorBuilder(TableNametableName)
+AsyncConnectionImpl.getBufferedMutatorBuilder(TableNametableName,
+ http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ExecutorServicepool)
 
-
+
 AsyncBufferedMutatorBuilder
 AsyncConnection.getBufferedMutatorBuilder(TableNametableName,
  http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ExecutorServicepool)
 Returns an AsyncBufferedMutatorBuilder 
for creating AsyncBufferedMutator.
 
 
-
-AsyncBufferedMutatorBuilder
-AsyncConnectionImpl.getBufferedMutatorBuilder(TableNametableName,
- http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ExecutorServicepool)
-
 
 AsyncBufferedMutatorBuilder
 AsyncBufferedMutatorBuilderImpl.setMaxAttempts(intmaxAttempts)
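
The class-use table above lists AsyncConnection#getBufferedMutatorBuilder and the builder's setMaxAttempts option. A minimal sketch of how that builder is typically driven, assuming the standard HBase 2.x async client API; the table name, family and values are illustrative only:

import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncBufferedMutator;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class AsyncBufferedMutatorSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (AsyncConnection conn = ConnectionFactory.createAsyncConnection(conf).get()) {
      // Build a buffered mutator for a hypothetical "demo" table; setMaxAttempts is
      // one of the builder options visible in the diff above.
      try (AsyncBufferedMutator mutator = conn
          .getBufferedMutatorBuilder(TableName.valueOf("demo"))
          .setMaxAttempts(3)
          .build()) {
        CompletableFuture<Void> done = mutator.mutate(
            new Put(Bytes.toBytes("row1"))
                .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v")));
        mutator.flush(); // push buffered mutations out
        done.join();     // wait for the write to complete
      }
    }
  }
}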

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
index 024eca4..5ba2deb 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
@@ -106,11 +106,11 @@
 
 
 private AsyncConnectionImpl
-AsyncClientScanner.conn
+RawAsyncTableImpl.conn
 
 
 private AsyncConnectionImpl
-AsyncRpcRetryingCallerFactory.conn
+AsyncBatchRpcRetryingCaller.conn
 
 
 private AsyncConnectionImpl
@@ -118,19 +118,19 @@
 
 
 private AsyncConnectionImpl
-RawAsyncTableImpl.conn
+RegionCoprocessorRpcChannelImpl.conn
 
 
-private AsyncConnectionImpl
-RegionCoprocessorRpcChannelImpl.conn
+protected AsyncConnectionImpl
+AsyncRpcRetryingCaller.conn
 
 
 private AsyncConnectionImpl
-AsyncBatchRpcRetryingCaller.conn
+AsyncClientScanner.conn
 
 
-protected AsyncConnectionImpl
-AsyncRpcRetryingCaller.conn
+private AsyncConnectionImpl
+AsyncRpcRetryingCallerFactory.conn
 
 
 private AsyncConnectionImpl

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
index d6b1759..e71ca45 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
@@ -105,13 +105,13 @@
 
 
 
-private AsyncMasterRequestRpcRetryingCaller.CallableT
-AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder.callable
-
-
 private AsyncMasterRequestRpcRetryingCaller.CallableT
 AsyncMasterRequestRpcRetryingCaller.callable
 
+
+private AsyncMasterRequestRpcRetryingCaller.CallableT
+AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder.callable
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
index f31564e..60fbcff 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
+++ 

[32/51] [partial] hbase-site git commit: Published site at .

2018-02-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6674e3ab/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
index 852f85c..174dfdd 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":9,"i38":10,"i39":10,"i40":10};
-var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class ReplicationSourceManager
+public class ReplicationSourceManager
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements ReplicationListener
 This class is responsible to manage all the replication 
sources. There are two classes of
@@ -329,7 +329,7 @@ implements 
-All MethodsStatic MethodsInstance MethodsConcrete Methods
+All MethodsInstance MethodsConcrete Methods
 
 Modifier and Type
 Method and Description
@@ -538,29 +538,16 @@ implements 
-(package private) void
-scopeWALEdits(WALKeylogKey,
- WALEditlogEdit)
-
-
-(package private) static void
-scopeWALEdits(WALKeylogKey,
- WALEditlogEdit,
- org.apache.hadoop.conf.Configurationconf)
-Utility method used to set the correct scopes on each log 
key.
-
-
-
 private void
 throwIOExceptionWhenFail(ReplicationSourceManager.ReplicationQueueOperationop)
 
-
+
 private void
 transferQueues(ServerNamedeadRS)
 Transfer all the queues of the specified to this region 
server.
 
 
-
+
 (package private) void
 waitUntilCanBePushed(byte[]encodedName,
 longseq,
@@ -596,7 +583,7 @@ implements 
 
 LOG
-private static finalorg.slf4j.Logger LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -605,7 +592,7 @@ implements 
 
 sources
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true;
 title="class or interface in java.util.concurrent">ConcurrentMaphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,ReplicationSourceInterface
 sources
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true;
 title="class or interface in java.util.concurrent">ConcurrentMaphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,ReplicationSourceInterface
 sources
 
 
 
@@ -614,7 +601,7 @@ implements 
 
 oldsources
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListReplicationSourceInterface
 oldsources
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListReplicationSourceInterface
 oldsources
 
 
 
@@ -623,7 +610,7 @@ implements 
 
 queueStorage
-private finalReplicationQueueStorage queueStorage
+private finalReplicationQueueStorage queueStorage
 
 
 
@@ -632,7 +619,7 @@ implements 
 
 replicationTracker
-private finalReplicationTracker replicationTracker
+private finalReplicationTracker replicationTracker
 
 
 
@@ -641,7 +628,7 @@ implements 
 
 replicationPeers
-private finalReplicationPeers replicationPeers
+private finalReplicationPeers replicationPeers
 
 
 
@@ -650,7 +637,7 @@ implements 
 
 clusterId
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/UUID.html?is-external=true;
 title="class or interface in java.util">UUID clusterId
+private 

[32/51] [partial] hbase-site git commit: Published site at .

2018-02-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1f2eeb22/devapidocs/org/apache/hadoop/hbase/regionserver/Region.Operation.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/Region.Operation.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/Region.Operation.html
index aa09a37..508f9ac 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/Region.Operation.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/Region.Operation.html
@@ -382,7 +382,7 @@ the order they are declared.
 
 
 values
-public staticRegion.Operation[]values()
+public staticRegion.Operation[]values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -402,7 +402,7 @@ for (Region.Operation c : Region.Operation.values())
 
 
 valueOf
-public staticRegion.OperationvalueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+public staticRegion.OperationvalueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1f2eeb22/devapidocs/org/apache/hadoop/hbase/regionserver/Region.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/Region.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/Region.html
index 4744420..1f13b70 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/Region.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/Region.html
@@ -188,13 +188,12 @@ extends 
 boolean
-checkAndRowMutate(byte[]row,
+checkAndRowMutate(byte[]row,
  byte[]family,
  byte[]qualifier,
  CompareOperatorop,
  ByteArrayComparablecomparator,
- RowMutationsmutations,
- booleanwriteToWAL)
+ RowMutationsmutations)
 Atomically checks if a row/family/qualifier value matches 
the expected values and if it does,
  it performs the row mutations.
 
@@ -1021,19 +1020,18 @@ extends 
+
 
 
 
 
 checkAndRowMutate
-booleancheckAndRowMutate(byte[]row,
+booleancheckAndRowMutate(byte[]row,
   byte[]family,
   byte[]qualifier,
   CompareOperatorop,
   ByteArrayComparablecomparator,
-  RowMutationsmutations,
-  booleanwriteToWAL)
+  RowMutationsmutations)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Atomically checks if a row/family/qualifier value matches 
the expected values and if it does,
  it performs the row mutations. If the passed value is null, the lack of 
column value
@@ -1047,7 +1045,6 @@ extends Returns:
 true if mutations were applied, false otherwise
 Throws:
@@ -1061,7 +1058,7 @@ extends 
 
 delete
-voiddelete(Deletedelete)
+voiddelete(Deletedelete)
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Deletes the specified cells/row.
 
@@ -1078,7 +1075,7 @@ extends 
 
 get
-Resultget(Getget)
+Resultget(Getget)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Do a get based on the get parameter.
 
@@ -1097,7 +1094,7 @@ extends 
 
 get
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellget(Getget,
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellget(Getget,
booleanwithCoprocessor)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Do a get based on the get parameter.
@@ -1119,7 +1116,7 @@ extends 
 
 getScanner
-RegionScannergetScanner(Scanscan)
+RegionScannergetScanner(Scanscan)
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Return an iterator that scans over the HRegion, returning 
the indicated
  columns and rows specified by the Scan.
@@ -1141,7 +1138,7 @@ extends 
 
 getScanner
-RegionScannergetScanner(Scanscan,
+RegionScannergetScanner(Scanscan,
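
The Region interface hunk above drops the trailing writeToWAL flag from checkAndRowMutate. A hedged sketch of calling the new form from server-side code (the Region handle would normally come from a RegionCoprocessorEnvironment, and the family, qualifier and expected value here are illustrative assumptions):

import java.io.IOException;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndRowMutateSketch {
  // 'region' is assumed to come from RegionCoprocessorEnvironment#getRegion().
  static boolean mutateIfExpected(Region region) throws IOException {
    byte[] row = Bytes.toBytes("row1");
    byte[] family = Bytes.toBytes("cf");
    byte[] qualifier = Bytes.toBytes("state");

    RowMutations mutations = new RowMutations(row);
    mutations.add(new Put(row).addColumn(family, qualifier, Bytes.toBytes("UPDATED")));

    // Post-change signature from the diff above: no writeToWAL boolean any more.
    return region.checkAndRowMutate(row, family, qualifier, CompareOperator.EQUAL,
        new BinaryComparator(Bytes.toBytes("PENDING")), mutations);
  }
}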

[32/51] [partial] hbase-site git commit: Published site at .

2018-01-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/cc6597ec/testdevapidocs/org/apache/hadoop/hbase/TestInfoServers.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/TestInfoServers.html 
b/testdevapidocs/org/apache/hadoop/hbase/TestInfoServers.html
index a6ec8b4..498b8b2 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/TestInfoServers.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/TestInfoServers.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class TestInfoServers
+public class TestInfoServers
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 Testing, info servers are disabled.  This test enables then 
and checks that
  they serve pages.
@@ -132,14 +132,18 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 Field and Description
 
 
+static HBaseClassTestRule
+CLASS_RULE
+
+
 private static org.slf4j.Logger
 LOG
 
-
+
 org.junit.rules.TestName
 name
 
-
+
 private static HBaseTestingUtility
 UTIL
 
@@ -240,13 +244,22 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 Field Detail
+
+
+
+
+
+CLASS_RULE
+public static finalHBaseClassTestRule CLASS_RULE
+
+
 
 
 
 
 
 LOG
-private static finalorg.slf4j.Logger LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -255,7 +268,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 UTIL
-private static finalHBaseTestingUtility UTIL
+private static finalHBaseTestingUtility UTIL
 
 
 
@@ -264,7 +277,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 name
-publicorg.junit.rules.TestName name
+publicorg.junit.rules.TestName name
 
 
 
@@ -281,7 +294,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 TestInfoServers
-publicTestInfoServers()
+publicTestInfoServers()
 
 
 
@@ -298,7 +311,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 beforeClass
-public staticvoidbeforeClass()
+public staticvoidbeforeClass()
 throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -312,7 +325,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 afterClass
-public staticvoidafterClass()
+public staticvoidafterClass()
throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -326,7 +339,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 testGetMasterInfoPort
-publicvoidtestGetMasterInfoPort()
+publicvoidtestGetMasterInfoPort()
throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -340,7 +353,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 testInfoServersRedirect
-publicvoidtestInfoServersRedirect()
+publicvoidtestInfoServersRedirect()
  throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 Ensure when we go to top level index pages that we get 
redirected to an info-server specific status
  page.
@@ -356,7 +369,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 testInfoServersStatusPages
-publicvoidtestInfoServersStatusPages()
+publicvoidtestInfoServersStatusPages()
 throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 Test that the status pages in the minicluster load properly.
 
@@ -375,7 +388,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 testMasterServerReadOnly
-publicvoidtestMasterServerReadOnly()
+publicvoidtestMasterServerReadOnly()
   throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -389,7 +402,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 assertContainsContent
-privatevoidassertContainsContent(http://docs.oracle.com/javase/8/docs/api/java/net/URL.html?is-external=true;
 title="class or interface in java.net">URLu,
+privatevoidassertContainsContent(http://docs.oracle.com/javase/8/docs/api/java/net/URL.html?is-external=true;
 title="class or interface in java.net">URLu,
http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringexpected)
 

[32/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/RestoreDriver.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/RestoreDriver.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/RestoreDriver.html
index 658bfab..7279ea9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/RestoreDriver.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/RestoreDriver.html
@@ -55,16 +55,16 @@
 047import org.apache.hadoop.hbase.backup.impl.BackupManager;
 048import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
 049import org.apache.hadoop.hbase.backup.util.BackupUtils;
-050import org.apache.yetus.audience.InterfaceAudience;
-051import org.slf4j.Logger;
-052import org.slf4j.LoggerFactory;
-053import org.apache.hadoop.hbase.client.Connection;
-054import org.apache.hadoop.hbase.client.ConnectionFactory;
-055import org.apache.hadoop.hbase.util.AbstractHBaseTool;
-056import org.apache.hadoop.hbase.util.FSUtils;
-057import org.apache.hadoop.util.ToolRunner;
-058import org.apache.log4j.Level;
-059import org.apache.log4j.LogManager;
+050import org.apache.hadoop.hbase.client.Connection;
+051import org.apache.hadoop.hbase.client.ConnectionFactory;
+052import org.apache.hadoop.hbase.util.AbstractHBaseTool;
+053import org.apache.hadoop.hbase.util.FSUtils;
+054import org.apache.hadoop.util.ToolRunner;
+055import org.apache.log4j.Level;
+056import org.apache.log4j.LogManager;
+057import org.apache.yetus.audience.InterfaceAudience;
+058import org.slf4j.Logger;
+059import org.slf4j.LoggerFactory;
 060
 061/**
 062 *
@@ -73,213 +73,215 @@
 065 */
 066@InterfaceAudience.Private
 067public class RestoreDriver extends AbstractHBaseTool {
-068
-069  private static final Logger LOG = LoggerFactory.getLogger(RestoreDriver.class);
-070  private CommandLine cmd;
-071
-072  private static final String USAGE_STRING =
-073      "Usage: hbase restore <backup_path> <backup_id> [options]\n"
-074      + "  backup_path     Path to a backup destination root\n"
-075      + "  backup_id       Backup image ID to restore\n"
-076      + "  table(s)        Comma-separated list of tables to restore\n";
-077
-078  private static final String USAGE_FOOTER = "";
-079
-080  protected RestoreDriver() throws IOException {
-081    init();
-082  }
-083
-084  protected void init() throws IOException {
-085    // disable irrelevant loggers to avoid it mess up command output
-086    LogUtils.disableZkAndClientLoggers();
-087  }
-088
-089  private int parseAndRun(String[] args) throws IOException {
-090    // Check if backup is enabled
-091    if (!BackupManager.isBackupEnabled(getConf())) {
-092      System.err.println(BackupRestoreConstants.ENABLE_BACKUP);
-093      return -1;
-094    }
-095
-096    System.out.println(BackupRestoreConstants.VERIFY_BACKUP);
-097
-098    // enable debug logging
-099    if (cmd.hasOption(OPTION_DEBUG)) {
-100      LogManager.getLogger("org.apache.hadoop.hbase.backup").setLevel(Level.DEBUG);
-101    }
-102
-103    // whether to overwrite to existing table if any, false by default
-104    boolean overwrite = cmd.hasOption(OPTION_OVERWRITE);
-105    if (overwrite) {
-106      LOG.debug("Found -overwrite option in restore command, "
-107          + "will overwrite to existing table if any in the restore target");
-108    }
-109
-110    // whether to only check the dependencies, false by default
-111    boolean check = cmd.hasOption(OPTION_CHECK);
-112    if (check) {
-113      LOG.debug("Found -check option in restore command, "
-114          + "will check and verify the dependencies");
-115    }
-116
-117    if (cmd.hasOption(OPTION_SET) && cmd.hasOption(OPTION_TABLE)) {
-118      System.err.println("Options -s and -t are mutaully exclusive,"+
-119          " you can not specify both of them.");
-120      printToolUsage();
-121      return -1;
-122    }
-123
-124    if (!cmd.hasOption(OPTION_SET) && !cmd.hasOption(OPTION_TABLE)) {
-125      System.err.println("You have to specify either set name or table list to restore");
-126      printToolUsage();
-127      return -1;
-128    }
-129
-130    if (cmd.hasOption(OPTION_YARN_QUEUE_NAME)) {
-131      String queueName = cmd.getOptionValue(OPTION_YARN_QUEUE_NAME);
-132      // Set system property value for MR job
-133      System.setProperty("mapreduce.job.queuename", queueName);
-134    }
-135
-136    // parse main restore command options
-137    String[] remainArgs = cmd.getArgs();
-138    if (remainArgs.length != 2) {
-139      printToolUsage();
-140      return -1;
-141    }
-142
-143    String backupRootDir = remainArgs[0];
-144    String backupId = remainArgs[1];
-145    String tables = null;
-146    String tableMapping =
-147        cmd.hasOption(OPTION_TABLE_MAPPING) ? cmd.getOptionValue(OPTION_TABLE_MAPPING) : null;
-148    try (final 
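
RestoreDriver#parseAndRun above rejects -s together with -t and then requires exactly two positional arguments. A standalone commons-cli sketch of that validation shape (the option names and usage text below are illustrative, not the backup tool's real definitions):

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class RestoreArgsSketch {
  public static void main(String[] args) throws ParseException {
    Options options = new Options();
    options.addOption("s", "set", true, "backup set name");
    options.addOption("t", "table", true, "comma-separated table list");

    CommandLine cmd = new DefaultParser().parse(options, args);

    // -s and -t are mutually exclusive, and exactly one of them is required.
    if (cmd.hasOption("s") && cmd.hasOption("t")) {
      System.err.println("Options -s and -t are mutually exclusive.");
      System.exit(-1);
    }
    if (!cmd.hasOption("s") && !cmd.hasOption("t")) {
      System.err.println("Specify either a set name (-s) or a table list (-t).");
      System.exit(-1);
    }

    // Exactly two positional arguments remain: backup_path and backup_id.
    String[] remain = cmd.getArgs();
    if (remain.length != 2) {
      System.err.println("Usage: restore <backup_path> <backup_id> [options]");
      System.exit(-1);
    }
    System.out.println("Restoring " + remain[1] + " from " + remain[0]);
  }
}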

[32/51] [partial] hbase-site git commit: Published site at .

2018-01-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/96e5e102/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
index ca8be5e..b8e6dfa 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
@@ -6398,514 +6398,514 @@
 6390      int initialBatchProgress = scannerContext.getBatchProgress();
 6391      long initialSizeProgress = scannerContext.getDataSizeProgress();
 6392      long initialHeapSizeProgress = scannerContext.getHeapSizeProgress();
-6393      long initialTimeProgress = scannerContext.getTimeProgress();
-6394
-6395      // The loop here is used only when at some point during the next we determine
-6396      // that due to effects of filters or otherwise, we have an empty row in the result.
-6397      // Then we loop and try again. Otherwise, we must get out on the first iteration via return,
-6398      // "true" if there's more data to read, "false" if there isn't (storeHeap is at a stop row,
-6399      // and joinedHeap has no more data to read for the last row (if set, joinedContinuationRow).
-6400      while (true) {
-6401        // Starting to scan a new row. Reset the scanner progress according to whether or not
-6402        // progress should be kept.
-6403        if (scannerContext.getKeepProgress()) {
-6404          // Progress should be kept. Reset to initial values seen at start of method invocation.
-6405          scannerContext.setProgress(initialBatchProgress, initialSizeProgress,
-6406              initialHeapSizeProgress, initialTimeProgress);
-6407        } else {
-6408          scannerContext.clearProgress();
-6409        }
-6410        if (rpcCall.isPresent()) {
-6411          // If a user specifies a too-restrictive or too-slow scanner, the
-6412          // client might time out and disconnect while the server side
-6413          // is still processing the request. We should abort aggressively
-6414          // in that case.
-6415          long afterTime = rpcCall.get().disconnectSince();
-6416          if (afterTime >= 0) {
-6417            throw new CallerDisconnectedException(
-6418                "Aborting on region " + getRegionInfo().getRegionNameAsString() + ", call " +
-6419                this + " after " + afterTime + " ms, since " +
-6420                "caller disconnected");
-6421          }
-6422        }
-6423
-6424        // Let's see what we have in the storeHeap.
-6425        Cell current = this.storeHeap.peek();
-6426
-6427        boolean shouldStop = shouldStop(current);
-6428        // When has filter row is true it means that the all the cells for a particular row must be
-6429        // read before a filtering decision can be made. This means that filters where hasFilterRow
-6430        // run the risk of enLongAddering out of memory errors in the case that they are applied to a
-6431        // table that has very large rows.
-6432        boolean hasFilterRow = this.filter != null && this.filter.hasFilterRow();
-6433
-6434        // If filter#hasFilterRow is true, partial results are not allowed since allowing them
-6435        // would prevent the filters from being evaluated. Thus, if it is true, change the
-6436        // scope of any limits that could potentially create partial results to
-6437        // LimitScope.BETWEEN_ROWS so that those limits are not reached mid-row
-6438        if (hasFilterRow) {
-6439          if (LOG.isTraceEnabled()) {
-6440            LOG.trace("filter#hasFilterRow is true which prevents partial results from being "
-6441                + " formed. Changing scope of limits that may create partials");
-6442          }
-6443          scannerContext.setSizeLimitScope(LimitScope.BETWEEN_ROWS);
-6444          scannerContext.setTimeLimitScope(LimitScope.BETWEEN_ROWS);
-6445        }
-6446
-6447        // Check if we were getting data from the joinedHeap and hit the limit.
-6448        // If not, then it's main path - getting results from storeHeap.
-6449        if (joinedContinuationRow == null) {
-6450          // First, check if we are at a stop row. If so, there are no more results.
-6451          if (shouldStop) {
-6452            if (hasFilterRow) {
-6453              filter.filterRowCells(results);
-6454            }
-6455            return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
-6456          }
-6457
-6458          // Check if rowkey filter wants to exclude this row. If so, loop to next.
-6459          // Technically, if we hit limits before on this row, we don't need this call.
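
The removed scan loop above aborts a long next() call once the RPC's client has disconnected (rpcCall.get().disconnectSince() >= 0). A simplified, framework-free sketch of that guard; the RpcCall type here is a hypothetical stand-in, not HBase's interface:

import java.util.Optional;

public class DisconnectGuardSketch {
  /** Hypothetical stand-in for the RPC handle the region server consults. */
  interface RpcCall {
    /** Milliseconds since the client disconnected, or -1 if still connected. */
    long disconnectSince();
  }

  /**
   * Mirror of the check inside the scan loop: if the caller is already gone,
   * stop scanning instead of burning server resources on an unread result.
   */
  static void abortIfCallerGone(Optional<RpcCall> rpcCall, String regionName) {
    if (rpcCall.isPresent()) {
      long afterTime = rpcCall.get().disconnectSince();
      if (afterTime >= 0) {
        throw new IllegalStateException(
            "Aborting scan on region " + regionName + ": caller disconnected "
                + afterTime + " ms ago");
      }
    }
  }

  public static void main(String[] args) {
    abortIfCallerGone(Optional.of(() -> -1L), "demo-region"); // still connected: no-op
    try {
      abortIfCallerGone(Optional.of(() -> 1200L), "demo-region"); // disconnected: abort
    } catch (IllegalStateException expected) {
      System.out.println(expected.getMessage());
    }
  }
}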

[32/51] [partial] hbase-site git commit: Published site at .

2018-01-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8118541f/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
index 3d7e662..8c97d73 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
@@ -1049,6 +1049,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
   byte[]row)
 
 
+private void
+HBaseAdmin.flush(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterfaceadmin,
+ RegionInfoinfo)
+
+
+private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
+RawAsyncHBaseAdmin.flush(ServerNameserverName,
+ RegionInforegionInfo)
+
+
 static byte[]
 RegionInfoDisplay.getEndKeyForDisplay(RegionInfori,
org.apache.hadoop.conf.Configurationconf)
@@ -5023,6 +5033,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
+static HRegion
+HRegion.openReadOnlyFileSystemHRegion(org.apache.hadoop.conf.Configurationconf,
+ org.apache.hadoop.fs.FileSystemfs,
+ org.apache.hadoop.fs.PathtableDir,
+ RegionInfoinfo,
+ TableDescriptorhtd)
+Open a Region on a read-only file-system (like hdfs 
snapshots)
+
+
+
 static HRegionFileSystem
 HRegionFileSystem.openRegionFromFileSystem(org.apache.hadoop.conf.Configurationconf,
 org.apache.hadoop.fs.FileSystemfs,
@@ -5032,12 +5052,12 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 Open Region from file-system.
 
 
-
+
 void
 RegionCoprocessorHost.postReplayWALs(RegionInfoinfo,
   org.apache.hadoop.fs.Pathedits)
 
-
+
 void
 RegionCoprocessorHost.postWALRestore(RegionInfoinfo,
   WALKeylogKey,
@@ -5048,12 +5068,12 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 
-
+
 void
 RegionCoprocessorHost.preReplayWALs(RegionInfoinfo,
  org.apache.hadoop.fs.Pathedits)
 
-
+
 boolean
 RegionCoprocessorHost.preWALRestore(RegionInfoinfo,
  WALKeylogKey,
@@ -5064,7 +5084,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-
+
 static boolean
 HRegion.rowIsInRange(RegionInfoinfo,
 byte[]row)
@@ -5072,14 +5092,14 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
  specified RegionInfo
 
 
-
+
 static boolean
 HRegion.rowIsInRange(RegionInfoinfo,
 byte[]row,
 intoffset,
 shortlength)
 
-
+
 org.apache.hadoop.fs.Path
 HRegionFileSystem.splitStoreFile(RegionInfohri,
   http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringfamilyName,
@@ -5090,7 +5110,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Write out a split reference.
 
 
-
+
 static void
 HRegion.warmupHRegion(RegionInfoinfo,
  TableDescriptorhtd,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8118541f/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
index 65e4619..f38b5f3 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
@@ -2029,17 +2029,27 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 
+static HRegion
+HRegion.openReadOnlyFileSystemHRegion(org.apache.hadoop.conf.Configurationconf,
+ org.apache.hadoop.fs.FileSystemfs,
+ org.apache.hadoop.fs.PathtableDir,
+ RegionInfoinfo,
+ TableDescriptorhtd)
+Open a Region on a read-only file-system (like hdfs 
snapshots)
+
+
+
 (package private) void
 HRegion.setTableDescriptor(TableDescriptordesc)
 
-
+
 static void
 RegionCoprocessorHost.testTableCoprocessorAttrs(org.apache.hadoop.conf.Configurationconf,
  TableDescriptorhtd)
 Sanity check the table coprocessor attributes of the 
supplied schema.
 
 
-
+
 static void
 HRegion.warmupHRegion(RegionInfoinfo,
  

[32/51] [partial] hbase-site git commit: Published site at .

2018-01-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/14db89d7/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerMonitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerMonitor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerMonitor.html
index b50a65f..7271567 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerMonitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.WorkerMonitor.html
@@ -1718,312 +1718,314 @@
 1710
 1711public WorkerThread(final 
ThreadGroup group) {
 1712  super(group, "ProcExecWrkr-" + 
workerId.incrementAndGet());
-1713}
-1714
-1715@Override
-1716public void sendStopSignal() {
-1717  scheduler.signalAll();
-1718}
-1719
-1720@Override
-1721public void run() {
-1722  long lastUpdate = 
EnvironmentEdgeManager.currentTime();
-1723  try {
-1724while (isRunning()  
keepAlive(lastUpdate)) {
-1725  this.activeProcedure = 
scheduler.poll(keepAliveTime, TimeUnit.MILLISECONDS);
-1726  if (this.activeProcedure == 
null) continue;
-1727  int activeCount = 
activeExecutorCount.incrementAndGet();
-1728  int runningCount = 
store.setRunningProcedureCount(activeCount);
-1729  if (LOG.isTraceEnabled()) {
-1730LOG.trace("Execute pid=" + 
this.activeProcedure.getProcId() +
-1731" runningCount=" + 
runningCount + ", activeCount=" + activeCount);
-1732  }
-1733  
executionStartTime.set(EnvironmentEdgeManager.currentTime());
-1734  try {
-1735
executeProcedure(this.activeProcedure);
-1736  } catch (AssertionError e) {
-1737LOG.info("ASSERT pid=" + 
this.activeProcedure.getProcId(), e);
-1738throw e;
-1739  } finally {
-1740activeCount = 
activeExecutorCount.decrementAndGet();
-1741runningCount = 
store.setRunningProcedureCount(activeCount);
-1742if (LOG.isTraceEnabled()) 
{
-1743  LOG.trace("Halt pid=" + 
this.activeProcedure.getProcId() +
-1744  " runningCount=" + 
runningCount + ", activeCount=" + activeCount);
-1745}
-1746this.activeProcedure = 
null;
-1747lastUpdate = 
EnvironmentEdgeManager.currentTime();
-1748
executionStartTime.set(Long.MAX_VALUE);
-1749  }
-1750}
-1751  } catch (Throwable t) {
-1752LOG.warn("Worker terminating 
UNNATURALLY " + this.activeProcedure, t);
-1753  } finally {
-1754LOG.debug("Worker 
terminated.");
-1755  }
-1756  workerThreads.remove(this);
-1757}
-1758
-1759@Override
-1760public String toString() {
-1761  Procedure? p = 
this.activeProcedure;
-1762  return getName() + "(pid=" + (p == 
null? Procedure.NO_PROC_ID: p.getProcId() + ")");
-1763}
-1764
-1765/**
-1766 * @return the time since the 
current procedure is running
-1767 */
-1768public long getCurrentRunTime() {
-1769  return 
EnvironmentEdgeManager.currentTime() - executionStartTime.get();
-1770}
-1771
-1772private boolean keepAlive(final long 
lastUpdate) {
-1773  if (workerThreads.size() = 
corePoolSize) return true;
-1774  return 
(EnvironmentEdgeManager.currentTime() - lastUpdate)  keepAliveTime;
-1775}
-1776  }
-1777
-1778  /**
-1779   * Runs task on a period such as check 
for stuck workers.
-1780   * @see InlineChore
-1781   */
-1782  private final class 
TimeoutExecutorThread extends StoppableThread {
-1783private final 
DelayQueueDelayedWithTimeout queue = new DelayQueue();
-1784
-1785public TimeoutExecutorThread(final 
ThreadGroup group) {
-1786  super(group, "ProcExecTimeout");
-1787}
-1788
-1789@Override
-1790public void sendStopSignal() {
-1791  
queue.add(DelayedUtil.DELAYED_POISON);
-1792}
-1793
-1794@Override
-1795public void run() {
-1796  final boolean traceEnabled = 
LOG.isTraceEnabled();
-1797  while (isRunning()) {
-1798final DelayedWithTimeout task = 
DelayedUtil.takeWithoutInterrupt(queue);
-1799if (task == null || task == 
DelayedUtil.DELAYED_POISON) {
-1800  // the executor may be 
shutting down,
-1801  // and the task is just the 
shutdown request
-1802  continue;
-1803}
-1804
-1805if (traceEnabled) {
-1806  LOG.trace("Executing " + 
task);
-1807}
-1808
-1809// execute the task
-1810if (task instanceof InlineChore) 
{
-1811  
execInlineChore((InlineChore)task);
-1812} else if (task instanceof 
DelayedProcedure) {
-1813  
execDelayedProcedure((DelayedProcedure)task);
-1814} else {
-1815  LOG.error("CODE-BUG unknown 
timeout task type " + 

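The keepAlive() logic shown above boils down to: a worker at or below the core pool size never times out, while a surplus worker exits once it has been idle longer than keepAliveTime. A generic paraphrase of that loop (not the HBase class itself):

  while (running) {
    Runnable task = queue.poll(keepAliveMs, TimeUnit.MILLISECONDS);
    if (task == null) {
      // only surplus workers are allowed to time out
      if (workers.size() > corePoolSize
          && System.currentTimeMillis() - lastUpdate > keepAliveMs) {
        break;
      }
      continue;
    }
    task.run();
    lastUpdate = System.currentTimeMillis();
  }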
[32/51] [partial] hbase-site git commit: Published site at .

2018-01-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0b638133/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
index 163ade0..802fc2f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
@@ -254,3512 +254,3505 @@
 246  protected MemStoreFlusher 
cacheFlusher;
 247
 248  protected HeapMemoryManager 
hMemManager;
-249  protected CountDownLatch initLatch = 
null;
-250
-251  /**
-252   * Cluster connection to be shared by 
services.
-253   * Initialized at server startup and 
closed when server shuts down.
-254   * Clients must never close it 
explicitly.
-255   */
-256  protected ClusterConnection 
clusterConnection;
-257
-258  /*
-259   * Long-living meta table locator, 
which is created when the server is started and stopped
-260   * when server shuts down. References 
to this locator shall be used to perform according
-261   * operations in EventHandlers. Primary 
reason for this decision is to make it mockable
-262   * for tests.
-263   */
-264  protected MetaTableLocator 
metaTableLocator;
-265
-266  /**
-267   * Go here to get table descriptors.
-268   */
-269  protected TableDescriptors 
tableDescriptors;
-270
-271  // Replication services. If no 
replication, this handler will be null.
-272  protected ReplicationSourceService 
replicationSourceHandler;
-273  protected ReplicationSinkService 
replicationSinkHandler;
-274
-275  // Compactions
-276  public CompactSplit 
compactSplitThread;
-277
-278  /**
-279   * Map of regions currently being 
served by this region server. Key is the
-280   * encoded region name.  All access 
should be synchronized.
-281   */
-282  protected final MapString, 
HRegion onlineRegions = new ConcurrentHashMap();
-283
-284  /**
-285   * Map of encoded region names to the 
DataNode locations they should be hosted on
-286   * We store the value as 
InetSocketAddress since this is used only in HDFS
-287   * API (create() that takes favored 
nodes as hints for placing file blocks).
-288   * We could have used ServerName here 
as the value class, but we'd need to
-289   * convert it to InetSocketAddress at 
some point before the HDFS API call, and
-290   * it seems a bit weird to store 
ServerName since ServerName refers to RegionServers
-291   * and here we really mean DataNode 
locations.
-292   */
-293  protected final MapString, 
InetSocketAddress[] regionFavoredNodesMap =
-294  new ConcurrentHashMap();
-295
-296  // Leases
-297  protected Leases leases;
-298
-299  // Instance of the hbase executor 
executorService.
-300  protected ExecutorService 
executorService;
-301
-302  // If false, the file system has become 
unavailable
-303  protected volatile boolean fsOk;
-304  protected HFileSystem fs;
-305  protected HFileSystem walFs;
-306
-307  // Set when a report to the master 
comes back with a message asking us to
-308  // shutdown. Also set by call to stop 
when debugging or running unit tests
-309  // of HRegionServer in isolation.
-310  private volatile boolean stopped = 
false;
-311
-312  // Go down hard. Used if file system 
becomes unavailable and also in
-313  // debugging and unit tests.
-314  private volatile boolean 
abortRequested;
-315
-316  ConcurrentMapString, Integer 
rowlocks = new ConcurrentHashMap();
-317
-318  // A state before we go into stopped 
state.  At this stage we're closing user
-319  // space regions.
-320  private boolean stopping = false;
-321
-322  volatile boolean killed = false;
-323
-324  protected final Configuration conf;
-325
-326  private Path rootDir;
-327  private Path walRootDir;
-328
-329  protected final ReentrantReadWriteLock 
lock = new ReentrantReadWriteLock();
-330
-331  final int numRetries;
-332  protected final int 
threadWakeFrequency;
-333  protected final int msgInterval;
-334
-335  protected final int 
numRegionsToReport;
-336
-337  // Stub to do region server status 
calls against the master.
-338  private volatile 
RegionServerStatusService.BlockingInterface rssStub;
-339  private volatile 
LockService.BlockingInterface lockStub;
-340  // RPC client. Used to make the stub 
above that does region server status checking.
-341  RpcClient rpcClient;
-342
-343  private RpcRetryingCallerFactory 
rpcRetryingCallerFactory;
-344  private RpcControllerFactory 
rpcControllerFactory;
-345
-346  private UncaughtExceptionHandler 
uncaughtExceptionHandler;
-347
-348  // Info server. Default access so can 
be used by unit tests. REGIONSERVER
-349  // is name of the webapp and the 
attribute name used stuffing this instance
-350  // into web context.
-351  protected InfoServer infoServer;
-352  private JvmPauseMonitor pauseMonitor;
-353
-354  /** 

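A simplified illustration of the two region-server registries described above (not the real class): regions are keyed by encoded region name, and favored-node hints are kept as InetSocketAddress because that is the form the HDFS create() API expects.

  Map<String, HRegion> onlineRegions = new ConcurrentHashMap<>();
  Map<String, InetSocketAddress[]> regionFavoredNodesMap = new ConcurrentHashMap<>();

  void registerRegion(HRegion r, InetSocketAddress[] favoredNodes) {
    String encodedName = r.getRegionInfo().getEncodedName();
    onlineRegions.put(encodedName, r);
    if (favoredNodes != null) {
      regionFavoredNodesMap.put(encodedName, favoredNodes);
    }
  }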
[32/51] [partial] hbase-site git commit: Published site at .

2018-01-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f183e80f/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.LoadQueueItem.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.LoadQueueItem.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.LoadQueueItem.html
index 1f114e0..01e19b2 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.LoadQueueItem.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.LoadQueueItem.html
@@ -25,1239 +25,1263 @@
 017 */
 018package org.apache.hadoop.hbase.tool;
 019
-020import static java.lang.String.format;
-021
-022import java.io.FileNotFoundException;
-023import java.io.IOException;
-024import java.io.InterruptedIOException;
-025import java.nio.ByteBuffer;
-026import java.util.ArrayDeque;
-027import java.util.ArrayList;
-028import java.util.Arrays;
-029import java.util.Collection;
-030import java.util.Collections;
-031import java.util.Deque;
-032import java.util.HashMap;
-033import java.util.HashSet;
-034import java.util.List;
-035import java.util.Map;
-036import java.util.Map.Entry;
-037import java.util.Optional;
-038import java.util.Set;
-039import java.util.SortedMap;
-040import java.util.TreeMap;
-041import java.util.UUID;
-042import java.util.concurrent.Callable;
-043import 
java.util.concurrent.ExecutionException;
-044import 
java.util.concurrent.ExecutorService;
-045import java.util.concurrent.Future;
-046import 
java.util.concurrent.LinkedBlockingQueue;
-047import 
java.util.concurrent.ThreadPoolExecutor;
-048import java.util.concurrent.TimeUnit;
-049import 
java.util.concurrent.atomic.AtomicInteger;
-050import java.util.stream.Collectors;
-051
-052import 
org.apache.commons.lang3.mutable.MutableInt;
-053import 
org.apache.hadoop.conf.Configuration;
-054import 
org.apache.hadoop.conf.Configured;
-055import org.apache.hadoop.fs.FileStatus;
-056import org.apache.hadoop.fs.FileSystem;
-057import org.apache.hadoop.fs.Path;
-058import 
org.apache.hadoop.fs.permission.FsPermission;
-059import 
org.apache.hadoop.hbase.HBaseConfiguration;
-060import 
org.apache.hadoop.hbase.HConstants;
-061import 
org.apache.hadoop.hbase.TableName;
-062import 
org.apache.hadoop.hbase.TableNotFoundException;
-063import 
org.apache.yetus.audience.InterfaceAudience;
-064import org.slf4j.Logger;
-065import org.slf4j.LoggerFactory;
-066import 
org.apache.hadoop.hbase.client.Admin;
-067import 
org.apache.hadoop.hbase.client.ClientServiceCallable;
-068import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-069import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-070import 
org.apache.hadoop.hbase.client.Connection;
-071import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-072import 
org.apache.hadoop.hbase.client.RegionLocator;
-073import 
org.apache.hadoop.hbase.client.RpcRetryingCallerFactory;
-074import 
org.apache.hadoop.hbase.client.SecureBulkLoadClient;
-075import 
org.apache.hadoop.hbase.client.Table;
-076import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-077import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-078import 
org.apache.hadoop.hbase.io.HFileLink;
-079import 
org.apache.hadoop.hbase.io.HalfStoreFileReader;
-080import 
org.apache.hadoop.hbase.io.Reference;
-081import 
org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
-082import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-083import 
org.apache.hadoop.hbase.io.hfile.HFile;
-084import 
org.apache.hadoop.hbase.io.hfile.HFileContext;
-085import 
org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
-086import 
org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
-087import 
org.apache.hadoop.hbase.io.hfile.HFileScanner;
-088import 
org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-089import 
org.apache.hadoop.hbase.regionserver.BloomType;
-090import 
org.apache.hadoop.hbase.regionserver.HStore;
-091import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-092import 
org.apache.hadoop.hbase.regionserver.StoreFileWriter;
-093import 
org.apache.hadoop.hbase.security.UserProvider;
-094import 
org.apache.hadoop.hbase.security.token.FsDelegationToken;
-095import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-096import 
org.apache.hbase.thirdparty.com.google.common.collect.HashMultimap;
-097import 
org.apache.hbase.thirdparty.com.google.common.collect.Multimap;
-098import 
org.apache.hbase.thirdparty.com.google.common.collect.Multimaps;
-099import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-100import 
org.apache.hadoop.hbase.util.Bytes;
-101import 
org.apache.hadoop.hbase.util.FSHDFSUtils;
-102import 
org.apache.hadoop.hbase.util.Pair;
-103import org.apache.hadoop.util.Tool;
-104import 
org.apache.hadoop.util.ToolRunner;
-105
-106/**
-107 * Tool to load the output of 

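A hedged sketch of driving the LoadIncrementalHFiles tool above programmatically; the HFile output directory and table name are placeholders:

  Configuration conf = HBaseConfiguration.create();
  int exitCode = ToolRunner.run(conf, new LoadIncrementalHFiles(conf),
      new String[] { "/tmp/hfileoutputformat-output", "my_table" });
  System.exit(exitCode);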
[32/51] [partial] hbase-site git commit: Published site at .

2018-01-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/49431b18/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 898a1d0..fce86aa 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -2595,39 +2595,53 @@ service.
 RegionInfoBuilder.MutableRegionInfo.checkTableName(TableNametableName)
 
 
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureCacheEvictionStats
+AsyncHBaseAdmin.clearBlockCache(TableNametableName)
+
+
 CacheEvictionStats
 Admin.clearBlockCache(TableNametableName)
 Clear all the blocks corresponding to this table from 
BlockCache.
 
 
-
+
 CacheEvictionStats
 HBaseAdmin.clearBlockCache(TableNametableName)
 Clear all the blocks corresponding to this table from 
BlockCache.
 
 
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureCacheEvictionStats
+AsyncAdmin.clearBlockCache(TableNametableName)
+Clear all the blocks corresponding to this table from 
BlockCache.
+
+
 
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureCacheEvictionStats
+RawAsyncHBaseAdmin.clearBlockCache(TableNametableName)
+
+
 void
 MetaCache.clearCache(TableNametableName)
 Delete all cached entries of a table.
 
 
-
+
 (package private) void
 AsyncNonMetaRegionLocator.clearCache(TableNametableName)
 
-
+
 (package private) void
 AsyncRegionLocator.clearCache(TableNametableName)
 
-
+
 void
 MetaCache.clearCache(TableNametableName,
   byte[]row)
 Delete a cached location, no matter what it is.
 
 
-
+
 void
 MetaCache.clearCache(TableNametableName,
   byte[]row,
@@ -2635,7 +2649,7 @@ service.
 Delete a cached location with specific replicaId.
 
 
-
+
 void
 MetaCache.clearCache(TableNametableName,
   byte[]row,
@@ -2643,64 +2657,64 @@ service.
 Delete a cached location for a table, row and server
 
 
-
+
 void
 ConnectionImplementation.clearRegionCache(TableNametableName)
 
-
+
 void
 ClusterConnection.clearRegionCache(TableNametableName)
 Allows flushing the region cache of all locations that 
pertain to
  tableName
 
 
-
+
 void
 ConnectionImplementation.clearRegionCache(TableNametableName,
 byte[]row)
 
-
+
 void
 Admin.cloneSnapshot(byte[]snapshotName,
  TableNametableName)
 Create a new table by cloning the snapshot content.
 
 
-
+
 void
 HBaseAdmin.cloneSnapshot(byte[]snapshotName,
  TableNametableName)
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
 AsyncHBaseAdmin.cloneSnapshot(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringsnapshotName,
  TableNametableName)
 
-
+
 void
 Admin.cloneSnapshot(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringsnapshotName,
  TableNametableName)
 Create a new table by cloning the snapshot content.
 
 
-
+
 void
 HBaseAdmin.cloneSnapshot(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringsnapshotName,
  TableNametableName)
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
 AsyncAdmin.cloneSnapshot(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringsnapshotName,
  TableNametableName)
 Create a new table by cloning the snapshot content.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
 RawAsyncHBaseAdmin.cloneSnapshot(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 

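A short sketch combining two of the Admin calls listed above; table and snapshot names are examples:

  try (Connection conn = ConnectionFactory.createConnection(conf);
       Admin admin = conn.getAdmin()) {
    TableName tn = TableName.valueOf("my_table");
    // evict this table's blocks from the BlockCache
    CacheEvictionStats stats = admin.clearBlockCache(tn);
    // materialize a snapshot as a new table
    admin.cloneSnapshot("my_snapshot", TableName.valueOf("my_table_clone"));
  }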
[32/51] [partial] hbase-site git commit: Published site at .

2018-01-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c7c40c62/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
index d5264f6..89f9554 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
@@ -466,12 +466,12 @@ implements 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterStatus
-getClusterStatus()
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
+getClusterMetrics()
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterStatus
-getClusterStatus(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetrics
+getClusterMetrics(http://docs.oracle.com/javase/8/docs/api/java/util/EnumSet.html?is-external=true;
 title="class or interface in java.util">EnumSetClusterMetrics.Optionoptions)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureCompactionState
@@ -529,16 +529,16 @@ implements 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionLoad
-getRegionLoads(ServerNameserverName)
-Get a list of RegionLoad of all regions hosted on a 
region server.
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionMetrics
+getRegionMetrics(ServerNameserverName)
+Get a list of RegionMetrics of 
all regions hosted on a region server.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionLoad
-getRegionLoads(ServerNameserverName,
-  TableNametableName)
-Get a list of RegionLoad of all regions hosted on a 
region server for a table.
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionMetrics
+getRegionMetrics(ServerNameserverName,
+TableNametableName)
+Get a list of RegionMetrics of 
all regions hosted on a region server for a table.
 
 
 
@@ -1005,7 +1005,7 @@ implements AsyncAdmin
-addReplicationPeer,
 balance,
 compact,
 compact,
 getBackupMasters,
 getCompactionState,
 getMaster,
 getMasterCoprocessors, getMasterInfoPort,
 getRegionServers,
 listTableDescriptors,
 listTableNames,
 majorCompact,
 majorCompact,
 snapshot,
 snapshot
+addReplicationPeer,
 balance,
 compact,
 compact,
 getBackupMasters,
 getCompactionState,
 getMaster,
 getMasterCoprocessorNames, getMasterInfoPort,
 getRegionServers,
 listTableDescriptors,
 listTableNames,
 majorCompact,
 majorCompact,
 snapshot,
 snapshot
 
 
 
@@ -2877,31 +2877,31 @@ implements 
+
 
 
 
 
-getClusterStatus
-publichttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterStatusgetClusterStatus()
+getClusterMetrics
+publichttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureClusterMetricsgetClusterMetrics()
 
 Specified by:
-getClusterStatusin
 interfaceAsyncAdmin
+getClusterMetricsin
 interfaceAsyncAdmin
 Returns:
 cluster status wrapped by 

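A sketch of the renamed asynchronous calls above; an existing AsyncConnection and ServerName are assumed:

  AsyncAdmin admin = asyncConnection.getAdmin();
  admin.getClusterMetrics()
      .thenAccept(cm -> System.out.println(
          "live servers: " + cm.getLiveServerMetrics().size()));
  admin.getRegionMetrics(serverName, TableName.valueOf("my_table"))
      .thenAccept(list -> list.forEach(
          rm -> System.out.println(rm.getNameAsString())));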
[32/51] [partial] hbase-site git commit: Published site at .

2018-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bb398572/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
index 5b3b750..a1f3f7e 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
@@ -97,3307 +97,3304 @@
 089import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
 090import 
org.apache.hbase.thirdparty.io.netty.util.Timeout;
 091import 
org.apache.hbase.thirdparty.io.netty.util.TimerTask;
-092import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-093import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-094import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-095import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest;
-096import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesResponse;
-097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersRequest;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersResponse;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersRequest;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersResponse;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;

[32/51] [partial] hbase-site git commit: Published site at .

2018-01-01 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/class-use/HBaseInterfaceAudience.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/HBaseInterfaceAudience.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HBaseInterfaceAudience.html
index b649008..2682a12 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HBaseInterfaceAudience.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HBaseInterfaceAudience.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007-2017 https://www.apache.org/;>The Apache Software Foundation. All rights reserved.
+Copyright © 2007-2018 https://www.apache.org/;>The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
index eaf604f..692ee61 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
@@ -601,6 +601,6 @@ service.
 
 
 
-Copyright © 2007-2017 https://www.apache.org/;>The Apache Software Foundation. All rights reserved.
+Copyright © 2007-2018 https://www.apache.org/;>The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/class-use/HConstants.OperationStatusCode.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/HConstants.OperationStatusCode.html
 
b/devapidocs/org/apache/hadoop/hbase/class-use/HConstants.OperationStatusCode.html
index 66b7095..8be63f6 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/HConstants.OperationStatusCode.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/HConstants.OperationStatusCode.html
@@ -223,6 +223,6 @@ the order they are declared.
 
 
 
-Copyright © 2007-2017 https://www.apache.org/;>The Apache Software Foundation. All rights reserved.
+Copyright © 2007-2018 https://www.apache.org/;>The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/class-use/HConstants.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HConstants.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HConstants.html
index 9963427..1d9e13f 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HConstants.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HConstants.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007-2017 https://www.apache.org/;>The Apache Software Foundation. All rights reserved.
+Copyright © 2007-2018 https://www.apache.org/;>The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.HostAndWeight.WeightComparator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.HostAndWeight.WeightComparator.html
 
b/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.HostAndWeight.WeightComparator.html
index bd3e111..00452fd 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.HostAndWeight.WeightComparator.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.HostAndWeight.WeightComparator.html
@@ -120,6 +120,6 @@
 
 
 
-Copyright © 2007-2017 https://www.apache.org/;>The Apache Software Foundation. All rights reserved.
+Copyright © 2007-2018 https://www.apache.org/;>The Apache Software Foundation. All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.HostAndWeight.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.HostAndWeight.html
 
b/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.HostAndWeight.html
index 7697627..fc8a58d 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.HostAndWeight.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.HostAndWeight.html
@@ -202,6 +202,6 @@
 
 
 
-Copyright © 2007-2017 https://www.apache.org/;>The Apache Software Foundation. All rights reserved.
+Copyright © 2007-2018 https://www.apache.org/;>The Apache Software Foundation. All rights reserved.
 
 


[32/51] [partial] hbase-site git commit: Published site at .

2017-12-30 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/83bf6175/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceStubMaker.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceStubMaker.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceStubMaker.html
index d405629..3ec93bb 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceStubMaker.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceStubMaker.html
@@ -371,1638 +371,1646 @@
 363if (params.getWriteBufferSize() == 
BufferedMutatorParams.UNSET) {
 364  
params.writeBufferSize(connectionConfig.getWriteBufferSize());
 365}
-366if (params.getMaxKeyValueSize() == 
BufferedMutatorParams.UNSET) {
-367  
params.maxKeyValueSize(connectionConfig.getMaxKeyValueSize());
-368}
-369// Look to see if an alternate 
BufferedMutation implementation is wanted.
-370// Look in params and in config. If 
null, use default.
-371String implementationClassName = 
params.getImplementationClassName();
-372if (implementationClassName == null) 
{
-373  implementationClassName = 
this.alternateBufferedMutatorClassName;
-374}
-375if (implementationClassName == null) 
{
-376  return new 
BufferedMutatorImpl(this, rpcCallerFactory, rpcControllerFactory, params);
-377}
-378try {
-379  return 
(BufferedMutator)ReflectionUtils.newInstance(Class.forName(implementationClassName),
-380  this, rpcCallerFactory, 
rpcControllerFactory, params);
-381} catch (ClassNotFoundException e) 
{
-382  throw new RuntimeException(e);
-383}
-384  }
-385
-386  @Override
-387  public BufferedMutator 
getBufferedMutator(TableName tableName) {
-388return getBufferedMutator(new 
BufferedMutatorParams(tableName));
-389  }
-390
-391  @Override
-392  public RegionLocator 
getRegionLocator(TableName tableName) throws IOException {
-393return new HRegionLocator(tableName, 
this);
-394  }
-395
-396  @Override
-397  public Admin getAdmin() throws 
IOException {
-398return new HBaseAdmin(this);
-399  }
-400
-401  @Override
-402  public MetricsConnection 
getConnectionMetrics() {
-403return this.metrics;
-404  }
-405
-406  private ExecutorService getBatchPool() 
{
-407if (batchPool == null) {
-408  synchronized (this) {
-409if (batchPool == null) {
-410  int threads = 
conf.getInt("hbase.hconnection.threads.max", 256);
-411  this.batchPool = 
getThreadPool(threads, threads, "-shared", null);
-412  this.cleanupPool = true;
-413}
-414  }
-415}
-416return this.batchPool;
-417  }
-418
-419  private ExecutorService 
getThreadPool(int maxThreads, int coreThreads, String nameHint,
-420  BlockingQueueRunnable 
passedWorkQueue) {
-421// shared HTable thread executor not 
yet initialized
-422if (maxThreads == 0) {
-423  maxThreads = 
Runtime.getRuntime().availableProcessors() * 8;
-424}
-425if (coreThreads == 0) {
-426  coreThreads = 
Runtime.getRuntime().availableProcessors() * 8;
-427}
-428long keepAliveTime = 
conf.getLong("hbase.hconnection.threads.keepalivetime", 60);
-429BlockingQueueRunnable 
workQueue = passedWorkQueue;
-430if (workQueue == null) {
-431  workQueue =
-432new 
LinkedBlockingQueue(maxThreads *
-433
conf.getInt(HConstants.HBASE_CLIENT_MAX_TOTAL_TASKS,
-434
HConstants.DEFAULT_HBASE_CLIENT_MAX_TOTAL_TASKS));
-435  coreThreads = maxThreads;
-436}
-437ThreadPoolExecutor tpe = new 
ThreadPoolExecutor(
-438coreThreads,
-439maxThreads,
-440keepAliveTime,
-441TimeUnit.SECONDS,
-442workQueue,
-443
Threads.newDaemonThreadFactory(toString() + nameHint));
-444tpe.allowCoreThreadTimeOut(true);
-445return tpe;
-446  }
-447
-448  private ExecutorService 
getMetaLookupPool() {
-449if (this.metaLookupPool == null) {
-450  synchronized (this) {
-451if (this.metaLookupPool == null) 
{
-452  //Some of the threads would be 
used for meta replicas
-453  //To start with, 
threads.max.core threads can hit the meta (including replicas).
-454  //After that, requests will get 
queued up in the passed queue, and only after
-455  //the queue is full, a new 
thread will be started
-456  int threads = 
conf.getInt("hbase.hconnection.meta.lookup.threads.max", 128);
-457  this.metaLookupPool = 
getThreadPool(
-458 threads,
-459 threads,
-460 "-metaLookup-shared-", new 
LinkedBlockingQueue());
-461}
-462  }
-463}
-464return this.metaLookupPool;
-465  }
-466
-467  protected ExecutorService 

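The caller-facing side of the getBufferedMutator path above, as a sketch; only overridden parameters need to be supplied, the rest fall back to the connection configuration:

  BufferedMutatorParams params = new BufferedMutatorParams(TableName.valueOf("my_table"))
      .writeBufferSize(4 * 1024 * 1024);   // unset values, e.g. maxKeyValueSize, come from the config
  try (BufferedMutator mutator = connection.getBufferedMutator(params)) {
    Put put = new Put(Bytes.toBytes("row1"));
    put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
    mutator.mutate(put);
  }   // close() flushes anything still buffered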
[32/51] [partial] hbase-site git commit: Published site at .

2017-12-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/63d6f712/devapidocs/org/apache/hadoop/hbase/client/AsyncRegionLocator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncRegionLocator.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRegionLocator.html
index a8b5ed6..e9c8924 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncRegionLocator.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncRegionLocator.html
@@ -144,7 +144,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 nonMetaRegionLocator
 
 
-private 
org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer
+private 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer
 retryTimer
 
 
@@ -162,8 +162,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 Constructor and Description
 
 
-AsyncRegionLocator(AsyncConnectionImplconn,
-  
org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimerretryTimer)
+AsyncRegionLocator(AsyncConnectionImplconn,
+  
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimerretryTimer)
 
 
 
@@ -260,7 +260,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 retryTimer
-private 
finalorg.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer
+private 
finalorg.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer retryTimer
 
 
 
@@ -289,14 +289,14 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 Constructor Detail
-
+
 
 
 
 
 AsyncRegionLocator
 AsyncRegionLocator(AsyncConnectionImplconn,
-   
org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimerretryTimer)
+   
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimerretryTimer)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/63d6f712/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html
index 2e49153..04bc645 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html
@@ -167,7 +167,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 pauseNs
 
 
-private 
org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer
+private 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer
 retryTimer
 
 
@@ -201,7 +201,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 Constructor and Description
 
 
-AsyncRpcRetryingCaller(org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimerretryTimer,
+AsyncRpcRetryingCaller(org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimerretryTimer,
   AsyncConnectionImplconn,
   longpauseNs,
   intmaxAttempts,
@@ -291,7 +291,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 retryTimer
-private 
finalorg.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer retryTimer
+private 
finalorg.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer retryTimer
 
 
 
@@ -401,13 +401,13 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 Constructor Detail
-
+
 
 
 
 
 AsyncRpcRetryingCaller
-publicAsyncRpcRetryingCaller(org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimerretryTimer,
+publicAsyncRpcRetryingCaller(org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimerretryTimer,
   AsyncConnectionImplconn,
   longpauseNs,
   intmaxAttempts,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/63d6f712/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.html
index 1383b56..a0ff115 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.html
@@ -183,7 +183,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 conn
 
 
-private 
org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer
+private 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer
 retryTimer
 
 
@@ -201,8 +201,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 Constructor and Description
 
 
-AsyncRpcRetryingCallerFactory(AsyncConnectionImplconn,
- 

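The diff above is only a package relocation of the shaded netty timer; its use by these retrying callers follows the usual wheel-timer pattern, roughly as below (tryAgain() is a placeholder):

  org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer retryTimer =
      new org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer();
  retryTimer.newTimeout(timeout -> tryAgain(), 100, TimeUnit.MILLISECONDS);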
[32/51] [partial] hbase-site git commit: Published site at .

2017-12-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d449e87f/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColCell.html 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColCell.html
index edc3b06..c00cdc9 100644
--- a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColCell.html
+++ b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColCell.html
@@ -49,8 +49,8 @@ var activeTableTab = "activeTableTab";
 
 
 
-PrevClass
-NextClass
+PrevClass
+NextClass
 
 
 Frames
@@ -131,7 +131,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class PrivateCellUtil.FirstOnRowColCell
+private static class PrivateCellUtil.FirstOnRowColCell
 extends PrivateCellUtil.FirstOnRowCell
 
 
@@ -150,7 +150,7 @@ extends Cell
-Cell.DataType
+Cell.Type
 
 
 
@@ -337,7 +337,7 @@ extends 
 
 FIXED_HEAPSIZE
-private static finallong FIXED_HEAPSIZE
+private static finallong FIXED_HEAPSIZE
 
 
 
@@ -346,7 +346,7 @@ extends 
 
 fArray
-private finalbyte[] fArray
+private finalbyte[] fArray
 
 
 
@@ -355,7 +355,7 @@ extends 
 
 foffset
-private finalint foffset
+private finalint foffset
 
 
 
@@ -364,7 +364,7 @@ extends 
 
 flength
-private finalbyte flength
+private finalbyte flength
 
 
 
@@ -373,7 +373,7 @@ extends 
 
 qArray
-private finalbyte[] qArray
+private finalbyte[] qArray
 
 
 
@@ -382,7 +382,7 @@ extends 
 
 qoffset
-private finalint qoffset
+private finalint qoffset
 
 
 
@@ -391,7 +391,7 @@ extends 
 
 qlength
-private finalint qlength
+private finalint qlength
 
 
 
@@ -408,7 +408,7 @@ extends 
 
 FirstOnRowColCell
-publicFirstOnRowColCell(byte[]rArray,
+publicFirstOnRowColCell(byte[]rArray,
  introffset,
  shortrlength,
  byte[]fArray,
@@ -433,7 +433,7 @@ extends 
 
 heapSize
-publiclongheapSize()
+publiclongheapSize()
 
 Specified by:
 heapSizein
 interfaceHeapSize
@@ -451,7 +451,7 @@ extends 
 
 getFamilyArray
-publicbyte[]getFamilyArray()
+publicbyte[]getFamilyArray()
 Description copied from 
interface:Cell
 Contiguous bytes composed of legal HDFS filename characters 
which may start at any index in the
  containing array. Max length is Byte.MAX_VALUE, which is 127 bytes.
@@ -471,7 +471,7 @@ extends 
 
 getFamilyOffset
-publicintgetFamilyOffset()
+publicintgetFamilyOffset()
 
 Specified by:
 getFamilyOffsetin
 interfaceCell
@@ -488,7 +488,7 @@ extends 
 
 getFamilyLength
-publicbytegetFamilyLength()
+publicbytegetFamilyLength()
 
 Specified by:
 getFamilyLengthin
 interfaceCell
@@ -505,7 +505,7 @@ extends 
 
 getQualifierArray
-publicbyte[]getQualifierArray()
+publicbyte[]getQualifierArray()
 Description copied from 
interface:Cell
 Contiguous raw bytes that may start at any index in the 
containing array.
 
@@ -524,7 +524,7 @@ extends 
 
 getQualifierOffset
-publicintgetQualifierOffset()
+publicintgetQualifierOffset()
 
 Specified by:
 getQualifierOffsetin
 interfaceCell
@@ -541,7 +541,7 @@ extends 
 
 getQualifierLength
-publicintgetQualifierLength()
+publicintgetQualifierLength()
 
 Specified by:
 getQualifierLengthin
 interfaceCell
@@ -580,8 +580,8 @@ extends 
 
-PrevClass
-NextClass
+PrevClass
+NextClass
 
 
 Frames

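Client code normally reaches the family/qualifier ranges described above through the public CellUtil helpers rather than these private cell classes; a small sketch (a Result from a Get or Scan is assumed):

  for (Cell cell : result.rawCells()) {
    byte[] family = CellUtil.cloneFamily(cell);       // copies the family range
    byte[] qualifier = CellUtil.cloneQualifier(cell); // copies the qualifier range
    System.out.println(Bytes.toString(family) + ":" + Bytes.toString(qualifier));
  }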
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d449e87f/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSByteBufferCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSByteBufferCell.html
 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSByteBufferCell.html
deleted file mode 100644
index 7f29d95..000
--- 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSByteBufferCell.html
+++ /dev/null
@@ -1,462 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-
-
-
-PrivateCellUtil.FirstOnRowColTSByteBufferCell (Apache HBase 
3.0.0-SNAPSHOT API)
-
-
-
-
-
-var methods = {"i0":10,"i1":10};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-
-
-JavaScript is disabled on your browser.
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-PrevClass
-NextClass
-
-
-Frames
-NoFrames
-
-
-AllClasses
-
-
-
-var methods = 
{"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9};
+var methods = 
{"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -111,7 +111,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Private
  @InterfaceStability.Stable
-public final class ReplicationPeerConfigUtil
+public final class ReplicationPeerConfigUtil
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 Helper for TableCFs Operations.
 
@@ -171,8 +171,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 Method and Description
 
 
-static void
-appendTableCFsToReplicationPeerConfig(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapTableName,? extends http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">Collectionhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtableCfs,
+static ReplicationPeerConfig
+appendTableCFsToReplicationPeerConfig(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapTableName,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtableCfs,
  ReplicationPeerConfigpeerConfig)
 
 
@@ -216,50 +216,54 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 convertToString(http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">Stringnamespaces)
 
 
+private static http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapTableName,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+copyTableCFsMap(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapTableName,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringpreTableCfs)
+
+
 static 
org.apache.hadoop.conf.Configuration
 getPeerClusterConfiguration(org.apache.hadoop.conf.Configurationconf,
ReplicationPeerDescriptionpeer)
 Returns the configuration needed to talk to the remote 
slave cluster.
 
 
-
+
 static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.TableCF
 getTableCF(org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.TableCF[]tableCFs,
   http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringtable)
 Get TableCF in TableCFs, if not exist, return null.
 
 
-
+
 static ReplicationPeerConfig
 parsePeerFrom(byte[]bytes)
 
-
+
 static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.TableCF[]
 parseTableCFs(byte[]bytes)
 Parse bytes into TableCFs.
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapTableName,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 

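One public route that ends up exercising this table-CFs utility is the Admin replication-peer API; a hedged sketch with placeholder peer id and table names:

  Map<TableName, List<String>> tableCfs = new HashMap<>();
  tableCfs.put(TableName.valueOf("my_table"), Arrays.asList("cf1", "cf2"));
  admin.appendReplicationPeerTableCFs("1", tableCfs);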
[32/51] [partial] hbase-site git commit: Published site at .

2017-12-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4b2cc17/apidocs/src-html/org/apache/hadoop/hbase/ClusterMetrics.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/ClusterMetrics.html 
b/apidocs/src-html/org/apache/hadoop/hbase/ClusterMetrics.html
new file mode 100644
index 000..2996037
--- /dev/null
+++ b/apidocs/src-html/org/apache/hadoop/hbase/ClusterMetrics.html
@@ -0,0 +1,270 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+Source code
+
+
+
+
+001/**
+002 *
+003 * Licensed to the Apache Software 
Foundation (ASF) under one
+004 * or more contributor license 
agreements.  See the NOTICE file
+005 * distributed with this work for 
additional information
+006 * regarding copyright ownership.  The 
ASF licenses this file
+007 * to you under the Apache License, 
Version 2.0 (the
+008 * "License"); you may not use this file 
except in compliance
+009 * with the License.  You may obtain a 
copy of the License at
+010 *
+011 * 
http://www.apache.org/licenses/LICENSE-2.0
+012 *
+013 * Unless required by applicable law or 
agreed to in writing, software
+014 * distributed under the License is 
distributed on an "AS IS" BASIS,
+015 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+016 * See the License for the specific 
language governing permissions and
+017 * limitations under the License.
+018 */
+019
+020package org.apache.hadoop.hbase;
+021
+022import 
edu.umd.cs.findbugs.annotations.Nullable;
+023import java.util.List;
+024import java.util.Map;
+025import 
org.apache.hadoop.hbase.client.RegionInfo;
+026import 
org.apache.hadoop.hbase.master.RegionState;
+027import 
org.apache.yetus.audience.InterfaceAudience;
+028
+029/**
+030 * Metrics information on the HBase 
cluster.
+031 * p
+032 * ttClusterMetrics/tt 
provides clients with information such as:
+033 * ul
+034 * liThe count and names of 
region servers in the cluster./li
+035 * liThe count and names of dead 
region servers in the cluster./li
+036 * liThe name of the active 
master for the cluster./li
+037 * liThe name(s) of the backup 
master(s) for the cluster, if they exist./li
+038 * liThe average cluster 
load./li
+039 * liThe number of regions 
deployed on the cluster./li
+040 * liThe number of requests since 
last report./li
+041 * liDetailed region server 
loading and resource usage information,
+042 *  per server and per 
region./li
+043 * liRegions in transition at 
master/li
+044 * liThe unique cluster 
ID/li
+045 * /ul
+046 * tt{@link Option}/tt 
provides a way to get desired ClusterStatus information.
+047 * The following codes will get all the 
cluster information.
+048 * pre
+049 * {@code
+050 * // Original version still works
+051 * Admin admin = connection.getAdmin();
+052 * ClusterMetrics metrics = 
admin.getClusterStatus();
+053 * // or below, a new version which has 
the same effects
+054 * ClusterMetrics metrics = 
admin.getClusterStatus(EnumSet.allOf(Option.class));
+055 * }
+056 * /pre
+057 * If information about live servers is all that is wanted,
+058 * then code in the following way:
+059 * pre
+060 * {@code
+061 * Admin admin = connection.getAdmin();
+062 * ClusterMetrics metrics = 
admin.getClusterStatus(EnumSet.of(Option.LIVE_SERVERS));
+063 * }
+064 * /pre
+065 */
+066@InterfaceAudience.Public
+067public interface ClusterMetrics {
+068
+069  /**
+070   * @return the HBase version string as 
reported by the HMaster
+071   */
+072  @Nullable
+073  String getHBaseVersion();
+074
+075  /**
+076   * @return the names of region servers 
on the dead list
+077   */
+078  ListServerName 
getDeadServerNames();
+079
+080  /**
+081   * @return the names of region servers 
on the live list
+082   */
+083  MapServerName, ServerMetrics 
getLiveServerMetrics();
+084
+085  /**
+086   * @return the number of regions 
deployed on the cluster
+087   */
+088  default int getRegionCount() {
+089return 
getLiveServerMetrics().entrySet().stream()
+090.mapToInt(v - 
v.getValue().getRegionMetrics().size()).sum();
+091  }
+092
+093  /**
+094   * @return the number of requests since 
last report
+095   */
+096  default long getRequestCount() {
+097return 
getLiveServerMetrics().entrySet().stream()
+098.flatMap(v - 
v.getValue().getRegionMetrics().values().stream())
+099
.mapToLong(RegionMetrics::getRequestCount).sum();
+100  }
+101
+102  /**
+103   * Returns detailed information about 
the current master {@link ServerName}.
+104   * @return current master information 
if it exists
+105   */
+106  @Nullable
+107  ServerName getMasterName();
+108
+109  /**
+110   * @return the names of backup 
masters
+111   */
+112  ListServerName 
getBackupMasterNames();
+113
+114  @InterfaceAudience.Private
+115  ListRegionState 
getRegionStatesInTransition();
+116
+117  @Nullable
+118  String getClusterId();
+119
+120  ListString 
getMasterCoprocessorNames();
+121
+122  default long 

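Extending the javadoc example above with the default methods the interface defines (Admin setup and the EnumSet import are assumed):

  ClusterMetrics metrics =
      admin.getClusterStatus(EnumSet.of(Option.LIVE_SERVERS));
  System.out.println("regions=" + metrics.getRegionCount()
      + " requests=" + metrics.getRequestCount());
  metrics.getLiveServerMetrics().keySet()
      .forEach(sn -> System.out.println(sn.getServerName()));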
[32/51] [partial] hbase-site git commit: Published site at .

2017-12-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.html
index 342840a..4c42b96 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.html
@@ -27,224 +27,223 @@
 019package 
org.apache.hadoop.hbase.mapreduce;
 020
 021import java.io.IOException;
-022
-023import org.apache.commons.logging.Log;
-024import 
org.apache.commons.logging.LogFactory;
-025import 
org.apache.yetus.audience.InterfaceAudience;
-026import 
org.apache.hadoop.conf.Configurable;
-027import 
org.apache.hadoop.conf.Configuration;
-028import 
org.apache.hadoop.hbase.HBaseConfiguration;
-029import 
org.apache.hadoop.hbase.HConstants;
-030import 
org.apache.hadoop.hbase.TableName;
-031import 
org.apache.hadoop.hbase.TableNotEnabledException;
-032import 
org.apache.hadoop.hbase.TableNotFoundException;
-033import 
org.apache.hadoop.hbase.client.Admin;
-034import 
org.apache.hadoop.hbase.client.BufferedMutator;
-035import 
org.apache.hadoop.hbase.client.Connection;
-036import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-037import 
org.apache.hadoop.hbase.client.Delete;
-038import 
org.apache.hadoop.hbase.client.Mutation;
-039import 
org.apache.hadoop.hbase.client.Put;
-040import 
org.apache.hadoop.mapreduce.JobContext;
-041import 
org.apache.hadoop.mapreduce.OutputCommitter;
-042import 
org.apache.hadoop.mapreduce.OutputFormat;
-043import 
org.apache.hadoop.mapreduce.RecordWriter;
-044import 
org.apache.hadoop.mapreduce.TaskAttemptContext;
-045
/**
 * Convert Map/Reduce output and write it to an HBase table. The KEY is ignored
 * while the output value <u>must</u> be either a {@link Put} or a
 * {@link Delete} instance.
 */
@InterfaceAudience.Public
public class TableOutputFormat<KEY> extends OutputFormat<KEY, Mutation>
    implements Configurable {

  private static final Log LOG = LogFactory.getLog(TableOutputFormat.class);

  /** Job parameter that specifies the output table. */
  public static final String OUTPUT_TABLE = "hbase.mapred.outputtable";

  /**
   * Prefix for configuration property overrides to apply in {@link #setConf(Configuration)}.
   * For keys matching this prefix, the prefix is stripped, and the value is set in the
   * configuration with the resulting key, ie. the entry "hbase.mapred.output.key1 = value1"
   * would be set in the configuration as "key1 = value1".  Use this to set properties
   * which should only be applied to the {@code TableOutputFormat} configuration and not the
   * input configuration.
   */
  public static final String OUTPUT_CONF_PREFIX = "hbase.mapred.output.";

  /**
   * Optional job parameter to specify a peer cluster.
   * Used specifying remote cluster when copying between hbase clusters (the
   * source is picked up from <code>hbase-site.xml</code>).
   * @see TableMapReduceUtil#initTableReducerJob(String, Class, org.apache.hadoop.mapreduce.Job, Class, String, String, String)
   */
  public static final String QUORUM_ADDRESS = OUTPUT_CONF_PREFIX + "quorum";

  /** Optional job parameter to specify peer cluster's ZK client port */
  public static final String QUORUM_PORT = OUTPUT_CONF_PREFIX + "quorum.port";

  /** Optional specification of the rs class name of the peer cluster */
  public static final String REGION_SERVER_CLASS = OUTPUT_CONF_PREFIX + "rs.class";
  /** Optional specification of the rs impl name of the peer cluster */
  public static final String REGION_SERVER_IMPL = OUTPUT_CONF_PREFIX + "rs.impl";

  /** The configuration. */
  private Configuration conf = null;

  /**
   * Writes the reducer output to an HBase table.
   */
  protected class TableRecordWriter
      extends RecordWriter<KEY, Mutation> {

    private Connection connection;
    private BufferedMutator mutator;

    /**
     * @throws IOException
     */
    public TableRecordWriter() throws IOException {
      String tableName = conf.get(OUTPUT_TABLE);
      this.connection = ConnectionFactory.createConnection(conf);
      this.mutator = connection.getBufferedMutator(TableName.valueOf(tableName));
      LOG.info("Created table instance for " + tableName);
    }
    /**
     * Closes the writer, in this case flush table commits.
     *
     * @param context  The context.
     * @throws IOException When closing the writer fails.
     * @see RecordWriter#close(TaskAttemptContext)

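For reference, a minimal sketch of wiring a reduce job to the parameters documented above; the table name, the quorum value and the job name are placeholders, not values taken from this commit.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.mapreduce.Job;

public class TableOutputFormatSetup {
  public static Job createJob() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Job parameter that names the output table.
    conf.set(TableOutputFormat.OUTPUT_TABLE, "mytable");
    // Prefixed keys are stripped and applied only to the output-side configuration,
    // e.g. this becomes "hbase.zookeeper.quorum = peer-zk" for the writer.
    conf.set(TableOutputFormat.OUTPUT_CONF_PREFIX + "hbase.zookeeper.quorum", "peer-zk");

    Job job = Job.getInstance(conf, "write-to-hbase");
    job.setOutputFormatClass(TableOutputFormat.class);
    return job;
  }
}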
[32/51] [partial] hbase-site git commit: Published site at .

2017-12-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4c0cfa5/apidocs/src-html/org/apache/hadoop/hbase/HRegionInfo.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HRegionInfo.html 
b/apidocs/src-html/org/apache/hadoop/hbase/HRegionInfo.html
index 332eacd..b0b983c 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/HRegionInfo.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/HRegionInfo.html
@@ -128,905 +128,929 @@
  /**
   * @return Return a short, printable name for this region (usually encoded name) for use in logging.
   */
  public String getShortNameToLog() {
    return prettyPrint(this.getEncodedName());
  }

  /**
   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
   * Use {@link org.apache.hadoop.hbase.client.RegionInfo#getShortNameToLog(RegionInfo...)}.
   */
  @Deprecated
  public static String getShortNameToLog(HRegionInfo... hris) {
    return RegionInfo.getShortNameToLog(Arrays.asList(hris));
  }

  /**
   * @return Return a String of short, printable names for <code>hris</code>
   * (usually encoded name) for use in logging.
   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
   * Use {@link org.apache.hadoop.hbase.client.RegionInfo#getShortNameToLog(List)}.
   */
  @Deprecated
  public static String getShortNameToLog(final List<HRegionInfo> hris) {
    return RegionInfo.getShortNameToLog(hris.stream().collect(Collectors.toList()));
  }

  /**
   * Use for logging.
   * @param encodedRegionName The encoded region name.
   * @return <code>hbase:meta</code> if passed <code>1028785192</code> else returns
   * <code>encodedRegionName</code>
   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
   * Use {@link RegionInfo#prettyPrint(String)}.
   */
  @Deprecated
  @InterfaceAudience.Private
  public static String prettyPrint(final String encodedRegionName) {
    return RegionInfo.prettyPrint(encodedRegionName);
  }

  private byte [] endKey = HConstants.EMPTY_BYTE_ARRAY;
  // This flag is in the parent of a split while the parent is still referenced
  // by daughter regions.  We USED to set this flag when we disabled a table
  // but now table state is kept up in zookeeper as of 0.90.0 HBase.
  private boolean offLine = false;
  private long regionId = -1;
  private transient byte [] regionName = HConstants.EMPTY_BYTE_ARRAY;
  private boolean split = false;
  private byte [] startKey = HConstants.EMPTY_BYTE_ARRAY;
  private int hashCode = -1;
  //TODO: Move NO_HASH to HStoreFile which is really the only place it is used.
  public static final String NO_HASH = null;
  private String encodedName = null;
  private byte [] encodedNameAsBytes = null;
  private int replicaId = DEFAULT_REPLICA_ID;

  // Current TableName
  private TableName tableName = null;

  // Duplicated over in RegionInfoDisplay
  final static String DISPLAY_KEYS_KEY = RegionInfoDisplay.DISPLAY_KEYS_KEY;
  public final static byte[] HIDDEN_END_KEY = RegionInfoDisplay.HIDDEN_END_KEY;
  public final static byte[] HIDDEN_START_KEY = RegionInfoDisplay.HIDDEN_START_KEY;

  /** HRegionInfo for first meta region */
  // TODO: How come Meta regions still do not have encoded region names? Fix.
  public static final HRegionInfo FIRST_META_REGIONINFO =
      new HRegionInfo(1L, TableName.META_TABLE_NAME);

  private void setHashCode() {
    int result = Arrays.hashCode(this.regionName);
    result ^= this.regionId;
    result ^= Arrays.hashCode(this.startKey);
    result ^= Arrays.hashCode(this.endKey);
    result ^= Boolean.valueOf(this.offLine).hashCode();
    result ^= Arrays.hashCode(this.tableName.getName());
    result ^= this.replicaId;
    this.hashCode = result;
  }

  /**
   * Private constructor used constructing HRegionInfo for the
   * first meta regions
   */
  private HRegionInfo(long regionId, TableName tableName) {
    this(regionId, tableName, DEFAULT_REPLICA_ID);
  }

  public HRegionInfo(long regionId, TableName tableName, int replicaId) {
    super();
    this.regionId = regionId;
    this.tableName = tableName;
    this.replicaId = replicaId;
    // Note: First Meta region replicas names are in old format
    this.regionName = createRegionName(tableName, null, regionId, replicaId, false);
    setHashCode();
  }

  public HRegionInfo(final TableName tableName) {
    this(tableName, null, null);
  }

  /**
   * Construct HRegionInfo with explicit parameters
   *
   * @param tableName the table

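As the deprecation notes above point out, the static helpers now live on org.apache.hadoop.hbase.client.RegionInfo. A brief sketch of the replacement call; the region list is a placeholder, not something taken from this commit.

import java.util.List;
import org.apache.hadoop.hbase.client.RegionInfo;

public class ShortNameLogging {
  // Preferred over the deprecated HRegionInfo.getShortNameToLog(List) static.
  static String shortNames(List<RegionInfo> regions) {
    return RegionInfo.getShortNameToLog(regions);
  }
}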
[32/51] [partial] hbase-site git commit: Published site at .

2017-12-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/071f974b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index 72f912c..69740b5 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -207,8 +207,8 @@
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
 org.apache.hadoop.hbase.quotas.ThrottlingException.Type
-org.apache.hadoop.hbase.quotas.QuotaScope
 org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
+org.apache.hadoop.hbase.quotas.QuotaScope
 org.apache.hadoop.hbase.quotas.OperationQuota.OperationType
 org.apache.hadoop.hbase.quotas.ThrottleType
 org.apache.hadoop.hbase.quotas.QuotaType

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/071f974b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
index 22688c5..e53718e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
@@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static interface RSRpcServices.LogDelegate
+static interface RSRpcServices.LogDelegate
 
 
 
@@ -151,7 +151,7 @@ var activeTableTab = "activeTableTab";
 
 
 logBatchWarning
-voidlogBatchWarning(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringfirstRegionName,
+voidlogBatchWarning(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringfirstRegionName,
  intsum,
  introwSizeWarnThreshold)
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/071f974b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
index f88a65b..89abfe7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static final class RSRpcServices.RegionScannerCloseCallBack
+private static final class RSRpcServices.RegionScannerCloseCallBack
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements RpcCallback
 An Rpc callback for closing a RegionScanner.
@@ -209,7 +209,7 @@ implements 
 
 scanner
-private finalRegionScanner scanner
+private finalRegionScanner scanner
 
 
 
@@ -226,7 +226,7 @@ implements 
 
 RegionScannerCloseCallBack
-publicRegionScannerCloseCallBack(RegionScannerscanner)
+publicRegionScannerCloseCallBack(RegionScannerscanner)
 
 
 
@@ -243,7 +243,7 @@ implements 
 
 run
-publicvoidrun()
+publicvoidrun()
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
interface:RpcCallback
 Called at the end of an Rpc Call RpcCallContext

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/071f974b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
index a6a7c22..d6e269b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static final class 

[32/51] [partial] hbase-site git commit: Published site at .

2017-12-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dc4e5c85/devapidocs/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.html
index c573154..80d44f3 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value={"Coprocesssor","Phoenix"})
-public abstract class ReaderBase
+public abstract class ReaderBase
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements AbstractFSWALProvider.Reader
 
@@ -306,7 +306,7 @@ implements 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.apache.commons.logging.Log LOG
 
 
 
@@ -315,7 +315,7 @@ implements 
 
 conf
-protectedorg.apache.hadoop.conf.Configuration conf
+protectedorg.apache.hadoop.conf.Configuration conf
 
 
 
@@ -324,7 +324,7 @@ implements 
 
 fs
-protectedorg.apache.hadoop.fs.FileSystem fs
+protectedorg.apache.hadoop.fs.FileSystem fs
 
 
 
@@ -333,7 +333,7 @@ implements 
 
 path
-protectedorg.apache.hadoop.fs.Path path
+protectedorg.apache.hadoop.fs.Path path
 
 
 
@@ -342,7 +342,7 @@ implements 
 
 edit
-protectedlong edit
+protectedlong edit
 
 
 
@@ -351,7 +351,7 @@ implements 
 
 fileLength
-protectedlong fileLength
+protectedlong fileLength
 
 
 
@@ -360,7 +360,7 @@ implements 
 
 compressionContext
-protectedCompressionContext compressionContext
+protectedCompressionContext compressionContext
 Compression context to use when reading.  Can be null if no compression.
 
 
@@ -370,7 +370,7 @@ implements 
 
 emptyCompressionContext
-protectedboolean emptyCompressionContext
+protectedboolean emptyCompressionContext
 
 
 
@@ -387,7 +387,7 @@ implements 
 
 ReaderBase
-publicReaderBase()
+publicReaderBase()
 Default constructor.
 
 
@@ -405,7 +405,7 @@ implements 
 
 init
-public void init(org.apache.hadoop.fs.FileSystem fs,
+public void init(org.apache.hadoop.fs.FileSystem fs,
                  org.apache.hadoop.fs.Path path,
                  org.apache.hadoop.conf.Configuration conf,
                  org.apache.hadoop.fs.FSDataInputStream stream)
@@ -429,7 +429,7 @@ implements 
 
 next
-publicWAL.Entrynext()
+publicWAL.Entrynext()
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Specified by:
@@ -445,7 +445,7 @@ implements 
 
 next
-publicWAL.Entrynext(WAL.Entryreuse)
+publicWAL.Entrynext(WAL.Entryreuse)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Specified by:
@@ -461,7 +461,7 @@ implements 
 
 seek
-publicvoidseek(longpos)
+publicvoidseek(longpos)
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Specified by:
@@ -477,7 +477,7 @@ implements 
 
 initReader
-protected abstract String initReader(org.apache.hadoop.fs.FSDataInputStream stream)
+protected abstract String initReader(org.apache.hadoop.fs.FSDataInputStream stream)
                              throws IOException
 Initializes the log reader with a particular stream (may be 
null).
  Reader assumes ownership of the stream if not null and may use it. Called 
once.
@@ -495,7 +495,7 @@ implements 
 
 initAfterCompression
-protected abstractvoidinitAfterCompression()
+protected abstractvoidinitAfterCompression()
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Initializes the compression after the shared stuff has been 
initialized. Called once.
 
@@ -510,7 +510,7 @@ implements 
 
 initAfterCompression
-protected abstractvoidinitAfterCompression(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringcellCodecClsName)
+protected abstractvoidinitAfterCompression(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringcellCodecClsName)
   throws 

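The init/next/seek methods listed above make up the whole read loop for a WAL file. Below is a minimal sketch of draining a reader, assuming the usual WAL.Reader contract that next() returns null at end of file; how the concrete ReaderBase subclass is obtained is deliberately left out, since that is provider-specific and not shown in this fragment.

import java.io.IOException;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.WAL;

public class WalDrain {
  static long countEntries(AbstractFSWALProvider.Reader reader) throws IOException {
    long n = 0;
    WAL.Entry entry;
    // next() hands back entries in order; a null return signals end of file.
    while ((entry = reader.next()) != null) {
      n++;
    }
    return n;
  }
}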
[32/51] [partial] hbase-site git commit: Published site at .

2017-12-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4abd958d/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 198c0e8..723431c 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -289,7 +289,7 @@
 3453
 0
 0
-20619
+20567
 
 Files
 
@@ -454,55 +454,45 @@
 0
 1
 
-org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java
-0
-0
-1
-
 org/apache/hadoop/hbase/GenericTestUtils.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/HBaseCluster.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/HBaseClusterManager.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase/HBaseConfiguration.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/HBaseTestCase.java
 0
 0
 35
-
+
 org/apache/hadoop/hbase/HBaseTestingUtility.java
 0
 0
 267
-
+
 org/apache/hadoop/hbase/HColumnDescriptor.java
 0
 0
 40
-
-org/apache/hadoop/hbase/HConstants.java
-0
-0
-4
 
 org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
 0
@@ -1302,7 +1292,7 @@
 org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
 0
 0
-25
+23
 
 org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.java
 0
@@ -1709,3980 +1699,3970 @@
 0
 1
 
-org/apache/hadoop/hbase/client/ClusterStatusListener.java
-0
-0
-3
-
 org/apache/hadoop/hbase/client/ColumnCountOnRowFilter.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
 0
 0
 54
-
+
 org/apache/hadoop/hbase/client/CompactType.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/CompleteScanResultCache.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ConnectionFactory.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/ConnectionImplementation.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/client/ConnectionUtils.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/DelayingRunner.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/Delete.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/Get.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/client/HBaseAdmin.java
 0
 0
 92
-
+
 org/apache/hadoop/hbase/client/HConnectionTestingUtility.java
 0
 0
 14
-
+
 org/apache/hadoop/hbase/client/HRegionLocator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/HTable.java
 0
 0
-67
-
+46
+
 org/apache/hadoop/hbase/client/HTableMultiplexer.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ImmutableHRegionInfo.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/Increment.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/MasterCallable.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/MasterCoprocessorRpcChannelImpl.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/MetaCache.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/MetricsConnection.java
 0
 0
 41
-
+
 org/apache/hadoop/hbase/client/MultiAction.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/client/MultiResponse.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/MultiServerCallable.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/Mutation.java
 0
 0
 23
-
+
 org/apache/hadoop/hbase/client/NoOpRetryableCallerInterceptor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/NoncedRegionServerCallable.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/Operation.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/OperationWithAttributes.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/PackagePrivateFieldAccessor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase/client/Put.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/client/Query.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/client/QuotaStatusCalls.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
 0
 0
 102
-
+
 org/apache/hadoop/hbase/client/RegionAdminServiceCallable.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannelImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/RegionInfo.java
 0
 0
 55
-
+
 org/apache/hadoop/hbase/client/RegionInfoBuilder.java
 0
 0
-7
-
+6
+
 org/apache/hadoop/hbase/client/RegionInfoDisplay.java
 0
 0
 17
-
+
 org/apache/hadoop/hbase/client/RegionLocator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/RegionReplicaUtil.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/client/RegionServerCallable.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/RegionServerCoprocessorRpcChannelImpl.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/RequestController.java
 0
 0
 4
-
+
 

[32/51] [partial] hbase-site git commit: Published site at .

2017-12-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e23b49ba/devapidocs/constant-values.html
--
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index b98..6853af1 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -3705,21 +3705,21 @@
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 date
-"Wed Dec  6 14:42:16 UTC 2017"
+"Sat Dec  9 14:42:54 UTC 2017"
 
 
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 revision
-"ebd8841e0ee9ca1ab7b6dab55178761360b8d85a"
+"c98bab51de22a20990ff8805825e8638e0686464"
 
 
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 srcChecksum
-"3a84e36c1447b2269eb4eb0e7f8272a2"
+"fe57a4ce20b5a7072155e3899b6cdb15"
 
 
 
@@ -5043,89 +5043,75 @@
 "CACHE_BLOOMS_ON_WRITE"
 
 
-
-
-publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-CACHE_DATA_IN_L1
-"CACHE_DATA_IN_L1"
-
-
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CACHE_DATA_ON_WRITE
 "CACHE_DATA_ON_WRITE"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CACHE_INDEX_ON_WRITE
 "CACHE_INDEX_ON_WRITE"
 
-
+
 
 
 privatestaticfinalbyte
 COLUMN_DESCRIPTOR_VERSION
 11
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 COMPRESS_TAGS
 "COMPRESS_TAGS"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 COMPRESSION
 "COMPRESSION"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 COMPRESSION_COMPACT
 "COMPRESSION_COMPACT"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 DATA_BLOCK_ENCODING
 "DATA_BLOCK_ENCODING"
 
-
+
 
 
 publicstaticfinalboolean
 DEFAULT_BLOCKCACHE
 true
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_BLOCKSIZE
 65536
 
-
+
 
 
 publicstaticfinalboolean
 DEFAULT_CACHE_BLOOMS_ON_WRITE
 false
 
-
-
-
-publicstaticfinalboolean
-DEFAULT_CACHE_DATA_IN_L1
-false
-
 
 
 
@@ -9073,75 +9059,61 @@
 "hbase.bucketcache.bucket.sizes"
 
 
-
-
-publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-BUCKET_CACHE_COMBINED_KEY
-"hbase.bucketcache.combinedcache.enabled"
-
-
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 BUCKET_CACHE_PERSISTENT_PATH_KEY
 "hbase.bucketcache.persistent.path"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 BUCKET_CACHE_WRITER_QUEUE_KEY
 "hbase.bucketcache.writer.queuelength"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 BUCKET_CACHE_WRITER_THREADS_KEY
 "hbase.bucketcache.writer.threads"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CACHE_BLOCKS_ON_WRITE_KEY
 "hbase.rs.cacheblocksonwrite"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CACHE_BLOOM_BLOCKS_ON_WRITE_KEY
 "hfile.block.bloom.cacheonwrite"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CACHE_DATA_BLOCKS_COMPRESSED_KEY
 "hbase.block.data.cachecompressed"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CACHE_DATA_ON_READ_KEY
 "hbase.block.data.cacheonread"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CACHE_INDEX_BLOCKS_ON_WRITE_KEY
 "hfile.block.index.cacheonwrite"
 
-
-
-
-publicstaticfinalboolean
-DEFAULT_BUCKET_CACHE_COMBINED
-true
-
 
 
 
@@ 

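Several of the keys listed above belong to the off-heap bucket cache. A small illustrative snippet that sets them programmatically; the sizes, path and thread counts are placeholders, not recommended values, and only key names shown on this constants page are used.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class BucketCacheSettings {
  static Configuration withBucketCache() {
    Configuration conf = HBaseConfiguration.create();
    // Keys taken from the constant values listed above; values are placeholders.
    conf.set("hbase.bucketcache.bucket.sizes", "8192,16384,65536");
    conf.set("hbase.bucketcache.persistent.path", "/tmp/bucketcache.meta");
    conf.setInt("hbase.bucketcache.writer.threads", 3);
    conf.setInt("hbase.bucketcache.writer.queuelength", 64);
    conf.setBoolean("hbase.rs.cacheblocksonwrite", true);
    return conf;
  }
}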
[32/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
index d615cc5..c25b53c 100644
--- a/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
+++ b/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
@@ -1007,7 +1007,7 @@ default
 
 preCompactSelection
-default void preCompactSelection(ObserverContext<RegionCoprocessorEnvironment> c,
+default void preCompactSelection(ObserverContext<RegionCoprocessorEnvironment> c,
                                  Store store,
                                  List<? extends StoreFile> candidates,
                                  CompactionLifeCycleTracker tracker)
@@ -1016,7 +1016,8 @@ defaultParameters:
 c - the environment provided by the region server
@@ -1034,7 +1035,7 @@ default
 
 postCompactSelection
-defaultvoidpostCompactSelection(ObserverContextRegionCoprocessorEnvironmentc,
+defaultvoidpostCompactSelection(ObserverContextRegionCoprocessorEnvironmentc,
   Storestore,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends StoreFileselected,
   CompactionLifeCycleTrackertracker,
@@ -1057,7 +1058,7 @@ default
 
 preCompactScannerOpen
-defaultvoidpreCompactScannerOpen(ObserverContextRegionCoprocessorEnvironmentc,
+defaultvoidpreCompactScannerOpen(ObserverContextRegionCoprocessorEnvironmentc,
Storestore,
ScanTypescanType,
ScanOptionsoptions,
@@ -1085,7 +1086,7 @@ default
 
 preCompact
-defaultInternalScannerpreCompact(ObserverContextRegionCoprocessorEnvironmentc,
+defaultInternalScannerpreCompact(ObserverContextRegionCoprocessorEnvironmentc,
Storestore,
InternalScannerscanner,
ScanTypescanType,
@@ -1121,7 +1122,7 @@ default
 
 postCompact
-defaultvoidpostCompact(ObserverContextRegionCoprocessorEnvironmentc,
+defaultvoidpostCompact(ObserverContextRegionCoprocessorEnvironmentc,
  Storestore,
  StoreFileresultFile,
  CompactionLifeCycleTrackertracker,
@@ -1146,7 +1147,7 @@ default
 
 preClose
-defaultvoidpreClose(ObserverContextRegionCoprocessorEnvironmentc,
+defaultvoidpreClose(ObserverContextRegionCoprocessorEnvironmentc,
   booleanabortRequested)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Called before the region is reported as closed to the 
master.
@@ -1165,7 +1166,7 @@ default
 
 postClose
-defaultvoidpostClose(ObserverContextRegionCoprocessorEnvironmentc,
+defaultvoidpostClose(ObserverContextRegionCoprocessorEnvironmentc,
booleanabortRequested)
 Called after the region is reported as closed to the 
master.
 
@@ -1181,13 +1182,14 @@ default
 
 preGetOp
-default void preGetOp(ObserverContext<RegionCoprocessorEnvironment> c,
+default void preGetOp(ObserverContext<RegionCoprocessorEnvironment> c,
                       Get get,
                       List<Cell> result)
                throws IOException
 Called before the client performs a Get
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
 
 Parameters:
 c - the environment provided by the region server
@@ -1206,7 +1208,7 @@ default
 
 postGetOp
-defaultvoidpostGetOp(ObserverContextRegionCoprocessorEnvironmentc,
+defaultvoidpostGetOp(ObserverContextRegionCoprocessorEnvironmentc,
Getget,
http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellresult)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
@@ -1230,13 +1232,14 @@ default
 
 preExists
-defaultbooleanpreExists(ObserverContextRegionCoprocessorEnvironmentc,

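The preGetOp description above is the hook most small observers start from. A brief illustrative observer follows, assuming the 2.0-style RegionCoprocessor/RegionObserver wiring (getRegionObserver returning Optional); the class name is a placeholder, and c.bypass() is mentioned only as the way the javadoc above describes skipping the default read.

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;

public class GetLoggingObserver implements RegionCoprocessor, RegionObserver {
  @Override
  public Optional<RegionObserver> getRegionObserver() {
    return Optional.of(this);
  }

  @Override
  public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> c, Get get,
      List<Cell> result) throws IOException {
    // Runs before the region services the Get; populate `result` and invoke
    // c.bypass() here if the default read should be skipped, per the javadoc above.
    System.out.println("preGetOp for row " + Arrays.toString(get.getRow()));
  }
}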
[32/51] [partial] hbase-site git commit: Published site at .

2017-12-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c54c242b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.html
index 81f352c..0782ea7 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.html
@@ -51,39 +51,39 @@
 043import 
java.util.concurrent.ConcurrentHashMap;
 044import 
java.util.concurrent.ConcurrentMap;
 045import 
java.util.concurrent.ThreadLocalRandom;
-046
-047import 
javax.security.sasl.SaslException;
-048
-049import org.apache.commons.logging.Log;
-050import 
org.apache.commons.logging.LogFactory;
-051import 
org.apache.hadoop.conf.Configuration;
-052import 
org.apache.hadoop.hbase.CellScanner;
-053import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-054import 
org.apache.hadoop.hbase.exceptions.ConnectionClosingException;
-055import 
org.apache.hadoop.hbase.io.ByteArrayOutputStream;
-056import 
org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback;
-057import 
org.apache.hadoop.hbase.security.HBaseSaslRpcClient;
-058import 
org.apache.hadoop.hbase.security.SaslUtil;
-059import 
org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-060import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
-061import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message.Builder;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader;
-070import 
org.apache.hadoop.hbase.trace.TraceUtil;
-071import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-072import 
org.apache.hadoop.hbase.util.ExceptionUtil;
-073import org.apache.hadoop.io.IOUtils;
-074import 
org.apache.hadoop.ipc.RemoteException;
-075import org.apache.hadoop.net.NetUtils;
-076import 
org.apache.hadoop.security.UserGroupInformation;
-077import 
org.apache.htrace.core.TraceScope;
-078import 
org.apache.yetus.audience.InterfaceAudience;
+046import 
javax.security.sasl.SaslException;
+047import org.apache.commons.logging.Log;
+048import 
org.apache.commons.logging.LogFactory;
+049import 
org.apache.hadoop.conf.Configuration;
+050import 
org.apache.hadoop.hbase.CellScanner;
+051import 
org.apache.hadoop.hbase.DoNotRetryIOException;
+052import 
org.apache.hadoop.hbase.exceptions.ConnectionClosingException;
+053import 
org.apache.hadoop.hbase.io.ByteArrayOutputStream;
+054import 
org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback;
+055import 
org.apache.hadoop.hbase.security.HBaseSaslRpcClient;
+056import 
org.apache.hadoop.hbase.security.SaslUtil;
+057import 
org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
+058import 
org.apache.hadoop.hbase.trace.TraceUtil;
+059import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+060import 
org.apache.hadoop.hbase.util.ExceptionUtil;
+061import org.apache.hadoop.io.IOUtils;
+062import 
org.apache.hadoop.ipc.RemoteException;
+063import org.apache.hadoop.net.NetUtils;
+064import 
org.apache.hadoop.security.UserGroupInformation;
+065import 
org.apache.hadoop.util.StringUtils;
+066import 
org.apache.htrace.core.TraceScope;
+067import 
org.apache.yetus.audience.InterfaceAudience;
+068
+069import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
+070import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message.Builder;
+071import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
+072import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;
+074import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta;
+075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
+076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse;
+077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
+078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader;
 079
 080/**
 081 * Thread that reads responses and 
notifies callers. Each connection owns a socket connected to a
@@ -271,533 +271,545 @@
 263/*
 264 * The max number of retries is 

[32/51] [partial] hbase-site git commit: Published site at .

2017-11-30 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/713d773f/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.html 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.html
index e81ea36..e1c6c45 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":9,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":9,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class RegionStates
+public class RegionStates
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 RegionStates contains a set of Maps that describes the 
in-memory state of the AM, with
  the regions available in the system, the region in transition, the offline 
regions and
@@ -257,8 +257,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 RegionStates.ServerStateNode
-addRegionToServer(ServerNameserverName,
- RegionStates.RegionStateNoderegionNode)
+addRegionToServer(RegionStates.RegionStateNoderegionNode)
 
 
 RegionStates.RegionFailedOpen
@@ -274,222 +273,214 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 protected RegionStates.RegionStateNode
-createRegionNode(RegionInforegionInfo)
+createRegionStateNode(RegionInforegionInfo)
 
 
-private RegionState
-createRegionState(RegionStates.RegionStateNodenode)
-
-
 void
 deleteRegion(RegionInforegionInfo)
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
 getAssignedRegions()
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapTableName,http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
 getAssignmentsByTable()
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapTableName,http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
 getAssignmentsByTable(booleanforceByCluster)
 This is an EXPENSIVE clone.
 
 
-
+
 double
 getAverageLoad()
 
-
+
 RegionStates.RegionFailedOpen
 getFailedOpen(RegionInforegionInfo)
 
-
+
 protected RegionStates.RegionStateNode
-getOrCreateRegionNode(RegionInforegionInfo)
+getOrCreateRegionStateNode(RegionInforegionInfo)
 
-
+
 RegionStates.ServerStateNode
 getOrCreateServer(ServerNameserverName)
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapRegionInfo,ServerName
 getRegionAssignments()
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapRegionState.State,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
 getRegionByStateOfTable(TableNametableName)
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionState
 

[32/51] [partial] hbase-site git commit: Published site at .

2017-11-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/fd365a2b/devapidocs/org/apache/hadoop/hbase/client/AsyncTable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncTable.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncTable.html
index 4e26db8..8067f23 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncTable.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncTable.html
@@ -179,7 +179,7 @@ public interface 
 Thttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in 
java.util.concurrent">CompletableFutureT
 batch(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Rowactions)
-Method that does a batch call on Deletes, Gets, Puts, 
Increments and Appends.
+Method that does a batch call on Deletes, Gets, Puts, 
Increments, Appends and RowMutations.
 
 
 
@@ -987,13 +987,13 @@ public interface 
 batch
<T> List<CompletableFuture<T>> batch(List<? extends Row> actions)
-Method that does a batch call on Deletes, Gets, Puts, 
Increments and Appends. The ordering of
- execution of the actions is not defined. Meaning if you do a Put and a Get in 
the same
- batch(java.util.List?
 extends org.apache.hadoop.hbase.client.Row) call, you will not 
necessarily be guaranteed that the Get returns what the Put
- had put.
+Method that does a batch call on Deletes, Gets, Puts, 
Increments, Appends and RowMutations. The
+ ordering of execution of the actions is not defined. Meaning if you do a Put 
and a Get in the
+ same batch(java.util.List?
 extends org.apache.hadoop.hbase.client.Row) call, you will not 
necessarily be guaranteed that the Get returns what the
+ Put had put.
 
 Parameters:
-actions - list of Get, Put, Delete, Increment, Append 
objects
+actions - list of Get, Put, Delete, Increment, Append, and 
RowMutations objects
 Returns:
A list of CompletableFutures that represent the result for each action.
 
@@ -1010,7 +1010,7 @@ public interface Parameters:
-actions - list of Get, Put, Delete, Increment, Append 
objects
+actions - list of Get, Put, Delete, Increment, Append and 
RowMutations objects
 Returns:
 A list of the result for the actions. Wrapped by a http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in 
java.util.concurrent">CompletableFuture.
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/fd365a2b/devapidocs/org/apache/hadoop/hbase/client/AsyncTableImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncTableImpl.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncTableImpl.html
index 0500d46..d472bcd 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncTableImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncTableImpl.html
@@ -205,7 +205,7 @@ implements 
 Thttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in 
java.util.concurrent">CompletableFutureT
 batch(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Rowactions)
-Method that does a batch call on Deletes, Gets, Puts, 
Increments and Appends.
+Method that does a batch call on Deletes, Gets, Puts, 
Increments, Appends and RowMutations.
 
 
 
@@ -896,15 +896,15 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in 
java.util.concurrent">CompletableFutureTbatch(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends Rowactions)
 Description copied from 
interface:AsyncTable
-Method that does a batch call on Deletes, Gets, Puts, 
Increments and Appends. The ordering of
- 

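batch(), as described above, accepts a mixed list of actions and hands back one CompletableFuture per action. A minimal sketch follows; the row and column names are placeholders, and the AsyncTable reference is left raw because its type parameterization varies across snapshots and is not shown in this fragment.

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.hbase.client.AsyncTable;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.util.Bytes;

public class AsyncBatchSketch {
  static void run(AsyncTable table) {
    List<Row> actions = Arrays.asList(
        new Put(Bytes.toBytes("row1"))
            .addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v")),
        new Get(Bytes.toBytes("row2")),
        new Delete(Bytes.toBytes("row3")));
    // One future per action; execution order across actions is not defined,
    // so the Get is not guaranteed to observe the Put.
    List<CompletableFuture<Object>> futures = table.batch(actions);
    futures.forEach(CompletableFuture::join);
  }
}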
[32/51] [partial] hbase-site git commit: Published site at .

2017-11-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b9722a17/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html 
b/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
index c2b4126..aa7897b 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
@@ -1456,7 +1456,7 @@ implements MasterObserver
-postAddReplicationPeer,
 postAddRSGroup,
 postAssign,
 postBalance,
 postBalanceRSGroup, postBalanceSwitch,
 postClearDeadServers,
 postCloneSnapshot,
 postCompletedDeleteTableAction,
 postCompletedDisableTableAction,
 postCompletedEnableTableAction,
 postCompletedMergeRegionsAction,
 postCompletedModifyTableAction
 , postCompletedSplitRegionAction,
 postCompletedTruncateTableAction,
 postCreateNamespace,
 postCreateTable, postDecommissionRegionServers,
 postDeleteSnapshot,
 postDisableReplicationPeer,
 postDisableTable,
 postEnableReplicationPeer,
 postEnableTable,
 postGetClusterStatus,
 postGetLocks,
 postGetNamespaceDescriptor,
 postGetProcedures,
 postGetReplicationPeerConfig,
 postListDecommissionedRegionServers,
 postListReplicationPeers,
 postListSn
 apshot, postLockHeartbeat,
 postMergeRegions,
 postMergeRegionsCommitAction,
 postModifyNamespace,
 postMove,
 postMoveServers,
 postMoveServersAndTables,
 postMoveTables,
 postRecommissionRegionServer,
 postRegionOffline,
 postRemoveReplicationPeer,
 postRemoveRSGroup,
 postRequest
 Lock, postRestoreSnapshot,
 postRollBackMergeRegionsAction,
 postRollBackSplitRegionAction,
 postSetNamespaceQuota,
 postSetTableQuota,
 postSetUserQuota,
 postSetUserQuota,
 postSetUserQuota, postSnapshot,
 postTableFlush,
 postUnassign,
 postUpdateReplica
 tionPeerConfig, preCreateTableAction,
 preDeleteTableAction,
 preDisableTableAction,
 preEnableTableAction,
 preGetTableNames,
 preListNamespaceDescriptors,
 preMasterInitialization,
 preMergeRegionsAction,
 preMergeRegionsCommitAction, preModifyTableAction,
 preSplitRegionAction,
 preSplitRegionAfterMETAAction,
 preSplitRegionBeforeMETAAction,
 preTruncateTableAction
+postAddReplicationPeer,
 postAddRSGroup,
 postAssign,
 postBalance,
 postBalanceRSGroup, postBalanceSwitch,
 postClearDeadServers,
 postCloneSnapshot,
 postCompletedDeleteTableAction,
 postCompletedDisableTableAction,
 postCompletedEnableTableAction,
 postCompletedMergeRegionsAction,
 
 postCompletedModifyTableAction, postCompletedSplitRegionAction,
 postCompletedTruncateTableAction,
 postCreateNamespace,
 postCreateTable, postDecommissionRegionServers,
 postDeleteSnapshot,
 postDisableReplicationPeer,
 postDisableTable,
 postEnableReplicationPeer,
 postEnableTable,
 postGetClusterStatus,
 postGetLocks,
 postGetNamespaceDescriptor,
 postGetProcedures,
 postGetReplicationPeerConfig,
 postListDecommissionedRegionServers,
 postListReplicationPeers,
 postListSnapshot, postLockHeartbeat,
 postMergeRegions,
 postMergeRegionsCommitAction,
 postModifyNamespace,
 postMove,
 postMoveServers,
 postMoveServersAndTables,
 postMoveTables,
 postRecommissionRegionServer,
 postRegionOffline,
 postRemoveReplicationPeer,
 postRemoveRSGroup,
 postRequestLock, postRestoreSnapshot,
 postRollBackMergeRegionsAction,
 postRollBackSplitRegionAction,
 
 postSetNamespaceQuota, postSetTableQuota,
 postSetUserQuota,
 postSetUserQuota,
 postSetUserQuota, postSnapshot,
 postTableFlush,
 postUnassign,
 postUpdateReplicationPeerConfig, preCreateTableAction,
 preDeleteTableAction,
 preDisableTableAction,
 preEnableTableAction,
 preGetTableNames,
 preListNamespaceDescriptors,
 preMasterInitialization,
 preMergeRegionsAction,
 preMergeRegionsCommitAction,
 preModifyTableAction,
 preSplitRegionAction,
 preSplitRegionAfterMETAAction,
 preSplitRegionBeforeMETAAction
 , preTruncateTableAction
 
 
 
@@ -1834,7 +1834,7 @@ implements 
 
 requirePermission
-privatevoidrequirePermission(Useruser,
+privatevoidrequirePermission(Useruser,
http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringrequest,
TableNametableName,
byte[]family,
@@ -1860,7 +1860,7 @@ implements 
 
 requireTablePermission
-privatevoidrequireTablePermission(Useruser,
+privatevoidrequireTablePermission(Useruser,
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringrequest,
 TableNametableName,
 byte[]family,
@@ -1886,7 +1886,7 @@ implements 
 
 requireAccess
-privatevoidrequireAccess(Useruser,
+privatevoidrequireAccess(Useruser,

[32/51] [partial] hbase-site git commit: Published site at .

2017-11-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b1eb7453/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.ValueAndTagRewriteByteBufferCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.ValueAndTagRewriteByteBufferCell.html
 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.ValueAndTagRewriteByteBufferCell.html
index 8d9b628..633e91e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.ValueAndTagRewriteByteBufferCell.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.ValueAndTagRewriteByteBufferCell.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 
 All Implemented Interfaces:
-http://docs.oracle.com/javase/8/docs/api/java/lang/Cloneable.html?is-external=true;
 title="class or interface in java.lang">Cloneable, Cell, ExtendedCell, HeapSize, SettableSequenceId, SettableTimestamp
+http://docs.oracle.com/javase/8/docs/api/java/lang/Cloneable.html?is-external=true;
 title="class or interface in java.lang">Cloneable, Cell, ExtendedCell, HeapSize, RawCell, SettableSequenceId, SettableTimestamp
 
 
 Enclosing class:
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class PrivateCellUtil.ValueAndTagRewriteByteBufferCell
+static class PrivateCellUtil.ValueAndTagRewriteByteBufferCell
 extends PrivateCellUtil.TagRewriteByteBufferCell
 
 
@@ -166,6 +166,13 @@ extends ExtendedCell
 CELL_NOT_BASED_ON_CHUNK
 
+
+
+
+
+Fields inherited from interfaceorg.apache.hadoop.hbase.RawCell
+MAX_TAGS_LENGTH
+
 
 
 
@@ -271,6 +278,13 @@ extends ExtendedCell
 getChunkId
 
+
+
+
+
+Methods inherited from interfaceorg.apache.hadoop.hbase.RawCell
+checkForTagsLength,
 cloneTags,
 getTag,
 getTags
+
 
 
 
@@ -291,7 +305,7 @@ extends 
 
 value
-protectedbyte[] value
+protectedbyte[] value
 
 
 
@@ -308,7 +322,7 @@ extends 
 
 ValueAndTagRewriteByteBufferCell
-publicValueAndTagRewriteByteBufferCell(ByteBufferCellcell,
+publicValueAndTagRewriteByteBufferCell(ByteBufferCellcell,
 byte[]value,
 byte[]tags)
 
@@ -327,7 +341,7 @@ extends 
 
 getValueArray
-publicbyte[]getValueArray()
+publicbyte[]getValueArray()
 Description copied from 
interface:Cell
 Contiguous raw bytes that may start at any index in the 
containing array. Max length is
  Integer.MAX_VALUE which is 2,147,483,647 bytes.
@@ -347,7 +361,7 @@ extends 
 
 getValueOffset
-publicintgetValueOffset()
+publicintgetValueOffset()
 
 Specified by:
 getValueOffsetin
 interfaceCell
@@ -364,7 +378,7 @@ extends 
 
 getValueLength
-publicintgetValueLength()
+publicintgetValueLength()
 
 Specified by:
 getValueLengthin
 interfaceCell
@@ -381,7 +395,7 @@ extends 
 
 getValueByteBuffer
-publichttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffergetValueByteBuffer()
+publichttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffergetValueByteBuffer()
 
 Overrides:
 getValueByteBufferin
 classPrivateCellUtil.TagRewriteByteBufferCell
@@ -396,7 +410,7 @@ extends 
 
 getValuePosition
-publicintgetValuePosition()
+publicintgetValuePosition()
 
 Overrides:
 getValuePositionin
 classPrivateCellUtil.TagRewriteByteBufferCell
@@ -411,7 +425,7 @@ extends 
 
 heapSize
-publiclongheapSize()
+publiclongheapSize()
 
 Specified by:
 heapSizein
 interfaceHeapSize
@@ -429,7 +443,7 @@ extends 
 
 write
-public int write(http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true; title="class or interface in java.io">OutputStream out,
+public int write(http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true; title="class or interface in java.io">OutputStream out,
                  boolean withTags)
           throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException
 Description copied from interface: ExtendedCell
@@ -460,7 +474,7 @@ extends 
 
 getSerializedSize
-public int getSerializedSize(boolean withTags)
+public int getSerializedSize(boolean withTags)

 Specified by:
 getSerializedSize in interface ExtendedCell
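write(OutputStream, withTags) and getSerializedSize(withTags) are the streaming serialization pair on the internal ExtendedCell API: size first, then write the cell in KeyValue format. A minimal sketch, not from this commit, assuming only the signatures shown in this page:

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.hadoop.hbase.ExtendedCell;

public class CellStreamWrite {

  // Sketch (internal API): serialize an ExtendedCell in KeyValue format,
  // pre-sizing the output buffer from getSerializedSize(withTags).
  static byte[] serialize(ExtendedCell cell, boolean withTags) throws IOException {
    int expected = cell.getSerializedSize(withTags);
    ByteArrayOutputStream out = new ByteArrayOutputStream(expected);
    int written = cell.write(out, withTags);   // returns the bytes written
    assert written == expected;
    return out.toByteArray();
  }
}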
@@ -484,7 +498,7 @@ extends 
 
 write
-public void write(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBuffer buf,
+public void write(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBuffer buf,
                   int offset)
 Description copied from interface: ExtendedCell
 Write this Cell into the given buf's offset in a KeyValue format.
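The ByteBuffer variant writes the cell into a caller-owned buffer at a chosen offset, again in KeyValue format. A minimal sketch, not from this commit; sizing the buffer with getSerializedSize(true) is an assumption here, so use the withTags flag that matches what your buffer should hold:

import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.ExtendedCell;

public class CellBufferWrite {

  // Sketch (internal API): write a cell into a caller-owned ByteBuffer at a
  // chosen offset. The withTags=true sizing is an assumption, not taken from
  // this page.
  static ByteBuffer toBuffer(ExtendedCell cell) {
    ByteBuffer buf = ByteBuffer.allocate(cell.getSerializedSize(true));
    cell.write(buf, 0);                // fills buf starting at offset 0
    return buf;
  }
}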
@@ -505,7 +519,7 @@ extends 
 
 deepClone
-public ExtendedCell deepClone()
+public ExtendedCell deepClone()
 Description copied from interface: ExtendedCell
 Does a deep copy of the contents 
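deepClone matters for ByteBuffer-backed cells like this one: when the backing buffer is shared or pooled, a deep copy detaches the cell so it can safely outlive that buffer. A one-line sketch, not from this commit:

import org.apache.hadoop.hbase.ExtendedCell;

public class CellDeepClone {

  // Sketch (internal API): copy the cell's contents into freshly allocated
  // memory so it no longer references any shared backing buffer.
  static ExtendedCell detach(ExtendedCell cell) {
    return cell.deepClone();
  }
}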

[32/51] [partial] hbase-site git commit: Published site at .

2017-11-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a616706/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
index 5cd7172..2f81057 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":9,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":42,"i27":42,"i28":42,"i29":42,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":42,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":9,"i80":10,"i81":10,"i82":9,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":41,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":42,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109
 
":10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":9,"i116":10,"i117":10,"i118":10,"i119":42,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":10,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":10,"i152":10,"i153":10,"i154":10,"i155":10,"i156":10,"i157":10,"i158":10,"i159":10,"i160":10,"i161":10,"i162":10,"i163":10,"i164":10,"i165":10,"i166":10,"i167":10,"i168":10,"i169":10,"i170":10,"i171":10,"i172":42,"i173":10,"i174":10,"i175":10,"i176":10,"i177":10,"i178":10,"i179":10,"i180":10,"i181":10,"i182":10,"i183":10,"i184":10,"i185":10,"i186":10,"i187":10,"i188":10,"i189":10,"i190":10,"i191":10,"i192":10,"i193":10,"i194":10,"i195":10,"i196":10,"i197":10,"i198":42,"i199":10,"i200":10,"i201":10,"i202":10,"i203":10,"i204":10,"i205":10,"i206":10,"i207":10,"i208":10,"i2
 
09":10,"i210":10,"i211":10,"i212":10,"i213":10,"i214":10,"i215":10,"i216":10,"i217":10,"i218":10,"i219":10,"i220":10,"i221":10,"i222":10,"i223":10,"i224":10,"i225":10,"i226":10,"i227":10,"i228":10,"i229":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":9,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":42,"i27":42,"i28":42,"i29":42,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":42,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":9,"i80":10,"i81":10,"i82":9,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":41,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":42,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109
 
":10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":9,"i116":10,"i117":10,"i118":10,"i119":42,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":10,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":10,"i152":10,"i153":10,"i154":10,"i155":10,"i156":10,"i157":10,"i158":10,"i159":10,"i160":10,"i161":10,"i162":10,"i163":10,"i164":10,"i165":10,"i166":10,"i167":10,"i168":10,"i169":10,"i170":10,"i171":42,"i172":10,"i173":10,"i174":10,"i175":10,"i176":10,"i177":10,"i178":10,"i179":10,"i180":10,"i181":10,"i182":10,"i183":10,"i184":10,"i185":10,"i186":10,"i187":10,"i188":10,"i189":10,"i190":10,"i191":10,"i192":10,"i193":10,"i194":10,"i195":10,"i196":10,"i197":42,"i198":10,"i199":10,"i200":10,"i201":10,"i202":10,"i203":10,"i204":10,"i205":10,"i206":10,"i207":10,"i208":10,"i2
 
09":10,"i210":10,"i211":10,"i212":10,"i213":10,"i214":10,"i215":10,"i216":10,"i217":10,"i218":10,"i219":10,"i220":10,"i221":10,"i222":10,"i223":10,"i224":10,"i225":10,"i226":10,"i227":10,"i228":10};
 var tabs = {65535:["t0","All 
