[30/51] [partial] hbase-site git commit: Published site at 620d70d6186fb800299bcc62ad7179fccfd1be41.

2019-01-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa3fb87f/devapidocs/org/apache/hadoop/hbase/thrift2/client/ThriftTable.Scanner.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/thrift2/client/ThriftTable.Scanner.html 
b/devapidocs/org/apache/hadoop/hbase/thrift2/client/ThriftTable.Scanner.html
new file mode 100644
index 000..301d5a7
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/thrift2/client/ThriftTable.Scanner.html
@@ -0,0 +1,473 @@
+ThriftTable.Scanner (Apache HBase 3.0.0-SNAPSHOT API)
+org.apache.hadoop.hbase.thrift2.client
+Class ThriftTable.Scanner
+
+
+
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.thrift2.client.ThriftTable.Scanner
+
+
+
+
+
+
+
+All Implemented Interfaces:
+https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true;
 title="class or interface in java.io">Closeable, https://docs.oracle.com/javase/8/docs/api/java/lang/AutoCloseable.html?is-external=true;
 title="class or interface in java.lang">AutoCloseable, https://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true;
 title="class or interface in java.lang">IterableResult, ResultScanner
+
+
+Enclosing class:
+ThriftTable
+
+
+
+private class ThriftTable.Scanner
+extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+implements ResultScanner
+A scanner that performs scans against the thrift server;
+ getScannerResults is used by this scanner.
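
The page above documents the thrift2 client-side scanner. As a hedged illustration only
(connection setup and the table name "t1" are assumed, not taken from this page), a
ResultScanner such as this one is normally consumed through the standard Table API:

  import java.io.IOException;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.Result;
  import org.apache.hadoop.hbase.client.ResultScanner;
  import org.apache.hadoop.hbase.client.Scan;
  import org.apache.hadoop.hbase.client.Table;

  public class ScanExample {
    static void scanAll(Connection conn) throws IOException {
      // try-with-resources closes both the table and the scanner.
      try (Table table = conn.getTable(TableName.valueOf("t1"));
           ResultScanner scanner = table.getScanner(new Scan())) {
        for (Result result : scanner) {  // Iterable<Result>; next() is called under the hood
          System.out.println(result);
        }
      }
    }
  }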
+
+
+
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+protected https://docs.oracle.com/javase/8/docs/api/java/util/Queue.html?is-external=true;
 title="class or interface in java.util">QueueResult
+cache
+
+
+protected Result
+lastResult
+
+
+protected 
org.apache.hadoop.hbase.thrift2.generated.TScan
+scan
+
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Constructor and Description
+
+
+Scanner(Scanscan)
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All MethodsInstance MethodsConcrete Methods
+
+Modifier and Type
+Method and Description
+
+
+void
+close()
+Closes the scanner and releases any resources it has 
allocated
+
+
+
+protected byte[]
+createClosestRowAfter(byte[]row)
+Create the closest row after the specified row
+
+
+
+ScanMetrics
+getScanMetrics()
+
+
+Result
+next()
+Grab the next row's worth of values.
+
+
+
+boolean
+renewLease()
+Allow the client to renew the scanner's lease on the 
server.
+
+
+
+private void
+setupNextScanner()
+
+
+
+
+
+
+Methods inherited from classjava.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or interface in java.lang">hashCode, https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, https://docs.oracle.com/javase/8/docs/api/ja
 va/lang/Object.html?is-external=true#notifyAll--" title="class or interface in 
java.lang">notifyAll, 

[30/51] [partial] hbase-site git commit: Published site at 281d6429e55149cc4c05430dcc1d1dc136d8b245.

2019-01-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/901d593a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index ad9c789..a699972 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -556,23 +556,23 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.client.ScannerCallable.MoreResults
 org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState
+org.apache.hadoop.hbase.client.CompactionState
 org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState
+org.apache.hadoop.hbase.client.Durability
 org.apache.hadoop.hbase.client.MobCompactPartitionPolicy
-org.apache.hadoop.hbase.client.CompactType
-org.apache.hadoop.hbase.client.RequestController.ReturnCode
-org.apache.hadoop.hbase.client.Consistency
-org.apache.hadoop.hbase.client.Scan.ReadType
 org.apache.hadoop.hbase.client.RegionLocateType
 org.apache.hadoop.hbase.client.TableState.State
-org.apache.hadoop.hbase.client.AsyncRequestFutureImpl.Retry
-org.apache.hadoop.hbase.client.Durability
-org.apache.hadoop.hbase.client.AbstractResponse.ResponseType
-org.apache.hadoop.hbase.client.IsolationLevel
+org.apache.hadoop.hbase.client.ScannerCallable.MoreResults
+org.apache.hadoop.hbase.client.RequestController.ReturnCode
+org.apache.hadoop.hbase.client.CompactType
+org.apache.hadoop.hbase.client.Scan.ReadType
 org.apache.hadoop.hbase.client.MasterSwitchType
+org.apache.hadoop.hbase.client.AbstractResponse.ResponseType
+org.apache.hadoop.hbase.client.Consistency
 org.apache.hadoop.hbase.client.SnapshotType
-org.apache.hadoop.hbase.client.CompactionState
+org.apache.hadoop.hbase.client.IsolationLevel
+org.apache.hadoop.hbase.client.AsyncRequestFutureImpl.Retry
 org.apache.hadoop.hbase.client.AsyncProcessTask.SubmittedRows
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/901d593a/devapidocs/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.SplitTaskDetails.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.SplitTaskDetails.html
 
b/devapidocs/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.SplitTaskDetails.html
index 22ed76c..c241b4b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.SplitTaskDetails.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.SplitTaskDetails.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static interface SplitLogWorkerCoordination.SplitTaskDetails
+public static interface SplitLogWorkerCoordination.SplitTaskDetails
 Interface for log-split tasks. Used to carry implementation details in an
 encapsulated way through Handlers to the coordination API.
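
As a hedged sketch only (the field and constructor below are illustrative, not the actual
ZooKeeper-based implementation), a trivial carrier of these details could look like:

  class SimpleSplitTaskDetails implements SplitLogWorkerCoordination.SplitTaskDetails {
    private final String walFile;

    SimpleSplitTaskDetails(String walFile) {
      this.walFile = walFile;
    }

    @Override
    public String getWALFile() {  // full WAL file path in HDFS, as documented below
      return walFile;
    }
  }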
 
@@ -155,7 +155,7 @@ var activeTableTab = "activeTableTab";
 
 
 getWALFile
-https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetWALFile()
+https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetWALFile()
 
 Returns:
 full file path in HDFS for the WAL file to be split.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/901d593a/devapidocs/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.html
 
b/devapidocs/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.html
index 7fcdca7..e70c1b5 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.html
@@ -75,13 +75,13 @@ var activeTableTab = "activeTableTab";
 
 Summary:
 Nested|
-Field|
+Field|
 Constr|
 Method
 
 
 Detail:
-Field|
+Field|
 Constr|
 Method
 
@@ -148,25 +148,6 @@ public interface 
-
-
-
-Field Summary
-
-Fields
-
-Modifier and Type
-Field and Description
-
-
-static int
-DEFAULT_MAX_SPLITTERS
-
-
-
-
 
 
 
@@ -253,27 +234,6 @@ public interface 
 
 
-
-
-
-
-
-Field Detail
-
-
-
-
-
-DEFAULT_MAX_SPLITTERS
-static finalint 

[30/51] [partial] hbase-site git commit: Published site at 466fa920fee572fe20db3b77ebf539dc304d5f31.

2019-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bf59208/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-summary.html 
b/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
index 98ae775..1ce7bf0 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
@@ -711,182 +711,188 @@
 
 
 
+FutureUtils
+
+Helper class for processing futures.
+
+
+
 GetJavaProperty
 
 A generic way for querying Java properties.
 
 
-
+
 Hash
 
 This class represents a common API for hashing 
functions.
 
 
-
+
 HashedBytes
 
 This class encapsulates a byte array and overrides hashCode and equals so
 that its identity is based on the data rather than the array instance.
 
 
-
+
 HashKeyT
 
 Used to calculate the hash Hash algorithms for 
Bloomfilters.
 
 
-
+
 HasThread
 
 Abstract class which contains a Thread and delegates the 
common Thread
  methods to that instance.
 
 
-
+
 HBaseConfTool
 
 Tool that prints out a configuration.
 
 
-
+
 HBaseFsck
 
 HBaseFsck (hbck) is a tool for checking and repairing 
region consistency and
  table integrity problems in a corrupted HBase.
 
 
-
+
 HBaseFsck.FileLockCallable
 
 
-
+
 HBaseFsck.HBaseFsckTool
 
 This is a Tool wrapper that gathers -Dxxx=yyy configuration 
settings from the command line.
 
 
-
+
 HBaseFsck.HbckInfo
 
 Maintain information about a particular region.
 
 
-
+
 HBaseFsck.HdfsEntry
 
 Stores the regioninfo entries from HDFS
 
 
-
+
 HBaseFsck.MetaEntry
 
 Stores the regioninfo entries scanned from META
 
 
-
+
 HBaseFsck.OnlineEntry
 
 Stores the regioninfo retrieved from Online region 
servers.
 
 
-
+
 HBaseFsck.PrintingErrorReporter
 
 
-
+
 HBaseFsck.RegionBoundariesInformation
 
 
-
+
 HBaseFsck.WorkItemHdfsRegionInfo
 
 Contact hdfs and get all information about specified table 
directory into
  regioninfo list.
 
 
-
+
 HBaseFsck.WorkItemOverlapMerge
 
 
-
+
 HBaseFsck.WorkItemRegion
 
 Contact a region server and get all information from 
it
 
 
-
+
 HBaseFsckRepair
 
 This class contains helper methods that repair parts of 
hbase's filesystem
  contents.
 
 
-
+
 HFileArchiveUtil
 
 Helper class for all utilities related to 
archival/retrieval of HFiles
 
 
-
+
 IdLock
 
 Allows multiple concurrent clients to lock on a numeric id 
with a minimal
  memory overhead.
 
 
-
+
 IdLock.Entry
 
 An entry returned to the client as a lock object
 
 
-
+
 IdReadWriteLockT
 
 Allows multiple concurrent clients to lock on a numeric id 
with ReentrantReadWriteLock.
 
 
-
+
 ImmutableByteArray
 
 Mainly used as keys for HashMap.
 
 
-
+
 IncrementingEnvironmentEdge
 
 Uses an incrementing algorithm instead of the default.
 
 
-
+
 JenkinsHash
 
 Produces 32-bit hash for hash table lookup.
 
 
-
+
 JRubyFormat
 
 Utility class for converting objects to JRuby.
 
 
-
+
 JSONBean
 
 Utility for doing JSON and MBeans.
 
 
-
+
 JsonMapper
 
 Utility class for converting objects to JSON
 
 
-
+
 JSONMetricUtil
 
 
-
+
 JVM
 
 This class is a wrapper for the implementation of
@@ -895,97 +901,97 @@
  depending on the runtime (vendor) used.
 
 
-
+
 JVMClusterUtil
 
 Utility used running a cluster all in the one JVM.
 
 
-
+
 JVMClusterUtil.MasterThread
 
 Datastructure to hold Master Thread and Master 
instance
 
 
-
+
 JVMClusterUtil.RegionServerThread
 
 Datastructure to hold RegionServer Thread and RegionServer 
instance
 
 
-
+
 JvmPauseMonitor
 
 Class which sets up a simple thread which runs in a loop 
sleeping
  for a short interval of time.
 
 
-
+
 JvmPauseMonitor.GcTimes
 
 
-
+
 JvmVersion
 
 Certain JVM versions are known to be unstable with 
HBase.
 
 
-
+
 KeyLockerK
 
 A utility class to manage a set of locks.
 
 
-
+
 LossyCounting
 
 LossyCounting utility, bounded data structure that 
maintains approximate high frequency
  elements in data stream.
 
 
-
+
 ManualEnvironmentEdge
 
 An environment edge that uses a manually set value.
 
 
-
+
 MapreduceDependencyClasspathTool
 
 Generate a classpath string containing any jars required by 
mapreduce jobs.
 
 
-
+
 MapReduceExtendedCell
 
 A wrapper for a cell to be used with mapreduce, as the 
output value class for mappers/reducers.
 
 
-
+
 MD5Hash
 
 Utility class for MD5
  MD5 hash produces a 128-bit digest.
 
 
-
+
 Methods
 
 
-
+
 ModifyRegionUtils
 
 Utility methods for interacting with the regions.
 
 
-
+
 MultiHConnection
 
 Provides ability to create multiple Connection instances 
and allows to process a batch of
  actions using CHTable.doBatchWithCallback()
 
 
-
+
 MunkresAssignment
 
 Computes the optimal (minimal cost) assignment of jobs to 
workers (or other
@@ -996,126 +1002,126 @@
  Problem: An Improved Version of Munkres' Algorithm".
 
 
-
+
 MurmurHash
 
 This is a very fast, non-cryptographic hash suitable for 
general hash-based
  lookup.
 
 
-
+
 

[30/51] [partial] hbase-site git commit: Published site at e4b6b4afb933a961f543537875f87a2dc62d3757.

2019-01-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/849d84a8/devapidocs/overview-tree.html
--
diff --git a/devapidocs/overview-tree.html b/devapidocs/overview-tree.html
index 97deec1..2ea250a 100644
--- a/devapidocs/overview-tree.html
+++ b/devapidocs/overview-tree.html
@@ -1364,7 +1364,11 @@
 
 
 org.apache.hadoop.hbase.mapreduce.SyncTable (implements 
org.apache.hadoop.util.Tool)
-org.apache.hadoop.hbase.thrift2.ThriftServer (implements 
org.apache.hadoop.util.Tool)
+org.apache.hadoop.hbase.thrift.ThriftServer (implements 
org.apache.hadoop.util.Tool)
+
+org.apache.hadoop.hbase.thrift2.ThriftServer
+
+
 org.apache.hadoop.hbase.mapreduce.replication.VerifyReplication (implements 
org.apache.hadoop.util.Tool)
 org.apache.hadoop.hbase.mapreduce.WALPlayer (implements 
org.apache.hadoop.util.Tool)
 org.apache.hadoop.hbase.zookeeper.ZKAclReset (implements 
org.apache.hadoop.util.Tool)
@@ -1387,6 +1391,7 @@
 org.apache.hadoop.hbase.client.ConnectionImplementation.ServerErrorTracker
 org.apache.hadoop.hbase.client.ConnectionImplementation.ServerErrorTracker.ServerErrors
 org.apache.hadoop.hbase.client.ConnectionUtils
+org.apache.hadoop.hbase.thrift.Constants
 org.apache.hadoop.hbase.constraint.ConstraintProcessor (implements 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessor, 
org.apache.hadoop.hbase.coprocessor.RegionObserver)
 org.apache.hadoop.hbase.constraint.Constraints
 org.apache.hadoop.hbase.io.crypto.Context (implements 
org.apache.hadoop.conf.Configurable)
@@ -1867,6 +1872,12 @@
 org.apache.hadoop.hbase.security.HBaseSaslRpcServer
 org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslDigestCallbackHandler 
(implements javax.security.auth.callback.https://docs.oracle.com/javase/8/docs/api/javax/security/auth/callback/CallbackHandler.html?is-external=true;
 title="class or interface in 
javax.security.auth.callback">CallbackHandler)
 org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslGssCallbackHandler 
(implements javax.security.auth.callback.https://docs.oracle.com/javase/8/docs/api/javax/security/auth/callback/CallbackHandler.html?is-external=true;
 title="class or interface in 
javax.security.auth.callback">CallbackHandler)
+org.apache.hadoop.hbase.thrift.HBaseServiceHandler
+
+org.apache.hadoop.hbase.thrift2.ThriftHBaseServiceHandler (implements 
org.apache.hadoop.hbase.thrift2.generated.THBaseService.Iface)
+org.apache.hadoop.hbase.thrift.ThriftHBaseServiceHandler (implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface)
+
+
 org.apache.hadoop.hbase.HColumnDescriptor (implements 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor, 
java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT)
 
 org.apache.hadoop.hbase.client.ImmutableHColumnDescriptor
@@ -3842,15 +3853,10 @@
 org.apache.hadoop.hbase.util.Threads
 org.apache.hadoop.hbase.util.Threads.PrintThreadInfoLazyHolder
 org.apache.hadoop.hbase.regionserver.ThreadSafeMemStoreSizing (implements 
org.apache.hadoop.hbase.regionserver.MemStoreSizing)
-org.apache.hadoop.hbase.thrift2.ThriftHBaseServiceHandler (implements 
org.apache.hadoop.hbase.thrift2.generated.THBaseService.Iface)
-org.apache.hadoop.hbase.thrift2.ThriftHBaseServiceHandler.THBaseServiceMetricsProxy
 (implements java.lang.reflect.https://docs.oracle.com/javase/8/docs/api/java/lang/reflect/InvocationHandler.html?is-external=true;
 title="class or interface in java.lang.reflect">InvocationHandler)
+org.apache.hadoop.hbase.thrift.ThriftHBaseServiceHandler.ResultScannerWrapper
 org.apache.hadoop.hbase.thrift.ThriftHttpServlet.HttpKerberosServerAction 
(implements java.security.https://docs.oracle.com/javase/8/docs/api/java/security/PrivilegedExceptionAction.html?is-external=true;
 title="class or interface in 
java.security">PrivilegedExceptionActionT)
 org.apache.hadoop.hbase.thrift.ThriftHttpServlet.RemoteUserIdentity
 org.apache.hadoop.hbase.thrift.ThriftMetrics
-org.apache.hadoop.hbase.thrift.ThriftServer
-org.apache.hadoop.hbase.thrift.ThriftServerRunner (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable)
-org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler (implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface)
-org.apache.hadoop.hbase.thrift.ThriftServerRunner.ResultScannerWrapper
 org.apache.hadoop.hbase.thrift2.ThriftUtilities
 org.apache.hadoop.hbase.thrift.ThriftUtilities
 org.apache.hadoop.hbase.regionserver.throttle.ThroughputControlUtil
@@ -4085,7 +4091,7 @@
 
 org.apache.hadoop.hbase.thrift.generated.IOError (implements 
java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Cloneable.html?is-external=true;
 title="class or interface in java.lang">Cloneable, java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 

[30/51] [partial] hbase-site git commit: Published site at 3ab895979b643a2980bcdb7fee2078f14b614210.

2019-01-01 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.html 
b/devapidocs/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.html
index 0e3831c..1902152 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.html
@@ -382,6 +382,6 @@
-Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/client/Operation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Operation.html 
b/devapidocs/org/apache/hadoop/hbase/client/Operation.html
index ab3872b..a0a2a4b 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Operation.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Operation.html
@@ -469,6 +469,6 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 
-Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/client/OperationWithAttributes.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/OperationWithAttributes.html 
b/devapidocs/org/apache/hadoop/hbase/client/OperationWithAttributes.html
index e4a7f21..f05aa90 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/OperationWithAttributes.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/OperationWithAttributes.html
@@ -522,6 +522,6 @@
-Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/client/PackagePrivateFieldAccessor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/PackagePrivateFieldAccessor.html 
b/devapidocs/org/apache/hadoop/hbase/client/PackagePrivateFieldAccessor.html
index 1d3460e..4169915 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/PackagePrivateFieldAccessor.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/PackagePrivateFieldAccessor.html
@@ -290,6 +290,6 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 
-Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.html 
b/devapidocs/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.html
index 800cc5a..53acbfb 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.html
@@ -387,6 +387,6 @@
-Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.html 
b/devapidocs/org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.html
index 34a3678..3384979 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.html
@@ -756,6 +756,6 @@
-Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2019 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ef0dd56d/devapidocs/org/apache/hadoop/hbase/client/Put.html

[30/51] [partial] hbase-site git commit: Published site at 7820ba1dbdba58b1002cdfde08eb21aa7a0bb6da.

2018-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/09ea0d5f/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
index 17a35ff..d5ccbf9 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class HRegion
+public class HRegion
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements HeapSize, PropagatingConfigurationObserver, Region
 Regions store data for a certain region of a table.  It 
stores all columns
@@ -1782,7 +1782,7 @@ implements 
 
 
-(package private) static HRegion
+static HRegion
 newHRegion(org.apache.hadoop.fs.PathtableDir,
   WALwal,
   org.apache.hadoop.fs.FileSystemfs,
@@ -2413,7 +2413,7 @@ implements 
 
 LOG
-private static finalorg.slf4j.Logger LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -2422,7 +2422,7 @@ implements 
 
 LOAD_CFS_ON_DEMAND_CONFIG_KEY
-public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String LOAD_CFS_ON_DEMAND_CONFIG_KEY
+public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String LOAD_CFS_ON_DEMAND_CONFIG_KEY
 
 See Also:
 Constant
 Field Values
@@ -2435,7 +2435,7 @@ implements 
 
 HBASE_MAX_CELL_SIZE_KEY
-public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HBASE_MAX_CELL_SIZE_KEY
+public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HBASE_MAX_CELL_SIZE_KEY
 
 See Also:
 Constant
 Field Values
@@ -2448,7 +2448,7 @@ implements 
 
 DEFAULT_MAX_CELL_SIZE
-public static finalint DEFAULT_MAX_CELL_SIZE
+public static finalint DEFAULT_MAX_CELL_SIZE
 
 See Also:
 Constant
 Field Values
@@ -2461,7 +2461,7 @@ implements 
 
 DEFAULT_DURABILITY
-private static finalDurability DEFAULT_DURABILITY
+private static finalDurability DEFAULT_DURABILITY
 This is the global default value for durability. All 
tables/mutations not
  defining a durability or using USE_DEFAULT will default to this value.
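
A hedged illustration of the per-mutation override described above (row, family and
qualifier values are made up): any durability other than USE_DEFAULT set on a mutation
takes precedence over this default.

  import org.apache.hadoop.hbase.client.Durability;
  import org.apache.hadoop.hbase.client.Put;
  import org.apache.hadoop.hbase.util.Bytes;

  public class DurabilityExample {
    static Put syncWalPut() {
      Put put = new Put(Bytes.toBytes("row-1"));
      put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
      // Overrides the table/global default; Durability.USE_DEFAULT would fall back to it.
      put.setDurability(Durability.SYNC_WAL);
      return put;
    }
  }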
 
@@ -2472,7 +2472,7 @@ implements 
 
 HBASE_REGIONSERVER_MINIBATCH_SIZE
-public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HBASE_REGIONSERVER_MINIBATCH_SIZE
+public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HBASE_REGIONSERVER_MINIBATCH_SIZE
 
 See Also:
 Constant
 Field Values
@@ -2485,7 +2485,7 @@ implements 
 
 DEFAULT_HBASE_REGIONSERVER_MINIBATCH_SIZE
-public static finalint DEFAULT_HBASE_REGIONSERVER_MINIBATCH_SIZE
+public static finalint DEFAULT_HBASE_REGIONSERVER_MINIBATCH_SIZE
 
 See Also:
 Constant
 Field Values
@@ -2498,7 +2498,7 @@ implements 
 
 WAL_HSYNC_CONF_KEY
-public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String WAL_HSYNC_CONF_KEY
+public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String WAL_HSYNC_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -2511,7 +2511,7 @@ implements 
 
 DEFAULT_WAL_HSYNC
-public static finalboolean DEFAULT_WAL_HSYNC
+public static finalboolean DEFAULT_WAL_HSYNC
 
 See Also:
 Constant
 Field Values
@@ -2524,7 +2524,7 @@ implements 
 
 closed
-finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicBoolean closed
+finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicBoolean closed
 
 
 
@@ -2533,7 +2533,7 @@ implements 
 
 closing
-finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicBoolean closing
+finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicBoolean closing
 
 
 
@@ -2542,7 +2542,7 @@ implements 
 
 maxFlushedSeqId
-private volatilelong maxFlushedSeqId
+private volatilelong maxFlushedSeqId
 The 

[30/51] [partial] hbase-site git commit: Published site at c448604ceb987d113913f0583452b2abce04db0d.

2018-12-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f8b8424/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
index 0dcbecc..0438cdb 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
@@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-class HRegion.RegionScannerImpl
+class HRegion.RegionScannerImpl
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements RegionScanner, Shipper, RpcCallback
 RegionScannerImpl is used to combine scanners from multiple 
Stores (aka column families).
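
The class combines per-store scanners into one sorted stream. As a hedged, generic sketch
of that idea only (this is not the actual KeyValueHeap), a k-way merge over already-sorted
iterators can be built on a priority queue:

  import java.util.Comparator;
  import java.util.Iterator;
  import java.util.List;
  import java.util.PriorityQueue;

  final class MergingScanner<T> {
    private final PriorityQueue<Source<T>> heap;

    MergingScanner(List<Iterator<T>> sortedSources, Comparator<T> cmp) {
      heap = new PriorityQueue<>((a, b) -> cmp.compare(a.head, b.head));
      for (Iterator<T> it : sortedSources) {
        if (it.hasNext()) {
          heap.add(new Source<>(it));
        }
      }
    }

    /** Returns the globally smallest remaining element, or null when all sources are exhausted. */
    T next() {
      Source<T> top = heap.poll();
      if (top == null) {
        return null;
      }
      T value = top.head;
      if (top.it.hasNext()) {
        top.head = top.it.next();
        heap.add(top);               // re-seat the source under its new head element
      }
      return value;
    }

    private static final class Source<T> {
      final Iterator<T> it;
      T head;

      Source(Iterator<T> it) {
        this.it = it;
        this.head = it.next();       // caller guarantees hasNext()
      }
    }
  }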
@@ -425,7 +425,7 @@ implements 
 
 storeHeap
-KeyValueHeap storeHeap
+KeyValueHeap storeHeap
 
 
 
@@ -434,7 +434,7 @@ implements 
 
 joinedHeap
-KeyValueHeap joinedHeap
+KeyValueHeap joinedHeap
 Heap of key-values that are not essential for the provided 
filters and are thus read
  on demand, if on-demand column family loading is enabled.
 
@@ -445,7 +445,7 @@ implements 
 
 joinedContinuationRow
-protectedCell joinedContinuationRow
+protectedCell joinedContinuationRow
 If the joined heap data gathering is interrupted due to 
scan limits, this will
  contain the row for which we are populating the values.
 
@@ -456,7 +456,7 @@ implements 
 
 filterClosed
-privateboolean filterClosed
+privateboolean filterClosed
 
 
 
@@ -465,7 +465,7 @@ implements 
 
 stopRow
-protected finalbyte[] stopRow
+protected finalbyte[] stopRow
 
 
 
@@ -474,7 +474,7 @@ implements 
 
 includeStopRow
-protected finalboolean includeStopRow
+protected finalboolean includeStopRow
 
 
 
@@ -483,7 +483,7 @@ implements 
 
 region
-protected finalHRegion region
+protected finalHRegion region
 
 
 
@@ -492,7 +492,7 @@ implements 
 
 comparator
-protected finalCellComparator comparator
+protected finalCellComparator comparator
 
 
 
@@ -501,7 +501,7 @@ implements 
 
 readPt
-private finallong readPt
+private finallong readPt
 
 
 
@@ -510,7 +510,7 @@ implements 
 
 maxResultSize
-private finallong maxResultSize
+private finallong maxResultSize
 
 
 
@@ -519,7 +519,7 @@ implements 
 
 defaultScannerContext
-private finalScannerContext defaultScannerContext
+private finalScannerContext defaultScannerContext
 
 
 
@@ -528,7 +528,7 @@ implements 
 
 filter
-private finalFilterWrapper filter
+private finalFilterWrapper filter
 
 
 
@@ -545,7 +545,7 @@ implements 
 
 RegionScannerImpl
-RegionScannerImpl(Scanscan,
+RegionScannerImpl(Scanscan,
   https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListKeyValueScanneradditionalScanners,
   HRegionregion)
throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
@@ -561,7 +561,7 @@ implements 
 
 RegionScannerImpl
-RegionScannerImpl(Scanscan,
+RegionScannerImpl(Scanscan,
   https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListKeyValueScanneradditionalScanners,
   HRegionregion,
   longnonceGroup,
@@ -587,7 +587,7 @@ implements 
 
 getRegionInfo
-publicRegionInfogetRegionInfo()
+publicRegionInfogetRegionInfo()
 
 Specified by:
 getRegionInfoin
 interfaceRegionScanner
@@ -602,7 +602,7 @@ implements 
 
 initializeScanners
-protectedvoidinitializeScanners(Scanscan,
+protectedvoidinitializeScanners(Scanscan,
   https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListKeyValueScanneradditionalScanners)
throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
@@ -617,7 +617,7 @@ implements 
 
 initializeKVHeap
-protectedvoidinitializeKVHeap(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListKeyValueScannerscanners,
+protectedvoidinitializeKVHeap(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListKeyValueScannerscanners,
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListKeyValueScannerjoinedScanners,
 HRegionregion)
  throws 

[30/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

2018-12-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
index 2cdee19..e6bc675 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
@@ -110,2406 +110,2407 @@
 102import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
 103import 
org.apache.hadoop.hbase.util.Pair;
 104import 
org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
-105import 
org.apache.yetus.audience.InterfaceAudience;
-106import 
org.apache.zookeeper.KeeperException;
-107import org.slf4j.Logger;
-108import org.slf4j.LoggerFactory;
-109
-110import 
org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
-111import 
org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
-112import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
-113
-114import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-140import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-141import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-142import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersRequest;
-143import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersResponse;
-144import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-145import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-146import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;
-147import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;
-148import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersRequest;
-149import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersResponse;
-150import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-151import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-152import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-153import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-154import 

[30/51] [partial] hbase-site git commit: Published site at 1acbd36c903b048141866b143507bfce124a5c5f.

2018-11-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5299e667/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html
index f1c0a08..ff41797 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":9,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":9,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -107,16 +107,11 @@ var activeTableTab = "activeTableTab";
 
 
 
-
-All Implemented Interfaces:
-ServerListener
-
 
 
 @InterfaceAudience.Private
-public class AssignmentManager
-extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-implements ServerListener
+public class AssignmentManager
+extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 The AssignmentManager is the coordinator for region 
assign/unassign operations.
  
  In-memory states of regions and servers are stored in RegionStates.
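
The diff below adds getRegionsOnServer(ServerName) to this master-internal class. For
comparison, a hedged sketch of the client-side way to list the regions hosted by a server
through the public Admin API (connection handling is assumed):

  import java.io.IOException;
  import java.util.List;
  import org.apache.hadoop.hbase.ServerName;
  import org.apache.hadoop.hbase.client.Admin;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.RegionInfo;

  class RegionsOnServer {
    static void print(Connection conn, ServerName server) throws IOException {
      try (Admin admin = conn.getAdmin()) {
        List<RegionInfo> regions = admin.getRegions(server);
        regions.forEach(r -> System.out.println(r.getRegionNameAsString()));
      }
    }
  }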
@@ -540,49 +535,55 @@ implements getRegionsInTransition()
 
 
+https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
+getRegionsOnServer(ServerNameserverName)
+Returns the regions hosted by the specified server.
+
+
+
 RegionStates
 getRegionStates()
 
-
+
 RegionStateStore
 getRegionStateStore()
 
-
+
 Pairhttps://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer,https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
 getReopenStatus(TableNametableName)
 Used by the client (via master) to identify if all regions 
have the schema updates
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
 getSnapShotOfAssignment(https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionRegionInforegions)
 
-
+
 private https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
 getSystemTables(ServerNameserverName)
 
-
+
 (package private) TableStateManager
 getTableStateManager()
 
-
+
 private void
 handleRegionOverStuckWarningThreshold(RegionInforegionInfo)
 
-
+
 boolean
 hasRegionsInTransition()
 
-
+
 boolean
 isCarryingMeta(ServerNameserverName)
 
-
+
 private boolean
 

[30/51] [partial] hbase-site git commit: Published site at 130057f13774f6b213cdb06952c805a29d59396e.

2018-11-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/68eae623/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.Scope.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.Scope.html
 
b/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.Scope.html
new file mode 100644
index 000..5d49142
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.Scope.html
@@ -0,0 +1,210 @@
+Uses of Class org.apache.hadoop.hbase.security.access.Permission.Scope 
(Apache HBase 3.0.0-SNAPSHOT API)
+Uses of Class org.apache.hadoop.hbase.security.access.Permission.Scope
+
+
+
+
+
+Packages that use Permission.Scope
+
+Package
+Description
+
+
+
+org.apache.hadoop.hbase.security.access
+
+
+
+
+
+
+
+
+
+
+Uses of Permission.Scope 
in org.apache.hadoop.hbase.security.access
+
+Fields in org.apache.hadoop.hbase.security.access
 declared as Permission.Scope
+
+Modifier and Type
+Field and Description
+
+
+
+protected Permission.Scope
+Permission.scope
+
+
+
+
+Fields in org.apache.hadoop.hbase.security.access
 with type parameters of type Permission.Scope
+
+Modifier and Type
+Field and Description
+
+
+
+protected static https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttps://docs.oracle.com/javase/8/docs/api/java/lang/Byte.html?is-external=true;
 title="class or interface in java.lang">Byte,Permission.Scope
+Permission.SCOPE_BY_CODE
+
+
+
+
+Methods in org.apache.hadoop.hbase.security.access
 that return Permission.Scope
+
+Modifier and Type
+Method and Description
+
+
+
+Permission.Scope
+UserPermission.getAccessScope()
+Get this permission access scope.
+
+
+
+Permission.Scope
+Permission.getAccessScope()
+
+
+static Permission.Scope
+Permission.Scope.valueOf(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+Returns the enum constant of this type with the specified 
name.
+
+
+
+static Permission.Scope[]
+Permission.Scope.values()
+Returns an array containing the constants of this enum 
type, in
+the order they are declared.
+
+
+
+
+
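
A hedged sketch of the standard enum accessors listed above (the constant name TABLE is
assumed for illustration, not taken from this page):

  import org.apache.hadoop.hbase.security.access.Permission;

  class ScopeExample {
    static void demo() {
      // valueOf(String) looks a constant up by name; values() lists them in declaration order.
      Permission.Scope scope = Permission.Scope.valueOf("TABLE");  // assumed constant name
      for (Permission.Scope s : Permission.Scope.values()) {
        System.out.println(s + (s == scope ? "  <- selected" : ""));
      }
    }
  }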

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/68eae623/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html 
b/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
index f16df55..07dc8af 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/access/class-use/Permission.html
@@ -205,7 +205,9 @@
 
 
 private static class
-TableAuthManager.PermissionCacheT
 extends Permission
+AuthManager.PermissionCacheT 
extends Permission
+Cache of permissions; it is thread-safe.
+
 
 
 
@@ -218,33 +220,36 @@
 
 
 class
-TablePermission
-Represents an authorization for access for the given 
actions, optionally
- restricted to the given column family or column qualifier, over the
- given table.
+GlobalPermission
+Represents an authorization for access to the whole cluster.
 
 
 
 class
-UserPermission
-Represents an authorization for access over the given 
table, column family
- plus qualifier, for the given user.
+NamespacePermission
+Represents an authorization for access for the given 
namespace.
+
+
+
+class
+TablePermission
+Represents an authorization for access for the given 
actions, optionally
+ restricted to 

[30/51] [partial] hbase-site git commit: Published site at d5e4faacc354c1bc4d93efa71ca97ee3a056123e.

2018-10-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b5e107c3/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
index 0af8acd..c5f21ac 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
@@ -645,1615 +645,1597 @@
 637
proc.afterReplay(getEnvironment());
 638  }
 639});
-640
-641// 4. Push the procedures to the 
timeout executor
-642waitingTimeoutList.forEach(proc - 
{
-643  
proc.afterReplay(getEnvironment());
-644  timeoutExecutor.add(proc);
-645});
-646// 5. restore locks
-647restoreLocks();
-648// 6. Push the procedure to the 
scheduler
-649
failedList.forEach(scheduler::addBack);
-650runnableList.forEach(p - {
-651  p.afterReplay(getEnvironment());
-652  if (!p.hasParent()) {
-653
sendProcedureLoadedNotification(p.getProcId());
-654  }
-655  // If the procedure holds the lock, 
put the procedure in front
-656  // If its parent holds the lock, 
put the procedure in front
-657  // TODO. Is that possible that its 
ancestor holds the lock?
-658  // For now, the deepest procedure 
hierarchy is:
-659  // ModifyTableProcedure - 
ReopenTableProcedure -
-660  // MoveTableProcedure - 
Unassign/AssignProcedure
-661  // But ModifyTableProcedure and 
ReopenTableProcedure won't hold the lock
-662  // So, check parent lock is 
enough(a tricky case is resovled by HBASE-21384).
-663  // If some one change or add new 
procedures making 'grandpa' procedure
-664  // holds the lock, but parent 
procedure don't hold the lock, there will
-665  // be a problem here. We have to 
check one procedure's ancestors.
-666  // And we need to change 
LockAndQueue.hasParentLock(Procedure? proc) method
-667  // to check all ancestors too.
-668  if (p.isLockedWhenLoading() || 
(p.hasParent()  procedures
-669  
.get(p.getParentProcId()).isLockedWhenLoading())) {
-670scheduler.addFront(p, false);
-671  } else {
-672// if it was not, it can wait.
-673scheduler.addBack(p, false);
-674  }
-675});
-676// After all procedures put into the 
queue, signal the worker threads.
-677// Otherwise, there is a race 
condition. See HBASE-21364.
-678scheduler.signalAll();
-679  }
+640// 4. restore locks
+641restoreLocks();
+642
+643// 5. Push the procedures to the 
timeout executor
+644waitingTimeoutList.forEach(proc - 
{
+645  
proc.afterReplay(getEnvironment());
+646  timeoutExecutor.add(proc);
+647});
+648
+649// 6. Push the procedure to the 
scheduler
+650
failedList.forEach(scheduler::addBack);
+651runnableList.forEach(p - {
+652  p.afterReplay(getEnvironment());
+653  if (!p.hasParent()) {
+654
sendProcedureLoadedNotification(p.getProcId());
+655  }
+656  scheduler.addBack(p);
+657});
+658// After all procedures put into the 
queue, signal the worker threads.
+659// Otherwise, there is a race 
condition. See HBASE-21364.
+660scheduler.signalAll();
+661  }
+662
+663  /**
+664   * Initialize the procedure executor, 
but do not start workers. We will start them later.
+665   * p/
+666   * It calls 
ProcedureStore.recoverLease() and ProcedureStore.load() to recover the lease, 
and
+667   * ensure a single executor, and start 
the procedure replay to resume and recover the previous
+668   * pending and in-progress 
procedures.
+669   * @param numThreads number of threads 
available for procedure execution.
+670   * @param abortOnCorruption true if you 
want to abort your service in case a corrupted procedure
+671   *  is found on replay. 
otherwise false.
+672   */
+673  public void init(int numThreads, 
boolean abortOnCorruption) throws IOException {
+674// We have numThreads executor + one 
timer thread used for timing out
+675// procedures and triggering periodic 
procedures.
+676this.corePoolSize = numThreads;
+677this.maxPoolSize = 10 * numThreads;
+678LOG.info("Starting {} core workers 
(bigger of cpus/4 or 16) with max (burst) worker count={}",
+679corePoolSize, maxPoolSize);
 680
-681  /**
-682   * Initialize the procedure executor, 
but do not start workers. We will start them later.
-683   * p/
-684   * It calls 
ProcedureStore.recoverLease() and ProcedureStore.load() to recover the lease, 
and
-685   * ensure a single executor, and start 
the procedure replay to resume and recover the previous
-686   * pending and in-progress 
procedures.
-687   * @param numThreads number 

[30/51] [partial] hbase-site git commit: Published site at 3fe8649b2c9ba1271c25e8f476548907e4c7a90d.

2018-10-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8f09a71d/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.LocalityBasedCandidateGenerator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.LocalityBasedCandidateGenerator.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.LocalityBasedCandidateGenerator.html
index c7d99b2..9d1542c 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.LocalityBasedCandidateGenerator.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.LocalityBasedCandidateGenerator.html
@@ -382,1357 +382,1365 @@
 374for (int i = 0; i  
this.curFunctionCosts.length; i++) {
 375  curFunctionCosts[i] = 
tempFunctionCosts[i];
 376}
-377LOG.info("start 
StochasticLoadBalancer.balancer, initCost=" + currentCost + ", functionCost="
-378+ functionCost());
+377double initCost = currentCost;
+378double newCost = currentCost;
 379
-380double initCost = currentCost;
-381double newCost = currentCost;
-382
-383long computedMaxSteps;
-384if (runMaxSteps) {
-385  computedMaxSteps = 
Math.max(this.maxSteps,
-386  ((long)cluster.numRegions * 
(long)this.stepsPerRegion * (long)cluster.numServers));
-387} else {
-388  computedMaxSteps = 
Math.min(this.maxSteps,
-389  ((long)cluster.numRegions * 
(long)this.stepsPerRegion * (long)cluster.numServers));
-390}
-391// Perform a stochastic walk to see 
if we can get a good fit.
-392long step;
-393
-394for (step = 0; step  
computedMaxSteps; step++) {
-395  Cluster.Action action = 
nextAction(cluster);
-396
-397  if (action.type == Type.NULL) {
-398continue;
-399  }
-400
-401  cluster.doAction(action);
-402  updateCostsWithAction(cluster, 
action);
-403
-404  newCost = computeCost(cluster, 
currentCost);
-405
-406  // Should this be kept?
-407  if (newCost  currentCost) {
-408currentCost = newCost;
-409
-410// save for JMX
-411curOverallCost = currentCost;
-412for (int i = 0; i  
this.curFunctionCosts.length; i++) {
-413  curFunctionCosts[i] = 
tempFunctionCosts[i];
-414}
-415  } else {
-416// Put things back the way they 
were before.
-417// TODO: undo by remembering old 
values
-418Action undoAction = 
action.undoAction();
-419cluster.doAction(undoAction);
-420updateCostsWithAction(cluster, 
undoAction);
-421  }
-422
-423  if 
(EnvironmentEdgeManager.currentTime() - startTime 
-424  maxRunningTime) {
-425break;
-426  }
-427}
-428long endTime = 
EnvironmentEdgeManager.currentTime();
-429
-430
metricsBalancer.balanceCluster(endTime - startTime);
-431
-432// update costs metrics
-433updateStochasticCosts(tableName, 
curOverallCost, curFunctionCosts);
-434if (initCost  currentCost) {
-435  plans = 
createRegionPlans(cluster);
-436  LOG.info("Finished computing new 
load balance plan. Computation took {}" +
-437" to try {} different iterations. 
 Found a solution that moves " +
-438"{} regions; Going from a 
computed cost of {}" +
-439" to a new cost of {}", 
java.time.Duration.ofMillis(endTime - startTime),
-440step, plans.size(), initCost, 
currentCost);
-441  return plans;
-442}
-443LOG.info("Could not find a better 
load balance plan.  Tried {} different configurations in " +
-444  "{}, and did not find anything with 
a computed cost less than {}", step,
-445  java.time.Duration.ofMillis(endTime 
- startTime), initCost);
-446return null;
-447  }
-448
-449  /**
-450   * update costs to JMX
-451   */
-452  private void 
updateStochasticCosts(TableName tableName, Double overall, Double[] subCosts) 
{
-453if (tableName == null) return;
-454
-455// check if the metricsBalancer is 
MetricsStochasticBalancer before casting
-456if (metricsBalancer instanceof 
MetricsStochasticBalancer) {
-457  MetricsStochasticBalancer balancer 
= (MetricsStochasticBalancer) metricsBalancer;
-458  // overall cost
-459  
balancer.updateStochasticCost(tableName.getNameAsString(),
-460"Overall", "Overall cost", 
overall);
-461
-462  // each cost function
-463  for (int i = 0; i < costFunctions.length; i++) {
-464CostFunction costFunction = 
costFunctions[i];
-465String costFunctionName = 
costFunction.getClass().getSimpleName();
-466Double costPercent = (overall == 
0) ? 0 : (subCosts[i] / overall);
-467// TODO: cost function may need a 
specific description
-468
balancer.updateStochasticCost(tableName.getNameAsString(), costFunctionName,
-469  "The percent of " + 
costFunctionName, 
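
The hunk above is the heart of the balancer: propose a random action, apply it, keep it if the recomputed cost drops, otherwise undo it, and stop when either the step budget (computedMaxSteps) or the time budget (maxRunningTime) runs out. A minimal, self-contained Java sketch of that accept-or-undo walk, with a toy imbalance cost standing in for the balancer's weighted cost functions (all names below are illustrative, not HBase API):

import java.util.Random;

// A minimal sketch, not HBase code: the same propose / apply / keep-or-undo walk the
// balancer runs, using a toy "regions per server" assignment and a squared-imbalance cost.
public class StochasticWalkSketch {
  public static void main(String[] args) {
    int[] assignment = {0, 0, 0, 0, 1};          // region -> server
    int servers = 2;
    long maxSteps = 10_000;
    long maxRunningTime = 50;                    // ms, mirrors maxRunningTime in the diff
    long startTime = System.currentTimeMillis();
    double currentCost = cost(assignment, servers);
    Random rnd = new Random(42);

    for (long step = 0; step < maxSteps; step++) {
      int region = rnd.nextInt(assignment.length);   // propose a random "action"
      int from = assignment[region];
      int to = rnd.nextInt(servers);
      if (from == to) continue;                      // nothing to do (like Type.NULL)
      assignment[region] = to;                       // doAction
      double newCost = cost(assignment, servers);
      if (newCost < currentCost) {
        currentCost = newCost;                       // keep the improvement
      } else {
        assignment[region] = from;                   // undoAction: put things back
      }
      if (System.currentTimeMillis() - startTime > maxRunningTime) break;
    }
    System.out.println("final cost = " + currentCost);
  }

  // Toy cost: squared deviation from a perfectly even region count per server.
  static double cost(int[] assignment, int servers) {
    int[] load = new int[servers];
    for (int s : assignment) load[s]++;
    double mean = (double) assignment.length / servers, c = 0;
    for (int l : load) c += (l - mean) * (l - mean);
    return c;
  }
}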

[30/51] [partial] hbase-site git commit: Published site at 7adf590106826b9e4432cfeee06acdc0ccff8c6e.

2018-10-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/425db230/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureMap.Entry.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureMap.Entry.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureMap.Entry.html
deleted file mode 100644
index a281f12..000
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureMap.Entry.html
+++ /dev/null
@@ -1,679 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-Source code
-
-
-
-
-001/**
-002 * Licensed to the Apache Software 
Foundation (ASF) under one
-003 * or more contributor license 
agreements.  See the NOTICE file
-004 * distributed with this work for 
additional information
-005 * regarding copyright ownership.  The 
ASF licenses this file
-006 * to you under the Apache License, 
Version 2.0 (the
-007 * "License"); you may not use this file 
except in compliance
-008 * with the License.  You may obtain a 
copy of the License at
-009 *
-010 * 
http://www.apache.org/licenses/LICENSE-2.0
-011 *
-012 * Unless required by applicable law or 
agreed to in writing, software
-013 * distributed under the License is 
distributed on an "AS IS" BASIS,
-014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-015 * See the License for the specific 
language governing permissions and
-016 * limitations under the License.
-017 */
-018package 
org.apache.hadoop.hbase.procedure2.store.wal;
-019
-020import java.io.IOException;
-021import 
org.apache.hadoop.hbase.procedure2.Procedure;
-022import 
org.apache.hadoop.hbase.procedure2.ProcedureUtil;
-023import 
org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator;
-024import org.slf4j.Logger;
-025import org.slf4j.LoggerFactory;
-026
-027import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
-028
-029/**
-030 * We keep an in-memory map of the 
procedures sorted by replay order. (see the details in the
-031 * beginning of {@link 
ProcedureWALFormatReader}).
-032 *
-033 * <pre>
-034 *  procedureMap = | A |   | E |   | 
C |   |   |   |   | G |   |   |
-035 *   D   
B
-036 *  replayOrderHead = C <- B <- E <- D <- A <- G
-037 *
-038 *  We also have a lazy grouping by "root 
procedure", and a list of
-039 *  unlinked procedures. If after reading 
all the WALs we have unlinked
-040 *  procedures it means that we had a 
missing WAL or a corruption.
-041 *  rootHead = A <- D <- G
-042 * B E
-043 * C
-044 *  unlinkFromLinkList = None
-045 * </pre>
-046 */
-047class WALProcedureMap {
-048
-049  private static final Logger LOG = 
LoggerFactory.getLogger(WALProcedureMap.class);
-050
-051  private static class Entry {
-052// For bucketed linked lists in 
hash-table.
-053private Entry hashNext;
-054// child head
-055private Entry childHead;
-056// double-link for rootHead or 
childHead
-057private Entry linkNext;
-058private Entry linkPrev;
-059// replay double-linked-list
-060private Entry replayNext;
-061private Entry replayPrev;
-062// procedure-infos
-063private Procedure<?> procedure;
-064private ProcedureProtos.Procedure 
proto;
-065private boolean ready = false;
-066
-067public Entry(Entry hashNext) {
-068  this.hashNext = hashNext;
-069}
-070
-071public long getProcId() {
-072  return proto.getProcId();
-073}
-074
-075public long getParentId() {
-076  return proto.getParentId();
-077}
-078
-079public boolean hasParent() {
-080  return proto.hasParentId();
-081}
-082
-083public boolean isReady() {
-084  return ready;
-085}
-086
-087public boolean isFinished() {
-088  if (!hasParent()) {
-089// we only consider 'root' 
procedures. because for the user 'finished'
-090// means when everything up to 
the 'root' is finished.
-091switch (proto.getState()) {
-092  case ROLLEDBACK:
-093  case SUCCESS:
-094return true;
-095  default:
-096break;
-097}
-098  }
-099  return false;
-100}
-101
-102public Procedure<?> convert() throws IOException {
-103  if (procedure == null) {
-104procedure = 
ProcedureUtil.convertToProcedure(proto);
-105  }
-106  return procedure;
-107}
-108
-109@Override
-110public String toString() {
-111  final StringBuilder sb = new 
StringBuilder();
-112  sb.append("Entry(");
-113  sb.append(getProcId());
-114  sb.append(", parentId=");
-115  sb.append(getParentId());
-116  sb.append(", class=");
-117  sb.append(proto.getClassName());
-118  sb.append(")");
-119  return sb.toString();
-120}
-121  }
-122
-123  private static class EntryIterator 
implements 
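
The class comment and the Entry fields above describe one structure playing two roles: each entry is reachable by procedure id through a hash table and, at the same time, is threaded onto a replay-order linked list (plus a root-procedure grouping used to spot missing or corrupt WALs). A simplified sketch of that map-plus-intrusive-list idea, with the root grouping omitted (plain Java, not the HBase class):

import java.util.HashMap;
import java.util.Map;

// Sketch only: entries live in a map keyed by id and, at the same time, form a doubly
// linked "replay order" list, so iteration follows the order entries were read from the WALs.
public class ReplayOrderedMapSketch {
  static final class Entry {
    final long procId;
    Entry replayPrev, replayNext;   // replay double-linked-list, like WALProcedureMap.Entry
    Entry(long procId) { this.procId = procId; }
  }

  private final Map<Long, Entry> map = new HashMap<>();
  private Entry replayHead, replayTail;

  void add(long procId) {
    Entry e = new Entry(procId);
    map.put(procId, e);
    if (replayTail == null) { replayHead = replayTail = e; }
    else { replayTail.replayNext = e; e.replayPrev = replayTail; replayTail = e; }
  }

  void printReplayOrder() {
    for (Entry e = replayHead; e != null; e = e.replayNext) System.out.print(e.procId + " ");
    System.out.println();
  }

  public static void main(String[] args) {
    ReplayOrderedMapSketch m = new ReplayOrderedMapSketch();
    m.add(3); m.add(1); m.add(2);   // replay order = order the entries were encountered
    m.printReplayOrder();           // prints: 3 1 2
  }
}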

[30/51] [partial] hbase-site git commit: Published site at 5fbb227deb365fe812d433fe39b85ac4b0ddee20.

2018-10-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c9ebe686/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
index 6fbe88b..7db6cd7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class BaseLoadBalancer.Cluster.AssignRegionAction
+public static class BaseLoadBalancer.Cluster.AssignRegionAction
 extends BaseLoadBalancer.Cluster.Action
 
 
@@ -239,7 +239,7 @@ extends 
 
 region
-publicint region
+publicint region
 
 
 
@@ -248,7 +248,7 @@ extends 
 
 server
-publicint server
+publicint server
 
 
 
@@ -265,7 +265,7 @@ extends 
 
 AssignRegionAction
-publicAssignRegionAction(intregion,
+publicAssignRegionAction(intregion,
   intserver)
 
 
@@ -283,7 +283,7 @@ extends 
 
 undoAction
-publicBaseLoadBalancer.Cluster.ActionundoAction()
+publicBaseLoadBalancer.Cluster.ActionundoAction()
 Description copied from 
class:BaseLoadBalancer.Cluster.Action
 Returns an Action which would undo this action
 
@@ -298,7 +298,7 @@ extends 
 
 toString
-publichttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
+publichttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
 
 Overrides:
 toStringin
 classBaseLoadBalancer.Cluster.Action

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c9ebe686/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
index 5f9e616..000adce 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static enum BaseLoadBalancer.Cluster.LocalityType
+static enum BaseLoadBalancer.Cluster.LocalityType
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumBaseLoadBalancer.Cluster.LocalityType
 
 
@@ -210,7 +210,7 @@ the order they are declared.
 
 
 SERVER
-public static finalBaseLoadBalancer.Cluster.LocalityType
 SERVER
+public static finalBaseLoadBalancer.Cluster.LocalityType
 SERVER
 
 
 
@@ -219,7 +219,7 @@ the order they are declared.
 
 
 RACK
-public static finalBaseLoadBalancer.Cluster.LocalityType
 RACK
+public static finalBaseLoadBalancer.Cluster.LocalityType
 RACK
 
 
 
@@ -236,7 +236,7 @@ the order they are declared.
 
 
 values
-public staticBaseLoadBalancer.Cluster.LocalityType[]values()
+public staticBaseLoadBalancer.Cluster.LocalityType[]values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -256,7 +256,7 @@ for (BaseLoadBalancer.Cluster.LocalityType c : 
BaseLoadBalancer.Cluster.Locality
 
 
 valueOf
-public staticBaseLoadBalancer.Cluster.LocalityTypevalueOf(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+public staticBaseLoadBalancer.Cluster.LocalityTypevalueOf(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c9ebe686/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
index 5ca5086..bd1bf7a 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
+++ 

[30/51] [partial] hbase-site git commit: Published site at 821e4d7de2d576189f4288d1c2acf9e9a9471f5c.

2018-10-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/323b17d9/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreTracker.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreTracker.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreTracker.html
index 0c69df9..8515fa3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreTracker.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreTracker.html
@@ -32,263 +32,263 @@
 024import java.util.Map;
 025import java.util.TreeMap;
 026import java.util.stream.LongStream;
-027import 
org.apache.yetus.audience.InterfaceAudience;
-028import 
org.apache.yetus.audience.InterfaceStability;
-029
-030import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
-031
-032/**
-033 * Keeps track of live procedures.
-034 *
-035 * It can be used by the ProcedureStore 
to identify which procedures are already
-036 * deleted/completed to avoid the 
deserialization step on restart
-037 */
-038@InterfaceAudience.Private
-039@InterfaceStability.Evolving
-040public class ProcedureStoreTracker {
-041  // Key is procedure id corresponding to 
first bit of the bitmap.
-042  private final TreeMap<Long, BitSetNode> map = new TreeMap<>();
-043
-044  /**
-045   * If true, do not remove bits 
corresponding to deleted procedures. Note that this can result
-046   * in huge bitmaps overtime.
-047   * Currently, it's set to true only 
when building tracker state from logs during recovery. During
-048   * recovery, if we are sure that a 
procedure has been deleted, reading its old update entries
-049   * can be skipped.
-050   */
-051  private boolean keepDeletes = false;
-052  /**
-053   * If true, it means tracker has 
incomplete information about the active/deleted procedures.
-054   * It's set to true only when 
recovering from old logs. See {@link #isDeleted(long)} docs to
-055   * understand it's real use.
-056   */
-057  boolean partial = false;
-058
-059  private long minModifiedProcId = 
Long.MAX_VALUE;
-060  private long maxModifiedProcId = 
Long.MIN_VALUE;
-061
-062  public enum DeleteState { YES, NO, 
MAYBE }
-063
-064  public void 
resetToProto(ProcedureProtos.ProcedureStoreTracker trackerProtoBuf) {
-065reset();
-066for 
(ProcedureProtos.ProcedureStoreTracker.TrackerNode protoNode: 
trackerProtoBuf.getNodeList()) {
-067  final BitSetNode node = new 
BitSetNode(protoNode);
-068  map.put(node.getStart(), node);
-069}
-070  }
-071
-072  /**
-073   * Resets internal state to same as 
given {@code tracker}. Does deep copy of the bitmap.
-074   */
-075  public void 
resetTo(ProcedureStoreTracker tracker) {
-076resetTo(tracker, false);
-077  }
-078
-079  /**
-080   * Resets internal state to same as 
given {@code tracker}, and change the deleted flag according
-081   * to the modified flag if {@code 
resetDelete} is true. Does deep copy of the bitmap.
-082   * <p/>
-083   * The {@code resetDelete} will be set 
to true when building cleanup tracker, please see the
-084   * comments in {@link 
BitSetNode#BitSetNode(BitSetNode, boolean)} to learn how we change the
-085   * deleted flag if {@code resetDelete} 
is true.
-086   */
-087  public void 
resetTo(ProcedureStoreTracker tracker, boolean resetDelete) {
-088reset();
-089this.partial = tracker.partial;
-090this.minModifiedProcId = 
tracker.minModifiedProcId;
-091this.maxModifiedProcId = 
tracker.maxModifiedProcId;
-092this.keepDeletes = 
tracker.keepDeletes;
-093for (Map.Entry<Long, BitSetNode> entry : tracker.map.entrySet()) {
-094  map.put(entry.getKey(), new 
BitSetNode(entry.getValue(), resetDelete));
-095}
-096  }
-097
-098  public void insert(long procId) {
-099insert(null, procId);
-100  }
-101
-102  public void insert(long[] procIds) {
-103for (int i = 0; i < procIds.length; ++i) {
-104  insert(procIds[i]);
-105}
-106  }
-107
-108  public void insert(long procId, long[] 
subProcIds) {
-109BitSetNode node = update(null, 
procId);
-110for (int i = 0; i < subProcIds.length; ++i) {
-111  node = insert(node, 
subProcIds[i]);
-112}
-113  }
-114
-115  private BitSetNode insert(BitSetNode 
node, long procId) {
-116if (node == null || 
!node.contains(procId)) {
-117  node = getOrCreateNode(procId);
-118}
-119node.insertOrUpdate(procId);
-120trackProcIds(procId);
-121return node;
-122  }
-123
-124  public void update(long procId) {
-125update(null, procId);
-126  }
-127
-128  private BitSetNode update(BitSetNode 
node, long procId) {
-129node = lookupClosestNode(node, 
procId);
-130assert node != null : "expected node 
to update procId=" + procId;
-131assert node.contains(procId) : 
"expected procId=" + procId + " in the node";
-132node.insertOrUpdate(procId);
-133
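
The tracker above stores procedure ids as bitmap chunks (BitSetNode) keyed in a TreeMap by the first id each chunk covers, so insert/update/lookup reduce to finding the closest node and flipping a bit. A cut-down sketch of that chunked-bitmap idea; the chunk size and method names here are assumptions, and the real BitSetNode additionally tracks modified vs. deleted bits:

import java.util.BitSet;
import java.util.Map;
import java.util.TreeMap;

// Sketch of the idea only: ids live in fixed-size bitmap chunks keyed by the first id the
// chunk covers, so membership is a floorEntry() plus a bit test instead of a full scan.
public class BitmapTrackerSketch {
  private static final int CHUNK = 1024;           // hypothetical chunk size
  private final TreeMap<Long, BitSet> map = new TreeMap<>();

  void insert(long procId) {
    long start = (procId / CHUNK) * CHUNK;
    map.computeIfAbsent(start, k -> new BitSet(CHUNK)).set((int) (procId - start));
  }

  boolean contains(long procId) {
    Map.Entry<Long, BitSet> e = map.floorEntry(procId);
    return e != null && procId < e.getKey() + CHUNK && e.getValue().get((int) (procId - e.getKey()));
  }

  public static void main(String[] args) {
    BitmapTrackerSketch t = new BitmapTrackerSketch();
    t.insert(5); t.insert(2000);
    System.out.println(t.contains(5) + " " + t.contains(6));   // true false
  }
}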

[30/51] [partial] hbase-site git commit: Published site at fa5fa6ecdd071b72b58971058ff3ab9d28c3e709.

2018-10-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d1341859/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureRetainer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureRetainer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureRetainer.html
index 061ce80..bdfc3f8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureRetainer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureRetainer.html
@@ -39,2126 +39,2163 @@
 031import java.util.Set;
 032import 
java.util.concurrent.ConcurrentHashMap;
 033import 
java.util.concurrent.CopyOnWriteArrayList;
-034import java.util.concurrent.TimeUnit;
-035import 
java.util.concurrent.atomic.AtomicBoolean;
-036import 
java.util.concurrent.atomic.AtomicInteger;
-037import 
java.util.concurrent.atomic.AtomicLong;
-038import java.util.stream.Collectors;
-039import java.util.stream.Stream;
-040import 
org.apache.hadoop.conf.Configuration;
-041import 
org.apache.hadoop.hbase.HConstants;
-042import 
org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
-043import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-044import 
org.apache.hadoop.hbase.procedure2.Procedure.LockState;
-045import 
org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
-046import 
org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator;
-047import 
org.apache.hadoop.hbase.procedure2.util.StringUtils;
-048import 
org.apache.hadoop.hbase.security.User;
-049import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-050import 
org.apache.hadoop.hbase.util.IdLock;
-051import 
org.apache.hadoop.hbase.util.NonceKey;
-052import 
org.apache.hadoop.hbase.util.Threads;
-053import 
org.apache.yetus.audience.InterfaceAudience;
-054import org.slf4j.Logger;
-055import org.slf4j.LoggerFactory;
-056
-057import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-058import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+034import java.util.concurrent.Executor;
+035import java.util.concurrent.Executors;
+036import java.util.concurrent.TimeUnit;
+037import 
java.util.concurrent.atomic.AtomicBoolean;
+038import 
java.util.concurrent.atomic.AtomicInteger;
+039import 
java.util.concurrent.atomic.AtomicLong;
+040import java.util.stream.Collectors;
+041import java.util.stream.Stream;
+042import 
org.apache.hadoop.conf.Configuration;
+043import 
org.apache.hadoop.hbase.HConstants;
+044import 
org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
+045import 
org.apache.hadoop.hbase.log.HBaseMarkers;
+046import 
org.apache.hadoop.hbase.procedure2.Procedure.LockState;
+047import 
org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
+048import 
org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator;
+049import 
org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureStoreListener;
+050import 
org.apache.hadoop.hbase.procedure2.util.StringUtils;
+051import 
org.apache.hadoop.hbase.security.User;
+052import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+053import 
org.apache.hadoop.hbase.util.IdLock;
+054import 
org.apache.hadoop.hbase.util.NonceKey;
+055import 
org.apache.hadoop.hbase.util.Threads;
+056import 
org.apache.yetus.audience.InterfaceAudience;
+057import org.slf4j.Logger;
+058import org.slf4j.LoggerFactory;
 059
-060import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;
-061
-062/**
-063 * Thread Pool that executes the 
submitted procedures.
-064 * The executor has a ProcedureStore 
associated.
-065 * Each operation is logged and on 
restart the pending procedures are resumed.
-066 *
-067 * Unless the Procedure code throws an 
error (e.g. invalid user input)
-068 * the procedure will complete (at some 
point in time), On restart the pending
-069 * procedures are resumed and the once 
failed will be rolledback.
+060import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+061import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+062import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+063
+064import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;
+065
+066/**
+067 * Thread Pool that executes the 
submitted procedures.
+068 * The executor has a ProcedureStore 
associated.
+069 * Each operation is logged and on 
restart the pending procedures are resumed.
 070 *
-071 * The user can add procedures to the 
executor via submitProcedure(proc)
-072 * check for the finished state via 
isFinished(procId)
-073 * and get the result via 
getResult(procId)
-074 */
-075@InterfaceAudience.Private
-076public class 
ProcedureExecutor<TEnvironment> 
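
The javadoc above states the executor's contract: submit a procedure, poll its finished state by id, and fetch its result by id. Below is a self-contained analogue of that flow built on plain java.util.concurrent; it only illustrates the contract, since the real executor also logs every step to the ProcedureStore so pending procedures can be resumed (and failed ones rolled back) after a restart:

import java.util.Map;
import java.util.concurrent.*;

// Not the HBase API: an id-keyed map of futures mimicking submit / isFinished / getResult.
public class ProcedureLikeExecutorSketch {
  private final ExecutorService pool = Executors.newFixedThreadPool(2);
  private final Map<Long, Future<String>> results = new ConcurrentHashMap<>();
  private long nextId = 0;

  synchronized long submitProcedure(Callable<String> proc) {
    long id = nextId++;
    results.put(id, pool.submit(proc));    // hand the work to the pool, remember it by id
    return id;
  }

  boolean isFinished(long procId) { return results.get(procId).isDone(); }

  String getResult(long procId) throws Exception { return results.get(procId).get(); }

  public static void main(String[] args) throws Exception {
    ProcedureLikeExecutorSketch exec = new ProcedureLikeExecutorSketch();
    long id = exec.submitProcedure(() -> "done");
    System.out.println(exec.getResult(id) + " finished=" + exec.isFinished(id));
    exec.pool.shutdown();
  }
}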

[30/51] [partial] hbase-site git commit: Published site at 6bc7089f9e0793efc9bdd46a84f5ccd9bc4579ad.

2018-09-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/419d0338/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index 21f965b..d551c60 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -520,14 +520,14 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType
 org.apache.hadoop.hbase.util.Bytes.LexicographicalComparerHolder.UnsafeComparer
 (implements org.apache.hadoop.hbase.util.Bytes.ComparerT)
 org.apache.hadoop.hbase.util.Bytes.LexicographicalComparerHolder.PureJavaComparer
 (implements org.apache.hadoop.hbase.util.Bytes.ComparerT)
 org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE
-org.apache.hadoop.hbase.util.PoolMap.PoolType
 org.apache.hadoop.hbase.util.ChecksumType
-org.apache.hadoop.hbase.util.PrettyPrinter.Unit
 org.apache.hadoop.hbase.util.Order
+org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType
+org.apache.hadoop.hbase.util.PoolMap.PoolType
+org.apache.hadoop.hbase.util.PrettyPrinter.Unit
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/419d0338/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index 880c3e2..1dc9d7a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -16,11 +16,11 @@
 008@InterfaceAudience.Private
 009public class Version {
 010  public static final String version = 
"3.0.0-SNAPSHOT";
-011  public static final String revision = 
"86cb8e48ad8aecf52bca1169a98607c76198c70b";
+011  public static final String revision = 
"6bc7089f9e0793efc9bdd46a84f5ccd9bc4579ad";
 012  public static final String user = 
"jenkins";
-013  public static final String date = "Thu 
Sep 27 14:43:35 UTC 2018";
+013  public static final String date = "Fri 
Sep 28 14:44:16 UTC 2018";
 014  public static final String url = 
"git://jenkins-websites1.apache.org/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";
-015  public static final String srcChecksum 
= "7820d3c0ce32dcaf63beb5e00e6d452c";
+015  public static final String srcChecksum 
= "aa6dbdc70b22d25a99789472000c5ecd";
 016}
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/419d0338/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TransitRegionStateProcedure.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TransitRegionStateProcedure.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TransitRegionStateProcedure.html
index a357453..fcf1927 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TransitRegionStateProcedure.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TransitRegionStateProcedure.html
@@ -337,234 +337,235 @@
 329backoff / 1000, this, 
regionNode.toShortString(), e);
 330  
setTimeout(Math.toIntExact(backoff));
 331  
setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
-332  throw new 
ProcedureSuspendedException();
-333}
-334  }
-335
-336  /**
-337   * At end of timeout, wake ourselves up 
so we run again.
-338   */
-339  @Override
-340  protected synchronized boolean 
setTimeoutFailure(MasterProcedureEnv env) {
-341
setState(ProcedureProtos.ProcedureState.RUNNABLE);
-342
env.getProcedureScheduler().addFront(this);
-343return false; // 'false' means that 
this procedure handled the timeout
-344  }
-345
-346  private void 
reportTransitionOpened(MasterProcedureEnv env, RegionStateNode regionNode,
-347  ServerName serverName, 
TransitionCode code, long openSeqNum) throws IOException {
-348switch (code) {
-349  case OPENED:
-350if (openSeqNum < 0) {
-351  throw new 
UnexpectedStateException("Received report unexpected " + code +
-352" transition openSeqNum=" + 
openSeqNum + ", " + regionNode);
-353}
-354if (openSeqNum <= regionNode.getOpenSeqNum()) {
-355  if (openSeqNum != 0) {
-356LOG.warn("Skip update of 
openSeqNum for {} with {} because the 
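
Earlier in this hunk the procedure handles a transient failure by calling setTimeout(backoff), moving to WAITING_TIMEOUT and throwing ProcedureSuspendedException; when the timeout fires, setTimeoutFailure() marks it RUNNABLE again and puts it back at the front of the scheduler. That is a suspend-and-retry-with-exponential-backoff pattern; a small stand-alone sketch of the same idea (illustrative names, not the procedure framework API):

import java.util.concurrent.*;

// Sketch only: failed work is not retried inline; it is parked and re-queued after an
// exponentially growing backoff, which is what setTimeout + WAITING_TIMEOUT achieve above.
public class BackoffRetrySketch {
  public static void main(String[] args) throws Exception {
    ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
    retry(scheduler, 0, 100);
    Thread.sleep(2000);                    // let the scheduled retries run
    scheduler.shutdown();
  }

  static void retry(ScheduledExecutorService scheduler, int attempt, long backoffMs) {
    boolean ok = attempt >= 3;             // pretend the first three attempts fail
    if (ok) {
      System.out.println("succeeded on attempt " + attempt);
      return;
    }
    System.out.println("attempt " + attempt + " failed, retrying in " + backoffMs + " ms");
    // re-queue ourselves after the timeout, like addFront(this) on wake-up
    scheduler.schedule(() -> retry(scheduler, attempt + 1, backoffMs * 2),
        backoffMs, TimeUnit.MILLISECONDS);
  }
}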

[30/51] [partial] hbase-site git commit: Published site at d7e08317d2f214e4cca7b67578aba0ed7a567d54.

2018-09-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/37cf49a6/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
index 85fe9a9..31eab60 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":9,"i42":9,"i43":9,"i44":9,"i45":9,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":9,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":9,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109":9,
 
"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":41,"i117":41,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":9,"i143":10,"i144":10,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":42,"i152":10,"i153":10,"i154":10,"i155":10,"i156":10,"i157":10,"i158":10,"i159":10,"i160":10,"i161":10,"i162":10,"i163":10,"i164":10,"i165":10,"i166":10,"i167":10,"i168":10,"i169":10,"i170":10,"i171":10,"i172":10,"i173":9,"i174":10,"i175":10,"i176":10,"i177":10,"i178":10,"i179":10,"i180":10,"i181":10,"i182":9,"i183":10,"i184":10,"i185":9,"i186":9,"i187":9,"i188":9,"i189":9,"i190":9,"i191":9,"i192":9,"i193":9,"i194":9,"i195":10,"i196":10,"i197":10,"i198":10,"i199":10,"i200":10,"i201":10,"i202":10,"i203":10,"i204":9,"i205":10,"i206":10,"i207":10,"i208":10,"i209":10,"i210":10,"
 
i211":10,"i212":10,"i213":10,"i214":10,"i215":10,"i216":10,"i217":10,"i218":10,"i219":10,"i220":10,"i221":10,"i222":10,"i223":10,"i224":10,"i225":10,"i226":10,"i227":10,"i228":10,"i229":10,"i230":10,"i231":10,"i232":10,"i233":10,"i234":10,"i235":9,"i236":9,"i237":10,"i238":10,"i239":10,"i240":10,"i241":10,"i242":10,"i243":10,"i244":10,"i245":10,"i246":10,"i247":10,"i248":10,"i249":9,"i250":10,"i251":10,"i252":10,"i253":10,"i254":10,"i255":10,"i256":10,"i257":9,"i258":10,"i259":10,"i260":10,"i261":10,"i262":10,"i263":9,"i264":10,"i265":10,"i266":10,"i267":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":9,"i42":9,"i43":9,"i44":9,"i45":9,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":9,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":9,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109":9,
 

[30/51] [partial] hbase-site git commit: Published site at 8eaaa63114a64bcaeaf0ed9bdd88615ee22255c1.

2018-09-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f6f9d4f3/devapidocs/org/apache/hadoop/hbase/util/Bytes.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/Bytes.html 
b/devapidocs/org/apache/hadoop/hbase/util/Bytes.html
index 9dd868b..9c965b8 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/Bytes.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/Bytes.html
@@ -281,7 +281,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 
-private static boolean
+(package private) static boolean
 UNSAFE_UNALIGNED
 
 
@@ -1462,7 +1462,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 UNSAFE_UNALIGNED
-private static finalboolean UNSAFE_UNALIGNED
+static finalboolean UNSAFE_UNALIGNED
 
 
 
@@ -1471,7 +1471,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 bytes
-privatebyte[] bytes
+privatebyte[] bytes
 
 
 
@@ -1480,7 +1480,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 offset
-privateint offset
+privateint offset
 
 
 
@@ -1489,7 +1489,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 length
-privateint length
+privateint length
 
 
 
@@ -1498,7 +1498,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 BYTES_COMPARATOR
-public static finalhttps://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">Comparatorbyte[] BYTES_COMPARATOR
+public static finalhttps://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">Comparatorbyte[] BYTES_COMPARATOR
 Pass this to TreeMaps where byte [] are keys.
 
 
@@ -1508,7 +1508,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 BYTES_RAWCOMPARATOR
-public static finalorg.apache.hadoop.io.RawComparatorbyte[] 
BYTES_RAWCOMPARATOR
+public static finalorg.apache.hadoop.io.RawComparatorbyte[] 
BYTES_RAWCOMPARATOR
 Use comparing byte arrays, byte-by-byte
 
 
@@ -1518,7 +1518,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 HEX_CHARS_UPPER
-private static finalchar[] HEX_CHARS_UPPER
+private static finalchar[] HEX_CHARS_UPPER
 
 
 
@@ -1527,7 +1527,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 RNG
-private static finalhttps://docs.oracle.com/javase/8/docs/api/java/security/SecureRandom.html?is-external=true;
 title="class or interface in java.security">SecureRandom RNG
+private static finalhttps://docs.oracle.com/javase/8/docs/api/java/security/SecureRandom.html?is-external=true;
 title="class or interface in java.security">SecureRandom RNG
 
 
 
@@ -1536,7 +1536,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 HEX_CHARS
-private static finalchar[] HEX_CHARS
+private static finalchar[] HEX_CHARS
 
 
 
@@ -1553,7 +1553,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 Bytes
-publicBytes()
+publicBytes()
 Create a zero-size sequence.
 
 
@@ -1563,7 +1563,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 Bytes
-publicBytes(byte[]bytes)
+publicBytes(byte[]bytes)
 Create a Bytes using the byte array as the initial 
value.
 
 Parameters:
@@ -1577,7 +1577,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 Bytes
-publicBytes(Bytesibw)
+publicBytes(Bytesibw)
 Set the new Bytes to the contents of the passed
  ibw.
 
@@ -1592,7 +1592,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 
 Bytes
-publicBytes(byte[]bytes,
+publicBytes(byte[]bytes,
  intoffset,
  intlength)
 Set the value to a given byte range
@@ -1611,7 +1611,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab
 
 Bytes
 https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
-publicBytes(com.google.protobuf.ByteStringbyteString)
+publicBytes(com.google.protobuf.ByteStringbyteString)
 Deprecated.As of release 2.0.0, this will be removed in HBase 
3.0.0.
 Copy bytes from ByteString instance.
 
@@ -1634,7 +1634,7 @@ public
 
 len
-public static finalintlen(byte[]b)
+public static finalintlen(byte[]b)
 Returns length of the byte array, returning 0 if the array 
is null.
  Useful for calculating sizes.
 
@@ -1651,7 +1651,7 @@ public
 
 get
-publicbyte[]get()
+publicbyte[]get()
 Get the data from the Bytes.
 
 Returns:
@@ -1665,7 +1665,7 @@ public
 
 set
-publicvoidset(byte[]b)
+publicvoidset(byte[]b)
 
 Parameters:
 b - Use passed bytes as backing array for this instance.
@@ -1678,7 +1678,7 @@ public
 
 set
-publicvoidset(byte[]b,
+publicvoidset(byte[]b,
 intoffset,
 intlength)
 
@@ -1696,7 +1696,7 @@ public
 getSize
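
The javadoc above ("Pass this to TreeMaps where byte [] are keys", "Use comparing byte arrays, byte-by-byte") exists because byte[] has no value-based equals/compareTo, so an ordered map keyed by row bytes needs an explicit lexicographic comparator such as BYTES_COMPARATOR. A plain-Java equivalent for illustration, comparing unsigned bytes and breaking ties on length:

import java.util.Comparator;
import java.util.TreeMap;

// Illustrative stand-in for a byte[] comparator; not the HBase implementation.
public class ByteArrayKeySketch {
  static final Comparator<byte[]> LEXICOGRAPHIC = (a, b) -> {
    int n = Math.min(a.length, b.length);
    for (int i = 0; i < n; i++) {
      int cmp = (a[i] & 0xff) - (b[i] & 0xff);   // compare as unsigned bytes, byte-by-byte
      if (cmp != 0) return cmp;
    }
    return a.length - b.length;                  // shorter array sorts first on ties
  };

  public static void main(String[] args) {
    TreeMap<byte[], String> map = new TreeMap<>(LEXICOGRAPHIC);
    map.put(new byte[] {2, 0}, "row2");
    map.put(new byte[] {1, 9}, "row1");
    System.out.println(map.firstEntry().getValue());   // row1
  }
}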
 

[30/51] [partial] hbase-site git commit: Published site at cd161d976ef47b84e904f2d54bac65d2f3417c2a.

2018-09-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/fa1bebf8/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html
index a5789e0..93a57cb 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html
@@ -238,4120 +238,4119 @@
 230 * @see Admin
 231 */
 232@InterfaceAudience.Private
-233@InterfaceStability.Evolving
-234public class HBaseAdmin implements Admin 
{
-235  private static final Logger LOG = 
LoggerFactory.getLogger(HBaseAdmin.class);
-236
-237  private ClusterConnection connection;
-238
-239  private final Configuration conf;
-240  private final long pause;
-241  private final int numRetries;
-242  private final int syncWaitTimeout;
-243  private boolean aborted;
-244  private int operationTimeout;
-245  private int rpcTimeout;
-246
-247  private RpcRetryingCallerFactory 
rpcCallerFactory;
-248  private RpcControllerFactory 
rpcControllerFactory;
-249
-250  private NonceGenerator ng;
-251
-252  @Override
-253  public int getOperationTimeout() {
-254return operationTimeout;
-255  }
-256
-257  HBaseAdmin(ClusterConnection 
connection) throws IOException {
-258this.conf = 
connection.getConfiguration();
-259this.connection = connection;
-260
-261// TODO: receive 
ConnectionConfiguration here rather than re-parsing these configs every time.
-262this.pause = 
this.conf.getLong(HConstants.HBASE_CLIENT_PAUSE,
-263
HConstants.DEFAULT_HBASE_CLIENT_PAUSE);
-264this.numRetries = 
this.conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,
-265
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);
-266this.operationTimeout = 
this.conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,
-267
HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);
-268this.rpcTimeout = 
this.conf.getInt(HConstants.HBASE_RPC_TIMEOUT_KEY,
-269
HConstants.DEFAULT_HBASE_RPC_TIMEOUT);
-270this.syncWaitTimeout = 
this.conf.getInt(
-271  "hbase.client.sync.wait.timeout.msec", 10 * 60000); // 10min
-272
-273this.rpcCallerFactory = 
connection.getRpcRetryingCallerFactory();
-274this.rpcControllerFactory = 
connection.getRpcControllerFactory();
-275
-276this.ng = 
this.connection.getNonceGenerator();
-277  }
-278
-279  @Override
-280  public void abort(String why, Throwable 
e) {
-281// Currently does nothing but throw 
the passed message and exception
-282this.aborted = true;
-283throw new RuntimeException(why, e);
-284  }
-285
-286  @Override
-287  public boolean isAborted() {
-288return this.aborted;
-289  }
-290
-291  @Override
-292  public boolean abortProcedure(final 
long procId, final boolean mayInterruptIfRunning)
-293  throws IOException {
-294return 
get(abortProcedureAsync(procId, mayInterruptIfRunning), this.syncWaitTimeout,
-295  TimeUnit.MILLISECONDS);
-296  }
-297
-298  @Override
-299  public Future<Boolean> abortProcedureAsync(final long procId, final boolean mayInterruptIfRunning)
-300  throws IOException {
-301Boolean abortProcResponse =
-302executeCallable(new MasterCallable<AbortProcedureResponse>(getConnection(),
-303getRpcControllerFactory()) 
{
-304  @Override
-305  protected AbortProcedureResponse 
rpcCall() throws Exception {
-306AbortProcedureRequest 
abortProcRequest =
-307
AbortProcedureRequest.newBuilder().setProcId(procId).build();
-308return 
master.abortProcedure(getRpcController(), abortProcRequest);
-309  }
-310}).getIsProcedureAborted();
-311return new AbortProcedureFuture(this, 
procId, abortProcResponse);
-312  }
-313
-314  @Override
-315  public List<TableDescriptor> listTableDescriptors() throws IOException {
-316return 
listTableDescriptors((Pattern)null, false);
-317  }
-318
-319  @Override
-320  public List<TableDescriptor> listTableDescriptors(Pattern pattern) throws IOException {
-321return listTableDescriptors(pattern, 
false);
-322  }
-323
-324  @Override
-325  public List<TableDescriptor> listTableDescriptors(Pattern pattern, boolean includeSysTables)
-326  throws IOException {
-327return executeCallable(new MasterCallable<List<TableDescriptor>>(getConnection(),
-328getRpcControllerFactory()) {
-329  @Override
-330  protected List<TableDescriptor> rpcCall() throws Exception {
-331GetTableDescriptorsRequest req 
=
-332
RequestConverter.buildGetTableDescriptorsRequest(pattern, includeSysTables);
-333return 
ProtobufUtil.toTableDescriptorList(master.getTableDescriptors(getRpcController(),
-334req));
-335  }
-336});
-337  }
-338
-339  
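
The diff above builds the synchronous admin calls from the asynchronous ones: abortProcedure() is abortProcedureAsync() plus Future.get() bounded by the configured hbase.client.sync.wait.timeout.msec, and the RPCs themselves are wrapped in MasterCallable objects passed to executeCallable(). A minimal sketch of that sync-over-async wrapping, with a local thread pool standing in for the master RPC (names are illustrative):

import java.util.concurrent.*;

// Sketch only: the synchronous call is the asynchronous one plus a bounded Future.get().
public class SyncOverAsyncSketch {
  private final ExecutorService pool = Executors.newSingleThreadExecutor();
  private final long syncWaitTimeoutMs = 10 * 60000;   // 10 min, as in the diff

  Future<Boolean> abortAsync(long procId) {
    return pool.submit(() -> procId % 2 == 0);          // pretend RPC to the master
  }

  boolean abort(long procId) throws Exception {
    return abortAsync(procId).get(syncWaitTimeoutMs, TimeUnit.MILLISECONDS);
  }

  public static void main(String[] args) throws Exception {
    SyncOverAsyncSketch admin = new SyncOverAsyncSketch();
    System.out.println(admin.abort(42));                // true
    admin.pool.shutdown();
  }
}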

[30/51] [partial] hbase-site git commit: Published site at c6a65ba63fce85ac7c4b62b96ef2bbe6c35d2f00.

2018-09-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/293abb17/devapidocs/org/apache/hadoop/hbase/tool/Canary.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/tool/Canary.html 
b/devapidocs/org/apache/hadoop/hbase/tool/Canary.html
index 40d2e0c..16a36c4 100644
--- a/devapidocs/org/apache/hadoop/hbase/tool/Canary.html
+++ b/devapidocs/org/apache/hadoop/hbase/tool/Canary.html
@@ -276,46 +276,50 @@ implements org.apache.hadoop.util.Tool
 MAX_THREADS_NUM
 
 
+private long
+permittedFailures
+
+
 private boolean
 regionServerAllRegions
 
-
+
 private boolean
 regionServerMode
 
-
+
 private Canary.Sink
 sink
 
-
+
 private long
 timeout
 
-
+
 private static int
 TIMEOUT_ERROR_EXIT_CODE
 
-
+
 private boolean
 treatFailureAsError
 
-
+
 private static int
 USAGE_EXIT_CODE
 
-
+
 private boolean
 useRegExp
 
-
+
 private boolean
 writeSniffing
 
-
+
 private TableName
 writeTableName
 
-
+
 private boolean
 zookeeperMode
 
@@ -646,13 +650,22 @@ implements org.apache.hadoop.util.Tool
 privateboolean zookeeperMode
 
 
+
+
+
+
+
+permittedFailures
+privatelong permittedFailures
+
+
 
 
 
 
 
 regionServerAllRegions
-privateboolean regionServerAllRegions
+privateboolean regionServerAllRegions
 
 
 
@@ -661,7 +674,7 @@ implements org.apache.hadoop.util.Tool
 
 
 writeSniffing
-privateboolean writeSniffing
+privateboolean writeSniffing
 
 
 
@@ -670,7 +683,7 @@ implements org.apache.hadoop.util.Tool
 
 
 configuredWriteTableTimeout
-privatelong configuredWriteTableTimeout
+privatelong configuredWriteTableTimeout
 
 
 
@@ -679,7 +692,7 @@ implements org.apache.hadoop.util.Tool
 
 
 treatFailureAsError
-privateboolean treatFailureAsError
+privateboolean treatFailureAsError
 
 
 
@@ -688,7 +701,7 @@ implements org.apache.hadoop.util.Tool
 
 
 writeTableName
-privateTableName writeTableName
+privateTableName writeTableName
 
 
 
@@ -697,7 +710,7 @@ implements org.apache.hadoop.util.Tool
 
 
 configuredReadTableTimeouts
-privatehttps://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true;
 title="class or interface in java.util">HashMaphttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long configuredReadTableTimeouts
+privatehttps://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true;
 title="class or interface in java.util">HashMaphttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long configuredReadTableTimeouts
 
 
 
@@ -706,7 +719,7 @@ implements org.apache.hadoop.util.Tool
 
 
 executor
-privatehttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in java.util.concurrent">ExecutorService executor
+privatehttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in java.util.concurrent">ExecutorService executor
 
 
 
@@ -723,7 +736,7 @@ implements org.apache.hadoop.util.Tool
 
 
 Canary
-publicCanary()
+publicCanary()
 
 
 
@@ -732,7 +745,7 @@ implements org.apache.hadoop.util.Tool
 
 
 Canary
-publicCanary(https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ExecutorServiceexecutor,
+publicCanary(https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ExecutorServiceexecutor,
   Canary.Sinksink)
 
 
@@ -750,7 +763,7 @@ implements org.apache.hadoop.util.Tool
 
 
 getConf
-publicorg.apache.hadoop.conf.ConfigurationgetConf()
+publicorg.apache.hadoop.conf.ConfigurationgetConf()
 
 Specified by:
 getConfin 
interfaceorg.apache.hadoop.conf.Configurable
@@ -763,7 +776,7 @@ implements org.apache.hadoop.util.Tool
 
 
 setConf
-publicvoidsetConf(org.apache.hadoop.conf.Configurationconf)
+publicvoidsetConf(org.apache.hadoop.conf.Configurationconf)
 
 Specified by:
 setConfin 
interfaceorg.apache.hadoop.conf.Configurable
@@ -776,7 +789,7 @@ implements org.apache.hadoop.util.Tool
 
 
 parseArgs
-privateintparseArgs(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[]args)
+privateintparseArgs(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[]args)
 
 
 
@@ -785,7 +798,7 @@ implements org.apache.hadoop.util.Tool
 
 
 run

[30/51] [partial] hbase-site git commit: Published site at 7c1fad4992a169a35b4457e6f4afcb30d04406e9.

2018-08-31 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/74f60271/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index f242040..7540a86 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -216,11 +216,11 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.procedure2.StateMachineProcedure.Flow
-org.apache.hadoop.hbase.procedure2.LockedResourceType
-org.apache.hadoop.hbase.procedure2.LockType
 org.apache.hadoop.hbase.procedure2.RootProcedureState.State
 org.apache.hadoop.hbase.procedure2.Procedure.LockState
+org.apache.hadoop.hbase.procedure2.LockType
+org.apache.hadoop.hbase.procedure2.LockedResourceType
+org.apache.hadoop.hbase.procedure2.StateMachineProcedure.Flow
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/74f60271/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index 2c29b8a..a5d95b7 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -229,13 +229,13 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.quotas.QuotaType
-org.apache.hadoop.hbase.quotas.RpcThrottlingException.Type
 org.apache.hadoop.hbase.quotas.ThrottlingException.Type
-org.apache.hadoop.hbase.quotas.OperationQuota.OperationType
+org.apache.hadoop.hbase.quotas.ThrottleType
 org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
 org.apache.hadoop.hbase.quotas.QuotaScope
-org.apache.hadoop.hbase.quotas.ThrottleType
+org.apache.hadoop.hbase.quotas.RpcThrottlingException.Type
+org.apache.hadoop.hbase.quotas.OperationQuota.OperationType
+org.apache.hadoop.hbase.quotas.QuotaType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/74f60271/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
index a2b8adf..d8caead 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 https://docs.oracle.com/javase/8/docs/api/java/lang/FunctionalInterface.html?is-external=true;
 title="class or interface in java.lang">@FunctionalInterface
-public static interface HRegion.BatchOperation.Visitor
+public static interface HRegion.BatchOperation.Visitor
 Visitor interface for batch operations
 
 
@@ -155,7 +155,7 @@ public static interface 
 
 visit
-booleanvisit(intindex)
+booleanvisit(intindex)
throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Parameters:

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/74f60271/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
index 9f20806..4523a90 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private abstract static class HRegion.BatchOperation<T>
+private abstract static class HRegion.BatchOperation<T>
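
The Visitor above is a @FunctionalInterface with a single boolean visit(int index) method, so code that walks a batch of operations can accept a lambda and let it stop the walk by returning false. A tiny stand-alone sketch of that pattern (illustrative only, not the HRegion internals):

import java.io.IOException;

// Sketch: a single-method visitor invoked once per operation index; returning false aborts.
public class VisitorSketch {
  @FunctionalInterface
  interface Visitor { boolean visit(int index) throws IOException; }

  static void visitBatch(int size, Visitor v) throws IOException {
    for (int i = 0; i < size; i++) {
      if (!v.visit(i)) break;    // the visitor decides whether to keep going
    }
  }

  public static void main(String[] args) throws IOException {
    visitBatch(5, index -> { System.out.println("op " + index); return index < 2; });
  }
}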
 

[30/51] [partial] hbase-site git commit: Published site at 3afe9fb7e6ebfa71187cbe131558a83fae61cecd.

2018-08-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/424d7e41/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
index 78cd129..17ea7d8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
@@ -51,2276 +51,2305 @@
 043import 
org.apache.hadoop.hbase.TableName;
 044import 
org.apache.hadoop.hbase.UnknownRegionException;
 045import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-046import 
org.apache.hadoop.hbase.client.MasterSwitchType;
-047import 
org.apache.hadoop.hbase.client.RegionInfo;
-048import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-049import 
org.apache.hadoop.hbase.client.TableDescriptor;
-050import 
org.apache.hadoop.hbase.client.TableState;
-051import 
org.apache.hadoop.hbase.client.VersionInfoUtil;
-052import 
org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-053import 
org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
-054import 
org.apache.hadoop.hbase.errorhandling.ForeignException;
-055import 
org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-056import 
org.apache.hadoop.hbase.io.hfile.HFile;
-057import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-058import 
org.apache.hadoop.hbase.ipc.PriorityFunction;
-059import 
org.apache.hadoop.hbase.ipc.QosPriority;
-060import 
org.apache.hadoop.hbase.ipc.RpcServer;
-061import 
org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
-062import 
org.apache.hadoop.hbase.ipc.RpcServerFactory;
-063import 
org.apache.hadoop.hbase.ipc.RpcServerInterface;
-064import 
org.apache.hadoop.hbase.ipc.ServerRpcController;
-065import 
org.apache.hadoop.hbase.master.assignment.RegionStates;
-066import 
org.apache.hadoop.hbase.master.locking.LockProcedure;
-067import 
org.apache.hadoop.hbase.master.procedure.MasterProcedureUtil;
-068import 
org.apache.hadoop.hbase.master.procedure.MasterProcedureUtil.NonceProcedureRunnable;
-069import 
org.apache.hadoop.hbase.mob.MobUtils;
-070import 
org.apache.hadoop.hbase.procedure.MasterProcedureManager;
-071import 
org.apache.hadoop.hbase.procedure2.LockType;
-072import 
org.apache.hadoop.hbase.procedure2.LockedResource;
-073import 
org.apache.hadoop.hbase.procedure2.Procedure;
-074import 
org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-075import 
org.apache.hadoop.hbase.procedure2.ProcedureUtil;
-076import 
org.apache.hadoop.hbase.procedure2.RemoteProcedureException;
-077import 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService;
-078import 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsService;
-079import 
org.apache.hadoop.hbase.quotas.MasterQuotaManager;
-080import 
org.apache.hadoop.hbase.quotas.QuotaObserverChore;
-081import 
org.apache.hadoop.hbase.quotas.QuotaUtil;
-082import 
org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;
-083import 
org.apache.hadoop.hbase.regionserver.RSRpcServices;
-084import 
org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory;
-085import 
org.apache.hadoop.hbase.replication.ReplicationException;
-086import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-087import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-088import 
org.apache.hadoop.hbase.security.User;
-089import 
org.apache.hadoop.hbase.security.access.AccessChecker;
-090import 
org.apache.hadoop.hbase.security.access.AccessController;
-091import 
org.apache.hadoop.hbase.security.access.Permission;
-092import 
org.apache.hadoop.hbase.security.visibility.VisibilityController;
-093import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-094import 
org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
-095import 
org.apache.hadoop.hbase.util.Bytes;
-096import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-097import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-098import 
org.apache.hadoop.hbase.util.Pair;
-099import 
org.apache.yetus.audience.InterfaceAudience;
-100import 
org.apache.zookeeper.KeeperException;
-101import org.slf4j.Logger;
-102import org.slf4j.LoggerFactory;
-103
-104import 
org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
-105import 
org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
-106import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
-107
-108import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-112import 

[30/51] [partial] hbase-site git commit: Published site at a452487a9b82bfd33bc10683c3f8b8ae74d58883.

2018-08-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cf79db0/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html
index 4804996..c3cf972 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html
@@ -28,500 +28,538 @@
 020import java.util.ArrayList;
 021import java.util.Collections;
 022import java.util.List;
-023
-024import org.apache.hadoop.hbase.Cell;
-025import 
org.apache.hadoop.hbase.CellUtil;
-026import 
org.apache.hadoop.hbase.HConstants;
-027import 
org.apache.hadoop.hbase.PrivateCellUtil;
-028import 
org.apache.yetus.audience.InterfaceAudience;
-029import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-030import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-031import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
-032import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-033import 
org.apache.hadoop.hbase.util.Bytes;
-034
-035/**
-036 * Filter to support scan multiple row 
key ranges. It can construct the row key ranges from the
-037 * passed list which can be accessed by 
each region server.
-038 *
-039 * HBase is quite efficient when scanning 
only one small row key range. If user needs to specify
-040 * multiple row key ranges in one scan, 
the typical solutions are: 1. through FilterList which is a
-041 * list of row key Filters, 2. using the 
SQL layer over HBase to join with two table, such as hive,
-042 * phoenix etc. However, both solutions 
are inefficient. Both of them can't utilize the range info
-043 * to perform fast forwarding during scan 
which is quite time consuming. If the number of ranges
-044 * are quite big (e.g. millions), join is 
a proper solution though it is slow. However, there are
-045 * cases that user wants to specify a 
small number of ranges to scan (e.g. <1000 ranges). Both
-046 * solutions can't provide satisfactory 
performance in such case. MultiRowRangeFilter is to support
-047 * such usec ase (scan multiple row key 
ranges), which can construct the row key ranges from user
-048 * specified list and perform 
fast-forwarding during scan. Thus, the scan will be quite efficient.
-049 */
-050@InterfaceAudience.Public
-051public class MultiRowRangeFilter extends FilterBase {
-052
-053  private List<RowRange> rangeList;
-054
-055  private static final int ROW_BEFORE_FIRST_RANGE = -1;
-056  private boolean EXCLUSIVE = false;
-057  private boolean done = false;
-058  private boolean initialized = false;
-059  private int index;
-060  private RowRange range;
-061  private ReturnCode currentReturnCode;
-062
-063  /**
-064   * @param list A list of <code>RowRange</code>
-065   */
-066  public MultiRowRangeFilter(List<RowRange> list) {
-067    this.rangeList = sortAndMerge(list);
-068  }
-069
-070  @Override
-071  public boolean filterAllRemaining() {
-072    return done;
-073  }
-074
-075  public List<RowRange> getRowRanges() {
-076    return this.rangeList;
-077  }
-078
-079  @Override
-080  public boolean filterRowKey(Cell 
firstRowCell) {
-081if (filterAllRemaining()) return 
true;
-082// If it is the first time of 
running, calculate the current range index for
-083// the row key. If index is out of 
bound which happens when the start row
-084// user sets is after the largest 
stop row of the ranges, stop the scan.
-085// If row key is after the current 
range, find the next range and update index.
-086byte[] rowArr = 
firstRowCell.getRowArray();
-087int length = 
firstRowCell.getRowLength();
-088int offset = 
firstRowCell.getRowOffset();
-089if (!initialized
-090|| !range.contains(rowArr, 
offset, length)) {
-091  byte[] rowkey = 
CellUtil.cloneRow(firstRowCell);
-092  index = 
getNextRangeIndex(rowkey);
-093  if (index >= rangeList.size()) {
-094done = true;
-095currentReturnCode = 
ReturnCode.NEXT_ROW;
-096return false;
-097  }
-098  if(index != ROW_BEFORE_FIRST_RANGE) 
{
-099range = rangeList.get(index);
-100  } else {
-101range = rangeList.get(0);
-102  }
-103  if (EXCLUSIVE) {
-104EXCLUSIVE = false;
-105currentReturnCode = 
ReturnCode.NEXT_ROW;
-106return false;
-107  }
-108  if (!initialized) {
-109if(index != 
ROW_BEFORE_FIRST_RANGE) {
-110  currentReturnCode = 
ReturnCode.INCLUDE;
-111} else {
-112  currentReturnCode = 
ReturnCode.SEEK_NEXT_USING_HINT;
-113}
-114initialized = true;
-115  } else {
-116if (range.contains(rowArr, 
offset, length)) {
-117  currentReturnCode = 
ReturnCode.INCLUDE;
-118   
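
For context, a minimal client-side sketch of how the MultiRowRangeFilter documented above is typically wired into a scan. The table name "t1" and the example row keys are placeholders, not part of this patch; the filter itself sorts and merges the supplied ranges (see sortAndMerge in the constructor above) before the scan runs.

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter;
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange;
import org.apache.hadoop.hbase.util.Bytes;

public class MultiRowRangeScanExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // Two disjoint row-key ranges; the filter sorts and merges the list internally.
    List<RowRange> ranges = Arrays.asList(
        new RowRange(Bytes.toBytes("row-a"), true, Bytes.toBytes("row-c"), false),
        new RowRange(Bytes.toBytes("row-m"), true, Bytes.toBytes("row-p"), false));
    Scan scan = new Scan();
    scan.setFilter(new MultiRowRangeFilter(ranges));
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("t1"));
         ResultScanner scanner = table.getScanner(scan)) {
      for (Result r : scanner) {
        System.out.println(Bytes.toString(r.getRow())); // only rows inside the ranges come back
      }
    }
  }
}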

[30/51] [partial] hbase-site git commit: Published site at 6a5b4f2a5c188f8eef4f2250b8b7db7dd1e750e4.

2018-08-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1ff05a18/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
index db8431b..a8cb7c4 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
@@ -885,7766 +885,7797 @@
 877   * @return What the next sequence 
(edit) id should be.
 878   * @throws IOException e
 879   */
-880  private long initialize(final 
CancelableProgressable reporter) throws IOException {
-881
-882//Refuse to open the region if there 
is no column family in the table
-883if 
(htableDescriptor.getColumnFamilyCount() == 0) {
-884  throw new 
DoNotRetryIOException("Table " + 
htableDescriptor.getTableName().getNameAsString()+
-885  " should have at least one 
column family.");
-886}
-887
-888MonitoredTask status = 
TaskMonitor.get().createStatus("Initializing region " + this);
-889long nextSeqId = -1;
-890try {
-891  nextSeqId = 
initializeRegionInternals(reporter, status);
-892  return nextSeqId;
-893} finally {
-894  // nextSeqid will be -1 if the 
initialization fails.
-895  // At least it will be 0 
otherwise.
-896  if (nextSeqId == -1) {
-897status.abort("Exception during 
region " + getRegionInfo().getRegionNameAsString() +
-898  " initialization.");
-899  }
-900}
-901  }
-902
-903  private long 
initializeRegionInternals(final CancelableProgressable reporter,
-904  final MonitoredTask status) throws 
IOException {
-905if (coprocessorHost != null) {
-906  status.setStatus("Running 
coprocessor pre-open hook");
-907  coprocessorHost.preOpen();
-908}
-909
-910// Write HRI to a file in case we 
need to recover hbase:meta
-911// Only the primary replica should 
write .regioninfo
-912if 
(this.getRegionInfo().getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {
-913  status.setStatus("Writing region 
info on filesystem");
-914  fs.checkRegionInfoOnFilesystem();
-915}
-916
-917// Initialize all the HStores
-918status.setStatus("Initializing all 
the Stores");
-919long maxSeqId = 
initializeStores(reporter, status);
-920this.mvcc.advanceTo(maxSeqId);
-921if 
(ServerRegionReplicaUtil.shouldReplayRecoveredEdits(this)) {
-922  CollectionHStore stores = 
this.stores.values();
-923  try {
-924// update the stores that we are 
replaying
-925LOG.debug("replaying wal for " + 
this.getRegionInfo().getEncodedName());
-926
stores.forEach(HStore::startReplayingFromWAL);
-927// Recover any edits if 
available.
-928maxSeqId = Math.max(maxSeqId,
-929  
replayRecoveredEditsIfAny(this.fs.getRegionDir(), maxSeqIdInStores, reporter, 
status));
-930// Make sure mvcc is up to max.
-931this.mvcc.advanceTo(maxSeqId);
-932  } finally {
-933LOG.debug("stopping wal replay 
for " + this.getRegionInfo().getEncodedName());
-934// update the stores that we are 
done replaying
-935
stores.forEach(HStore::stopReplayingFromWAL);
-936  }
-937}
-938this.lastReplayedOpenRegionSeqId = 
maxSeqId;
-939
-940
this.writestate.setReadOnly(ServerRegionReplicaUtil.isReadOnly(this));
-941this.writestate.flushRequested = 
false;
-942this.writestate.compacting.set(0);
-943
-944if (this.writestate.writesEnabled) 
{
-945  LOG.debug("Cleaning up temporary 
data for " + this.getRegionInfo().getEncodedName());
-946  // Remove temporary data left over 
from old regions
-947  status.setStatus("Cleaning up 
temporary data from old regions");
-948  fs.cleanupTempDir();
-949}
-950
-951if (this.writestate.writesEnabled) 
{
-952  status.setStatus("Cleaning up 
detritus from prior splits");
-953  // Get rid of any splits or merges 
that were lost in-progress.  Clean out
-954  // these directories here on open.  
We may be opening a region that was
-955  // being split but we crashed in 
the middle of it all.
-956  LOG.debug("Cleaning up detritus for 
" + this.getRegionInfo().getEncodedName());
-957  fs.cleanupAnySplitDetritus();
-958  fs.cleanupMergesDir();
-959}
+880  @VisibleForTesting
+881  long initialize(final 
CancelableProgressable reporter) throws IOException {
+882
+883//Refuse to open the region if there 
is no column family in the table
+884if 
(htableDescriptor.getColumnFamilyCount() == 0) {
+885  throw new 
DoNotRetryIOException("Table " + 
htableDescriptor.getTableName().getNameAsString()+
+886  " should have at least one 
column 

[30/51] [partial] hbase-site git commit: Published site at 63f2d3cbdc8151f5f61f33e0a078c51b9ac076a5.

2018-08-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7ae6a80c/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.RegionFailedOpen.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.RegionFailedOpen.html
 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.RegionFailedOpen.html
index f5e8bd0..be3922b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.RegionFailedOpen.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionStates.RegionFailedOpen.html
@@ -49,8 +49,8 @@ var activeTableTab = "activeTableTab";
 
 
 
-PrevClass
-NextClass
+PrevClass
+NextClass
 
 
 Frames
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static final class RegionStates.RegionFailedOpen
+public static final class RegionStates.RegionFailedOpen
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 
 
@@ -138,7 +138,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 exception
 
 
-private RegionStates.RegionStateNode
+private RegionStateNode
 regionNode
 
 
@@ -160,7 +160,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 Constructor and Description
 
 
-RegionFailedOpen(RegionStates.RegionStateNoderegionNode)
+RegionFailedOpen(RegionStateNoderegionNode)
 
 
 
@@ -186,7 +186,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 getRegionInfo()
 
 
-RegionStates.RegionStateNode
+RegionStateNode
 getRegionStateNode()
 
 
@@ -229,7 +229,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 regionNode
-private finalRegionStates.RegionStateNode regionNode
+private finalRegionStateNode 
regionNode
 
 
 
@@ -238,7 +238,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 exception
-private volatilehttps://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception exception
+private volatilehttps://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception exception
 
 
 
@@ -247,7 +247,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 retries
-privatehttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger retries
+privatehttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger retries
 
 
 
@@ -258,13 +258,13 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 Constructor Detail
-
+
 
 
 
 
 RegionFailedOpen
-publicRegionFailedOpen(RegionStates.RegionStateNoderegionNode)
+publicRegionFailedOpen(RegionStateNoderegionNode)
 
 
 
@@ -281,7 +281,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getRegionStateNode
-publicRegionStates.RegionStateNodegetRegionStateNode()
+publicRegionStateNodegetRegionStateNode()
 
 
 
@@ -290,7 +290,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getRegionInfo
-publicRegionInfogetRegionInfo()
+publicRegionInfogetRegionInfo()
 
 
 
@@ -299,7 +299,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 incrementAndGetRetries
-publicintincrementAndGetRetries()
+publicintincrementAndGetRetries()
 
 
 
@@ -308,7 +308,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getRetries
-publicintgetRetries()
+publicintgetRetries()
 
 
 
@@ -317,7 +317,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 setException
-publicvoidsetException(https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exceptionexception)
+publicvoidsetException(https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exceptionexception)
 
 
 
@@ -326,7 +326,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getException
-publichttps://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">ExceptiongetException()
+publichttps://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">ExceptiongetException()
 
 
 
@@ -357,8 +357,8 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 
-PrevClass
-NextClass
+PrevClass
+NextClass
 
 
 Frames


[30/51] [partial] hbase-site git commit: Published site at 092efb42749bf7fc6ad338c96aae8e7b9d3a2c74.

2018-08-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f3d62514/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.CellSortImporter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.CellSortImporter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.CellSortImporter.html
index 39170f0..7859ebc 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.CellSortImporter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.CellSortImporter.html
@@ -230,564 +230,567 @@
 222  }
 223}
 224  } catch (InterruptedException e) 
{
-225e.printStackTrace();
-226  }
-227}
-228
-229@Override
-230public void setup(Context context) 
throws IOException {
-231  cfRenameMap = 
createCfRenameMap(context.getConfiguration());
-232  filter = 
instantiateFilter(context.getConfiguration());
-233  int reduceNum = 
context.getNumReduceTasks();
-234  Configuration conf = 
context.getConfiguration();
-235  TableName tableName = 
TableName.valueOf(context.getConfiguration().get(TABLE_NAME));
-236  try (Connection conn = 
ConnectionFactory.createConnection(conf);
-237  RegionLocator regionLocator = 
conn.getRegionLocator(tableName)) {
-238byte[][] startKeys = 
regionLocator.getStartKeys();
-239if (startKeys.length != 
reduceNum) {
-240  throw new IOException("Region 
split after job initialization");
-241}
-242CellWritableComparable[] 
startKeyWraps =
-243new 
CellWritableComparable[startKeys.length - 1];
-244for (int i = 1; i  
startKeys.length; ++i) {
-245  startKeyWraps[i - 1] =
-246  new 
CellWritableComparable(KeyValueUtil.createFirstOnRow(startKeys[i]));
-247}
-248
CellWritableComparablePartitioner.START_KEYS = startKeyWraps;
-249  }
-250}
-251  }
-252
-253  /**
-254   * A mapper that just writes out 
KeyValues.
-255   */
-256  
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="EQ_COMPARETO_USE_OBJECT_EQUALS",
-257  justification="Writables are going 
away and this has been this way forever")
-258  public static class CellImporter 
extends TableMapperImmutableBytesWritable, Cell {
-259private Mapbyte[], byte[] 
cfRenameMap;
-260private Filter filter;
-261private static final Logger LOG = 
LoggerFactory.getLogger(CellImporter.class);
-262
-263/**
-264 * @param row  The current table row 
key.
-265 * @param value  The columns.
-266 * @param context  The current 
context.
-267 * @throws IOException When something 
is broken with the data.
-268 */
-269@Override
-270public void 
map(ImmutableBytesWritable row, Result value,
-271  Context context)
-272throws IOException {
-273  try {
-274if (LOG.isTraceEnabled()) {
-275  LOG.trace("Considering the 
row."
-276  + Bytes.toString(row.get(), 
row.getOffset(), row.getLength()));
-277}
-278if (filter == null
-279|| 
!filter.filterRowKey(PrivateCellUtil.createFirstOnRow(row.get(), 
row.getOffset(),
-280(short) 
row.getLength( {
-281  for (Cell kv : 
value.rawCells()) {
-282kv = filterKv(filter, kv);
-283// skip if we filtered it 
out
-284if (kv == null) continue;
-285context.write(row, new 
MapReduceExtendedCell(convertKv(kv, cfRenameMap)));
-286  }
-287}
-288  } catch (InterruptedException e) 
{
-289e.printStackTrace();
-290  }
-291}
-292
-293@Override
-294public void setup(Context context) 
{
-295  cfRenameMap = 
createCfRenameMap(context.getConfiguration());
-296  filter = 
instantiateFilter(context.getConfiguration());
-297}
-298  }
-299
-300  /**
-301   * Write table content out to files in 
hdfs.
-302   */
-303  public static class Importer extends 
TableMapperImmutableBytesWritable, Mutation {
-304private Mapbyte[], byte[] 
cfRenameMap;
-305private ListUUID 
clusterIds;
-306private Filter filter;
-307private Durability durability;
-308
-309/**
-310 * @param row  The current table row 
key.
-311 * @param value  The columns.
-312 * @param context  The current 
context.
-313 * @throws IOException When something 
is broken with the data.
-314 */
-315@Override
-316public void 
map(ImmutableBytesWritable row, Result value,
-317  Context context)
-318throws IOException {
-319  try {
-320writeResult(row, value, 
context);
-321  } catch (InterruptedException e) 
{
-322e.printStackTrace();
-323  }
-324}
-325
-326private void 
writeResult(ImmutableBytesWritable key, Result result, Context context)
-327throws IOException, 
InterruptedException {
-328  Put put = null;
-329  Delete delete = null;
-330  if 

[30/51] [partial] hbase-site git commit: Published site at 613d831429960348dc42c3bdb6ea5d31be15c81c.

2018-08-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7cf6034b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html
new file mode 100644
index 000..3d1edb3
--- /dev/null
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockDeserializer.html
@@ -0,0 +1,2186 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+Source code
+
+
+
+
+001/*
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License.  You may obtain a 
copy of the License at
+009 *
+010 * 
http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or 
agreed to in writing, software
+013 * distributed under the License is 
distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+015 * See the License for the specific 
language governing permissions and
+016 * limitations under the License.
+017 */
+018package 
org.apache.hadoop.hbase.io.hfile;
+019
+020import java.io.DataInputStream;
+021import java.io.DataOutput;
+022import java.io.DataOutputStream;
+023import java.io.IOException;
+024import java.io.InputStream;
+025import java.nio.ByteBuffer;
+026import 
java.util.concurrent.atomic.AtomicReference;
+027import java.util.concurrent.locks.Lock;
+028import 
java.util.concurrent.locks.ReentrantLock;
+029
+030import 
org.apache.hadoop.fs.FSDataInputStream;
+031import 
org.apache.hadoop.fs.FSDataOutputStream;
+032import org.apache.hadoop.fs.Path;
+033import org.apache.hadoop.hbase.Cell;
+034import 
org.apache.hadoop.hbase.HConstants;
+035import 
org.apache.yetus.audience.InterfaceAudience;
+036import org.slf4j.Logger;
+037import org.slf4j.LoggerFactory;
+038import 
org.apache.hadoop.hbase.fs.HFileSystem;
+039import 
org.apache.hadoop.hbase.io.ByteArrayOutputStream;
+040import 
org.apache.hadoop.hbase.io.ByteBuffInputStream;
+041import 
org.apache.hadoop.hbase.io.ByteBufferWriterDataOutputStream;
+042import 
org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
+043import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+044import 
org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
+045import 
org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;
+046import 
org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
+047import 
org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
+048import 
org.apache.hadoop.hbase.nio.ByteBuff;
+049import 
org.apache.hadoop.hbase.nio.MultiByteBuff;
+050import 
org.apache.hadoop.hbase.nio.SingleByteBuff;
+051import 
org.apache.hadoop.hbase.util.Bytes;
+052import 
org.apache.hadoop.hbase.util.ChecksumType;
+053import 
org.apache.hadoop.hbase.util.ClassSize;
+054import org.apache.hadoop.io.IOUtils;
+055
+056import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+057import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+058
+059/**
+060 * Cacheable Blocks of an {@link HFile} version 2 file.
+061 * Version 2 was introduced in hbase-0.92.0.
+062 *
+063 * <p>Version 1 was the original file block. Version 2 was introduced when we changed the hbase file
+064 * format to support multi-level block indexes and compound bloom filters (HBASE-3857). Support
+065 * for Version 1 was removed in hbase-1.3.0.
+066 *
+067 * <h3>HFileBlock: Version 2</h3>
+068 * In version 2, a block is structured as follows:
+069 * <ul>
+070 * <li><b>Header:</b> See Writer#putHeader() for where header is written; header total size is
+071 * HFILEBLOCK_HEADER_SIZE
+072 * <ul>
+073 * <li>0. blockType: Magic record identifying the {@link BlockType} (8 bytes):
+074 * e.g. <code>DATABLK*</code>
+075 * <li>1. onDiskSizeWithoutHeader: Compressed -- a.k.a 'on disk' -- block size, excluding header,
+076 * but including tailing checksum bytes (4 bytes)
+077 * <li>2. uncompressedSizeWithoutHeader: Uncompressed block size, excluding header, and excluding
+078 * checksum bytes (4 bytes)
+079 * <li>3. prevBlockOffset: The offset of the previous block of the same type (8 bytes). This is
+080 * used to navigate to the previous block without having to go to the block index
+081 * <li>4: For minorVersions >=1, the ordinal describing checksum type (1 byte)
+082 * <li>5: For minorVersions >=1, the number of data bytes/checksum chunk (4 bytes)
+083 * <li>6: onDiskDataSizeWithHeader: For minorVersions >=1, the size of data 'on 
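
As a rough cross-check of the version-2 header layout enumerated above, the documented field widths can be added up in a few lines of Java. The 4-byte width of onDiskDataSizeWithHeader is an assumption here, since the list is cut off at this point; under that assumption the total should come to 33 bytes, which is what the HFILEBLOCK_HEADER_SIZE constant referenced above would then be for checksummed blocks.

public class HFileBlockHeaderMath {
  public static void main(String[] args) {
    // Field widths as listed in the javadoc above (version 2, minor version >= 1).
    int magic = 8;                     // blockType magic, e.g. DATABLK*
    int onDiskSizeWithoutHeader = 4;   // compressed size, excluding header
    int uncompressedSizeWithoutHeader = 4;
    int prevBlockOffset = 8;           // offset of the previous block of the same type
    int checksumType = 1;              // checksum type ordinal
    int bytesPerChecksum = 4;          // data bytes per checksum chunk
    int onDiskDataSizeWithHeader = 4;  // assumed width; the list above is truncated here
    System.out.println(magic + onDiskSizeWithoutHeader + uncompressedSizeWithoutHeader
        + prevBlockOffset + checksumType + bytesPerChecksum + onDiskDataSizeWithHeader); // 33
  }
}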

[30/51] [partial] hbase-site git commit: Published site at ba5d1c1f28301adc99019d9d6c4a04fac98ae511.

2018-07-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/804782f0/devapidocs/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.html
index 9025538..c15caed 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.html
@@ -813,7 +813,7 @@ extends 
 
 queueHasRunnables
-protectedbooleanqueueHasRunnables()
+protectedbooleanqueueHasRunnables()
 Description copied from 
class:AbstractProcedureScheduler
 Returns true if there are procedures available to process.
  NOTE: this method is called with the sched lock held.
@@ -831,7 +831,7 @@ extends 
 
 dequeue
-protectedProceduredequeue()
+protectedProceduredequeue()
 Description copied from 
class:AbstractProcedureScheduler
 Fetch one Procedure from the queue
  NOTE: this method is called with the sched lock held.
@@ -849,7 +849,7 @@ extends 
 
 doPoll
-privateT extends https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableTProcedure?doPoll(FairQueueTfairq)
+privateT extends https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableTProcedure?doPoll(FairQueueTfairq)
 
 
 
@@ -858,7 +858,7 @@ extends 
 
 getLocks
-publichttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListLockedResourcegetLocks()
+publichttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListLockedResourcegetLocks()
 Description copied from 
interface:ProcedureScheduler
 List lock queues.
 
@@ -873,7 +873,7 @@ extends 
 
 getLockResource
-publicLockedResourcegetLockResource(LockedResourceTyperesourceType,
+publicLockedResourcegetLockResource(LockedResourceTyperesourceType,
   https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringresourceName)
 
 Returns:
@@ -887,7 +887,7 @@ extends 
 
 clear
-publicvoidclear()
+publicvoidclear()
 Description copied from 
interface:ProcedureScheduler
 Clear current state of scheduler such that it is equivalent 
to newly created scheduler.
  Used for testing failure and recovery. To emulate server crash/restart,
@@ -900,7 +900,7 @@ extends 
 
 clearQueue
-privatevoidclearQueue()
+privatevoidclearQueue()
 
 
 
@@ -911,7 +911,7 @@ extends 
 
 clear
-privateT extends https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT,TNode extends 
QueueTvoidclear(TNodetreeMap,
+privateT extends https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT,TNode extends 
QueueTvoidclear(TNodetreeMap,
 FairQueueTfairq,
 AvlUtil.AvlKeyComparatorTNodecomparator)
 
@@ -922,7 +922,7 @@ extends 
 
 queueSize
-privateintqueueSize(Queue?head)
+privateintqueueSize(Queue?head)
 
 
 
@@ -931,7 +931,7 @@ extends 
 
 queueSize
-protectedintqueueSize()
+protectedintqueueSize()
 Description copied from 
class:AbstractProcedureScheduler
 Returns the number of elements in this queue.
  NOTE: this method is called with the sched lock held.
@@ -949,7 +949,7 @@ extends 
 
 completionCleanup
-publicvoidcompletionCleanup(Procedureproc)
+publicvoidcompletionCleanup(Procedureproc)
 Description copied from 
interface:ProcedureScheduler
 The procedure in execution completed.
  This can be implemented to perform cleanups.
@@ -965,7 +965,7 @@ extends 
 
 addToRunQueue
-private staticT extends https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in 
java.lang">ComparableTvoidaddToRunQueue(FairQueueTfairq,
+private staticT extends https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in 
java.lang">ComparableTvoidaddToRunQueue(FairQueueTfairq,
 QueueTqueue)
 
 
@@ -975,7 +975,7 @@ extends 
 
 removeFromRunQueue
-private staticT extends https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in 
java.lang">ComparableTvoidremoveFromRunQueue(FairQueueTfairq,
+private staticT extends https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in 

[30/51] [partial] hbase-site git commit: Published site at b4759ce6e72f50ccd9d410bd5917dc5a515414f1.

2018-07-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/df8fd1d3/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html
index d4bf03c..c372545 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html
@@ -574,7 +574,7 @@
 566 * and rack have the highest locality 
for region
 567 */
 568private void 
computeCachedLocalities() {
-569  rackLocalities = new 
float[numRegions][numServers];
+569  rackLocalities = new 
float[numRegions][numRacks];
 570  regionsToMostLocalEntities = new 
int[LocalityType.values().length][numRegions];
 571
 572  // Compute localities and find most 
local server per region

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/df8fd1d3/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
index d4bf03c..c372545 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
@@ -574,7 +574,7 @@
 566 * and rack have the highest locality 
for region
 567 */
 568private void 
computeCachedLocalities() {
-569  rackLocalities = new 
float[numRegions][numServers];
+569  rackLocalities = new 
float[numRegions][numRacks];
 570  regionsToMostLocalEntities = new 
int[LocalityType.values().length][numRegions];
 571
 572  // Compute localities and find most 
local server per region

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/df8fd1d3/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
index d4bf03c..c372545 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
@@ -574,7 +574,7 @@
 566 * and rack have the highest locality 
for region
 567 */
 568private void 
computeCachedLocalities() {
-569  rackLocalities = new 
float[numRegions][numServers];
+569  rackLocalities = new 
float[numRegions][numRacks];
 570  regionsToMostLocalEntities = new 
int[LocalityType.values().length][numRegions];
 571
 572  // Compute localities and find most 
local server per region

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/df8fd1d3/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
index d4bf03c..c372545 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
@@ -574,7 +574,7 @@
 566 * and rack have the highest locality 
for region
 567 */
 568private void 
computeCachedLocalities() {
-569  rackLocalities = new 
float[numRegions][numServers];
+569  rackLocalities = new 
float[numRegions][numRacks];
 570  regionsToMostLocalEntities = new 
int[LocalityType.values().length][numRegions];
 571
 572  // Compute localities and find most 
local server per region

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/df8fd1d3/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
 

[30/51] [partial] hbase-site git commit: Published site at e66a6603e36ecd67237ca16acd5e2de03f0d372d.

2018-07-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0c6f447e/apidocs/org/apache/hadoop/hbase/HRegionLocation.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HRegionLocation.html 
b/apidocs/org/apache/hadoop/hbase/HRegionLocation.html
index b02567a..fe2ce0e 100644
--- a/apidocs/org/apache/hadoop/hbase/HRegionLocation.html
+++ b/apidocs/org/apache/hadoop/hbase/HRegionLocation.html
@@ -1,6 +1,6 @@
 http://www.w3.org/TR/html4/loose.dtd;>
 
-
+
 
 
 
@@ -19,45 +19,45 @@
 }
 //-->
 var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":42,"i7":10,"i8":10,"i9":10,"i10":10};
-var tabs = {65535:["t0","所有方法"],2:["t2","实例方法"],8:["t4","具体方法"],32:["t6","已过时的方法"]};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
 var activeTableTab = "activeTableTab";
 
 
-您的浏览器已禁用 JavaScript。
+JavaScript is disabled on your browser.
 
 
 
 
 
-跳过导航链接
+Skip navigation links
 
 
 
-
-概览
-程序包
-类
-使用
-树
-已过时
-索引
-帮助
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
 
 
 
 
-上一个类
-下一个类
+PrevClass
+NextClass
 
 
-框架
-无框架
+Frames
+NoFrames
 
 
-所有类
+AllClasses
 
 
 

[30/51] [partial] hbase-site git commit: Published site at e66a6603e36ecd67237ca16acd5e2de03f0d372d.

2018-07-19 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5427a45e/apidocs/org/apache/hadoop/hbase/HRegionInfo.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HRegionInfo.html 
b/apidocs/org/apache/hadoop/hbase/HRegionInfo.html
index 1655e52..cb3cceb 100644
--- a/apidocs/org/apache/hadoop/hbase/HRegionInfo.html
+++ b/apidocs/org/apache/hadoop/hbase/HRegionInfo.html
@@ -1,6 +1,6 @@
 http://www.w3.org/TR/html4/loose.dtd;>
 
-
+
 
 
 
@@ -19,45 +19,45 @@
 }
 //-->
 var methods = 
{"i0":41,"i1":42,"i2":42,"i3":42,"i4":41,"i5":42,"i6":42,"i7":42,"i8":42,"i9":42,"i10":42,"i11":42,"i12":42,"i13":42,"i14":42,"i15":41,"i16":41,"i17":42,"i18":41,"i19":42,"i20":41,"i21":42,"i22":41,"i23":42,"i24":42,"i25":42,"i26":42,"i27":42,"i28":42,"i29":41,"i30":41,"i31":41,"i32":41,"i33":41,"i34":42,"i35":42,"i36":42,"i37":42,"i38":42};
-var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
+var tabs = {65535:["t0","所有方法"],1:["t1","静态方法"],2:["t2","实例方法"],8:["t4","具体方法"],32:["t6","已过时的方法"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
 var activeTableTab = "activeTableTab";
 
 
-JavaScript is disabled on your browser.
+您的浏览器已禁用 JavaScript。
 
 
 
 
 
-Skip navigation links
+跳过导航链接
 
 
 
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
+
+概览
+程序包
+类
+使用
+树
+已过时
+索引
+帮助
 
 
 
 
-PrevClass
-NextClass
+上一个类
+下一个类
 
 
-Frames
-NoFrames
+框架
+无框架
 
 
-AllClasses
+所有类
 
 
 

[30/51] [partial] hbase-site git commit: Published site at 0f23784182ab88649de340d75804e0ff20dcd0fc.

2018-07-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bcb555af/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
index 31c6fd0..7e28d37 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HRegionServer.CompactionChecker
+private static class HRegionServer.CompactionChecker
 extends ScheduledChore
 
 
@@ -233,7 +233,7 @@ extends 
 
 instance
-private finalHRegionServer instance
+private finalHRegionServer instance
 
 
 
@@ -242,7 +242,7 @@ extends 
 
 majorCompactPriority
-private finalint majorCompactPriority
+private finalint majorCompactPriority
 
 
 
@@ -251,7 +251,7 @@ extends 
 
 DEFAULT_PRIORITY
-private static finalint DEFAULT_PRIORITY
+private static finalint DEFAULT_PRIORITY
 
 See Also:
 Constant
 Field Values
@@ -264,7 +264,7 @@ extends 
 
 iteration
-privatelong iteration
+privatelong iteration
 
 
 
@@ -281,7 +281,7 @@ extends 
 
 CompactionChecker
-CompactionChecker(HRegionServerh,
+CompactionChecker(HRegionServerh,
   intsleepTime,
   Stoppablestopper)
 
@@ -300,7 +300,7 @@ extends 
 
 chore
-protectedvoidchore()
+protectedvoidchore()
 Description copied from 
class:ScheduledChore
 The task to execute on each scheduled execution of the 
Chore
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bcb555af/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
index 6eda1d5..cab0193 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HRegionServer.MovedRegionInfo
+private static class HRegionServer.MovedRegionInfo
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 
 
@@ -218,7 +218,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 serverName
-private finalServerName serverName
+private finalServerName serverName
 
 
 
@@ -227,7 +227,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 seqNum
-private finallong seqNum
+private finallong seqNum
 
 
 
@@ -236,7 +236,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 ts
-private finallong ts
+private finallong ts
 
 
 
@@ -253,7 +253,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 MovedRegionInfo
-publicMovedRegionInfo(ServerNameserverName,
+publicMovedRegionInfo(ServerNameserverName,
longcloseSeqNum)
 
 
@@ -271,7 +271,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getServerName
-publicServerNamegetServerName()
+publicServerNamegetServerName()
 
 
 
@@ -280,7 +280,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getSeqNum
-publiclonggetSeqNum()
+publiclonggetSeqNum()
 
 
 
@@ -289,7 +289,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getMoveTime
-publiclonggetMoveTime()
+publiclonggetMoveTime()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bcb555af/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
index b2b3568..6cf54e0 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-protected static final class HRegionServer.MovedRegionsCleaner
+protected static final class HRegionServer.MovedRegionsCleaner
 extends ScheduledChore
 implements Stoppable
 Creates a Chore thread to clean the moved region 
cache.
@@ -242,7 +242,7 @@ implements 
 
 regionServer
-privateHRegionServer regionServer
+privateHRegionServer regionServer
 
 
 
@@ -251,7 +251,7 @@ implements 
 
 

[30/51] [partial] hbase-site git commit: Published site at 85b41f36e01214b6485c9352875c84ebf877dab3.

2018-06-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a5c66de0/devapidocs/org/apache/hadoop/hbase/security/access/AccessChecker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/AccessChecker.html 
b/devapidocs/org/apache/hadoop/hbase/security/access/AccessChecker.html
index a8cc47a..ac0967d 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/AccessChecker.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/AccessChecker.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":9,"i3":9,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10};
+var methods = 
{"i0":10,"i1":10,"i2":9,"i3":10,"i4":9,"i5":9,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames
@@ -74,7 +74,7 @@ var activeTableTab = "activeTableTab";
 
 
 Summary:
-Nested|
+Nested|
 Field|
 Constr|
 Method
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public final class AccessChecker
+public final class AccessChecker
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 
 
@@ -118,6 +118,27 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 
+
+
+
+
+
+Nested Class Summary
+
+Nested Classes
+
+Modifier and Type
+Class and Description
+
+
+static class
+AccessChecker.InputUser
+A temporary user class to instantiate User instance based 
on the name and groups.
+
+
+
+
+
 
 
 
@@ -145,6 +166,16 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 has been set to true in site configuration. See HBASE-19483.
 
 
+
+private static 
org.apache.hadoop.security.Groups
+groupService
+Group service to retrieve the user group information
+
+
+
+private static org.slf4j.Logger
+LOG
+
 
 
 
@@ -193,14 +224,24 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 getAuthManager()
 
 
+static https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+getUserGroups(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringuser)
+Retrieve the groups of the given user.
+
+
+
+private void
+initGroupService(org.apache.hadoop.conf.Configurationconf)
+
+
 static boolean
 isAuthorizationSupported(org.apache.hadoop.conf.Configurationconf)
 
-
+
 static void
 logResult(AuthResultresult)
 
-
+
 void
 requireAccess(Useruser,
  https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringrequest,
@@ -209,7 +250,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 Authorizes that the current user has any of the given 
permissions to access the table.
 
 
-
+
 void
 requireGlobalPermission(Useruser,
https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringrequest,
@@ -218,26 +259,28 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 Checks that the user has the given global permission.
 
 
-
+
 void
-requireGlobalPermission(Useruser,
+requireGlobalPermission(Useruser,
https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringrequest,
Permission.Actionperm,
TableNametableName,
-   https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],? extends https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in 
java.util">Collectionbyte[]familyMap)
+   https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],? extends https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in 
java.util">Collectionbyte[]familyMap,
+   https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringfilterUser)
 Checks that the user has the given global permission.
 
 
-
+
 void

[30/51] [partial] hbase-site git commit: Published site at 6198e1fc7dfa85c3bc6b2855f9a5fb5f4b2354ff.

2018-06-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb5d2c62/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceState.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceState.html
 
b/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceState.html
index 495d605..18c887d 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceState.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceState.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class ConnectionImplementation.MasterServiceState
+static class ConnectionImplementation.MasterServiceState
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 State of the MasterService connection/setup.
 
@@ -222,7 +222,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 connection
-Connection connection
+Connection connection
 
 
 
@@ -231,7 +231,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 stub
-org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface
 stub
+org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface
 stub
 
 
 
@@ -240,7 +240,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 userCount
-int userCount
+int userCount
 
 
 
@@ -257,7 +257,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 MasterServiceState
-MasterServiceState(Connectionconnection)
+MasterServiceState(Connectionconnection)
 
 
 
@@ -274,7 +274,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 toString
-publichttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
+publichttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
 
 Overrides:
 https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--;
 title="class or interface in java.lang">toStringin 
classhttps://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
@@ -287,7 +287,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getStub
-https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">ObjectgetStub()
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">ObjectgetStub()
 
 
 
@@ -296,7 +296,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 clearStub
-voidclearStub()
+voidclearStub()
 
 
 
@@ -305,7 +305,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 isMasterRunning
-booleanisMasterRunning()
+booleanisMasterRunning()
  throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb5d2c62/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceStubMaker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceStubMaker.html
 
b/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceStubMaker.html
index d42ae99..115cae2 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceStubMaker.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.MasterServiceStubMaker.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private final class ConnectionImplementation.MasterServiceStubMaker
+private final class ConnectionImplementation.MasterServiceStubMaker
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 Class to make a MasterServiceStubMaker stub.
 
@@ -197,7 +197,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 MasterServiceStubMaker
-privateMasterServiceStubMaker()
+privateMasterServiceStubMaker()
 
 
 
@@ -214,7 +214,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 isMasterRunning
-privatevoidisMasterRunning(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterfacestub)

[30/51] [partial] hbase-site git commit: Published site at 14087cc919da9f2e0b1a68f701f6365ad9d1d71f.

2018-06-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55ce8d97/devapidocs/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.html 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.html
index 70f6391..d01bd8b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.html
@@ -223,8 +223,9 @@ extends MoveRegionProcedure()
 
 
-MoveRegionProcedure(MasterProcedureEnvenv,
-   RegionPlanplan)
+MoveRegionProcedure(MasterProcedureEnvenv,
+   RegionPlanplan,
+   booleancheck)
 
 
 
@@ -396,19 +397,24 @@ extends MoveRegionProcedure()
 
 
-
+
 
 
 
 
 MoveRegionProcedure
-publicMoveRegionProcedure(MasterProcedureEnvenv,
-   RegionPlanplan)
+publicMoveRegionProcedure(MasterProcedureEnvenv,
+   RegionPlanplan,
+   booleancheck)
 throws HBaseIOException
 
+Parameters:
+check - whether we should do some checks in the constructor. We will skip the checks if we
+  are reopening a region, as this may fail the whole procedure and cause it to get stuck. We will
+  do the check later, when actually executing the procedure, so it is not a big problem.
big problem.
 Throws:
-https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException - If the cluster 
is offline or master is stopping or if table is disabled
-   or non-existent.
+https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException - If the cluster 
is offline or master is stopping or if table is disabled or
+   non-existent.
 HBaseIOException
 
 
@@ -427,7 +433,7 @@ extends 
 
 executeFromState
-protectedStateMachineProcedure.FlowexecuteFromState(MasterProcedureEnvenv,
+protectedStateMachineProcedure.FlowexecuteFromState(MasterProcedureEnvenv,
   
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionStatestate)
throws https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true;
 title="class or interface in java.lang">InterruptedException
 Description copied from 
class:StateMachineProcedure
@@ -450,7 +456,7 @@ extends 
 
 rollbackState
-protectedvoidrollbackState(MasterProcedureEnvenv,
+protectedvoidrollbackState(MasterProcedureEnvenv,
  
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionStatestate)
   throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
class:StateMachineProcedure
@@ -470,7 +476,7 @@ extends 
 
 abort
-publicbooleanabort(MasterProcedureEnvenv)
+publicbooleanabort(MasterProcedureEnvenv)
 Description copied from 
class:Procedure
 The abort() call is asynchronous and each procedure must 
decide how to deal
  with it, if they want to be abortable. The simplest implementation
@@ -493,7 +499,7 @@ extends 
 
 toStringClassDetails
-publicvoidtoStringClassDetails(https://docs.oracle.com/javase/8/docs/api/java/lang/StringBuilder.html?is-external=true;
 title="class or interface in java.lang">StringBuildersb)
+publicvoidtoStringClassDetails(https://docs.oracle.com/javase/8/docs/api/java/lang/StringBuilder.html?is-external=true;
 title="class or interface in java.lang">StringBuildersb)
 Description copied from 
class:Procedure
 Extend the toString() information with the procedure details
  e.g. className and parameters
@@ -511,7 +517,7 @@ extends 
 
 getInitialState
-protectedorg.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionStategetInitialState()
+protectedorg.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionStategetInitialState()
 Description copied from 
class:StateMachineProcedure
 Return the initial state object that will be used for the 
first call to executeFromState().
 
@@ -528,7 +534,7 @@ extends 
 
 getStateId
-protectedintgetStateId(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionStatestate)
+protectedintgetStateId(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionStatestate)
 Description copied from 
class:StateMachineProcedure
 Convert the Enum (or more descriptive) state object to an 
ordinal (or state id).
 
@@ -547,7 +553,7 @@ extends 
 
 getState
-protectedorg.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionStategetState(intstateId)

[30/51] [partial] hbase-site git commit: Published site at 72784c2d836a4b977667449d3adec5e8d15453f5.

2018-06-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2b11656f/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
index b6e7636..592c2cc 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
@@ -356,3901 +356,3924 @@
 348  public FutureVoid 
modifyTableAsync(TableDescriptor td) throws IOException {
 349ModifyTableResponse response = 
executeCallable(
 350  new 
MasterCallableModifyTableResponse(getConnection(), 
getRpcControllerFactory()) {
-351@Override
-352protected ModifyTableResponse 
rpcCall() throws Exception {
-353  
setPriority(td.getTableName());
-354  ModifyTableRequest request = 
RequestConverter.buildModifyTableRequest(
-355td.getTableName(), td, 
ng.getNonceGroup(), ng.newNonce());
-356  return 
master.modifyTable(getRpcController(), request);
-357}
-358  });
-359return new ModifyTableFuture(this, 
td.getTableName(), response);
-360  }
-361
-362  @Override
-363  public ListTableDescriptor 
listTableDescriptorsByNamespace(byte[] name) throws IOException {
-364return executeCallable(new 
MasterCallableListTableDescriptor(getConnection(),
-365getRpcControllerFactory()) {
-366  @Override
-367  protected 
ListTableDescriptor rpcCall() throws Exception {
-368return 
master.listTableDescriptorsByNamespace(getRpcController(),
-369
ListTableDescriptorsByNamespaceRequest.newBuilder()
-370  
.setNamespaceName(Bytes.toString(name)).build())
-371.getTableSchemaList()
-372.stream()
-373
.map(ProtobufUtil::toTableDescriptor)
-374
.collect(Collectors.toList());
-375  }
-376});
-377  }
-378
-379  @Override
-380  public ListTableDescriptor 
listTableDescriptors(ListTableName tableNames) throws IOException {
-381return executeCallable(new 
MasterCallableListTableDescriptor(getConnection(),
-382getRpcControllerFactory()) {
-383  @Override
-384  protected 
ListTableDescriptor rpcCall() throws Exception {
-385GetTableDescriptorsRequest req 
=
-386
RequestConverter.buildGetTableDescriptorsRequest(tableNames);
-387  return 
ProtobufUtil.toTableDescriptorList(master.getTableDescriptors(getRpcController(),
-388  req));
-389  }
-390});
-391  }
-392
-393  @Override
-394  public ListRegionInfo 
getRegions(final ServerName sn) throws IOException {
-395AdminService.BlockingInterface admin 
= this.connection.getAdmin(sn);
-396// TODO: There is no timeout on this 
controller. Set one!
-397HBaseRpcController controller = 
rpcControllerFactory.newController();
-398return 
ProtobufUtil.getOnlineRegions(controller, admin);
-399  }
-400
-401  @Override
-402  public ListRegionInfo 
getRegions(TableName tableName) throws IOException {
-403if 
(TableName.isMetaTableName(tableName)) {
-404  return 
Arrays.asList(RegionInfoBuilder.FIRST_META_REGIONINFO);
-405} else {
-406  return 
MetaTableAccessor.getTableRegions(connection, tableName, true);
-407}
-408  }
-409
-410  private static class 
AbortProcedureFuture extends ProcedureFutureBoolean {
-411private boolean isAbortInProgress;
-412
-413public AbortProcedureFuture(
-414final HBaseAdmin admin,
-415final Long procId,
-416final Boolean abortProcResponse) 
{
-417  super(admin, procId);
-418  this.isAbortInProgress = 
abortProcResponse;
-419}
-420
-421@Override
-422public Boolean get(long timeout, 
TimeUnit unit)
-423throws InterruptedException, 
ExecutionException, TimeoutException {
-424  if (!this.isAbortInProgress) {
-425return false;
-426  }
-427  super.get(timeout, unit);
-428  return true;
-429}
-430  }
-431
-432  /** @return Connection used by this 
object. */
-433  @Override
-434  public Connection getConnection() {
-435return connection;
-436  }
-437
-438  @Override
-439  public boolean tableExists(final 
TableName tableName) throws IOException {
-440return executeCallable(new 
RpcRetryingCallableBoolean() {
-441  @Override
-442  protected Boolean rpcCall(int 
callTimeout) throws Exception {
-443return 
MetaTableAccessor.tableExists(connection, tableName);
-444  }
-445});
-446  }
-447
-448  @Override
-449  public HTableDescriptor[] listTables() 
throws IOException {
-450return listTables((Pattern)null, 
false);
-451  }
-452
-453  @Override
-454  public HTableDescriptor[] 
listTables(Pattern pattern) throws IOException {
-455   
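The hunk above shows HBaseAdmin wrapping master RPCs such as modifyTableAsync, listTableDescriptors, getRegions and tableExists. For orientation, a minimal client-side sketch that drives the same operations through the public Admin interface (a hedged illustration only; it assumes a running cluster, a Connection named conn, and an existing table "t1") might look like:

    // Hedged sketch of the Admin calls whose server-side plumbing appears in the hunk above.
    import java.util.List;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

    public class AdminSketch {
      static void run(Connection conn) throws Exception {
        try (Admin admin = conn.getAdmin()) {
          TableName name = TableName.valueOf("t1");
          if (admin.tableExists(name)) {                       // answered from hbase:meta
            TableDescriptor current = admin.getDescriptor(name);
            TableDescriptor modified = TableDescriptorBuilder.newBuilder(current)
                .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f2"))
                .build();
            admin.modifyTable(modified);                       // blocks on the ModifyTableFuture
            List<RegionInfo> regions = admin.getRegions(name); // region list read from hbase:meta
            System.out.println(name + " now has " + regions.size() + " region(s)");
          }
        }
      }
    }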

[30/51] [partial] hbase-site git commit: Published site at 9101fc246f86445006bfbcdfda5cc495016dc280.

2018-06-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/65565d77/devapidocs/org/apache/hadoop/hbase/master/procedure/SchemaLocking.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/SchemaLocking.html 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/SchemaLocking.html
index 2153167..29b58bc 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/SchemaLocking.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/SchemaLocking.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":42,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -188,7 +188,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 Method Summary
 
-All MethodsInstance MethodsConcrete Methods
+All MethodsInstance MethodsConcrete MethodsDeprecated Methods
 
 Modifier and Type
 Method and Description
@@ -234,7 +234,12 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 (package private) LockAndQueue
-getMetaLock()
+getMetaLock()
+Deprecated.
+only used for RecoverMetaProcedure.
 Should be removed along with
+ RecoverMetaProcedure.
+
+
 
 
 (package private) LockAndQueue
@@ -427,7 +432,10 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getMetaLock
-LockAndQueuegetMetaLock()
+https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
+LockAndQueuegetMetaLock()
+Deprecated.only used for RecoverMetaProcedure.
 Should be removed along with
+ RecoverMetaProcedure.
 
 
 
@@ -436,7 +444,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 removeRegionLock
-LockAndQueueremoveRegionLock(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringencodedRegionName)
+LockAndQueueremoveRegionLock(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringencodedRegionName)
 
 
 
@@ -445,7 +453,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getServerLock
-LockAndQueuegetServerLock(ServerNameserverName)
+LockAndQueuegetServerLock(ServerNameserverName)
 
 
 
@@ -454,7 +462,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getPeerLock
-LockAndQueuegetPeerLock(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringpeerId)
+LockAndQueuegetPeerLock(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringpeerId)
 
 
 
@@ -463,7 +471,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 removePeerLock
-LockAndQueueremovePeerLock(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringpeerId)
+LockAndQueueremovePeerLock(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringpeerId)
 
 
 
@@ -472,7 +480,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 createLockedResource
-privateLockedResourcecreateLockedResource(LockedResourceTyperesourceType,
+privateLockedResourcecreateLockedResource(LockedResourceTyperesourceType,
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringresourceName,
 LockAndQueuequeue)
 
@@ -483,7 +491,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 addToLockedResources
-privateTvoidaddToLockedResources(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListLockedResourcelockedResources,
+privateTvoidaddToLockedResources(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListLockedResourcelockedResources,
   https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in 
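The hunk above only records, on the generated page, that SchemaLocking.getMetaLock() is now deprecated and kept solely for RecoverMetaProcedure. A self-contained, hedged illustration of the source-level pattern that produces such a page (stand-in types below; the real class is package-private master code):

    // Illustrative only: mirrors the @Deprecated plus @deprecated-javadoc pattern shown above.
    class LockAndQueue { }

    class SchemaLockingSketch {
      private final LockAndQueue metaLock = new LockAndQueue();

      /**
       * @deprecated only used for RecoverMetaProcedure. Should be removed along with
       *             RecoverMetaProcedure.
       */
      @Deprecated
      LockAndQueue getMetaLock() {
        return metaLock;
      }
    }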

[30/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
index f236300..513d2ad 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
@@ -26,1048 +26,1115 @@
 018
 019package 
org.apache.hadoop.hbase.backup.impl;
 020
-021import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
-022import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
-023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
-024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
-025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
-026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
-027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
-028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
-029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
-030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
-031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
-032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
-033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
-034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
-035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
-036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
-037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
-038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
-039
-040import java.io.IOException;
-041import java.net.URI;
-042import java.util.List;
+021import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BACKUP_LIST_DESC;
+022import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
+023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
+024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
+025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP;
+027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP_DESC;
+028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_LIST;
+029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
+030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
+031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
+032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
+033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
+034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
+035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
+036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
+037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
+038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
+039import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
+040import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
+041import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
+042import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
 043
-044import 
org.apache.commons.lang3.StringUtils;
-045import 
org.apache.hadoop.conf.Configuration;
-046import 
org.apache.hadoop.conf.Configured;
-047import org.apache.hadoop.fs.FileSystem;
-048import org.apache.hadoop.fs.Path;
-049import 
org.apache.hadoop.hbase.HBaseConfiguration;
-050import 
org.apache.hadoop.hbase.TableName;
-051import 
org.apache.hadoop.hbase.backup.BackupAdmin;
-052import 
org.apache.hadoop.hbase.backup.BackupInfo;
-053import 

[30/51] [partial] hbase-site git commit: Published site at 7d3750bd9fc9747623549c242cc4171e224b3eaf.

2018-06-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3469cbc0/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
index aa48364..9549aa5 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
@@ -2830,843 +2830,858 @@
 2822   * @return true if master is in 
maintenanceMode
 2823   */
 2824  @Override
-2825  public boolean isInMaintenanceMode() 
{
-2826return 
maintenanceModeTracker.isInMaintenanceMode();
-2827  }
-2828
-2829  @VisibleForTesting
-2830  public void setInitialized(boolean 
isInitialized) {
-2831
procedureExecutor.getEnvironment().setEventReady(initialized, isInitialized);
-2832  }
-2833
-2834  @Override
-2835  public ProcedureEvent? 
getInitializedEvent() {
-2836return initialized;
-2837  }
-2838
-2839  /**
-2840   * ServerCrashProcessingEnabled is set 
false before completing assignMeta to prevent processing
-2841   * of crashed servers.
-2842   * @return true if assignMeta has 
completed;
-2843   */
-2844  @Override
-2845  public boolean 
isServerCrashProcessingEnabled() {
-2846return 
serverCrashProcessingEnabled.isReady();
-2847  }
-2848
-2849  @VisibleForTesting
-2850  public void 
setServerCrashProcessingEnabled(final boolean b) {
-2851
procedureExecutor.getEnvironment().setEventReady(serverCrashProcessingEnabled, 
b);
-2852  }
-2853
-2854  public ProcedureEvent? 
getServerCrashProcessingEnabledEvent() {
-2855return 
serverCrashProcessingEnabled;
-2856  }
-2857
-2858  /**
-2859   * Compute the average load across all 
region servers.
-2860   * Currently, this uses a very naive 
computation - just uses the number of
-2861   * regions being served, ignoring 
stats about number of requests.
-2862   * @return the average load
-2863   */
-2864  public double getAverageLoad() {
-2865if (this.assignmentManager == null) 
{
-2866  return 0;
-2867}
-2868
-2869RegionStates regionStates = 
this.assignmentManager.getRegionStates();
-2870if (regionStates == null) {
-2871  return 0;
-2872}
-2873return 
regionStates.getAverageLoad();
-2874  }
-2875
-2876  /*
-2877   * @return the count of region split 
plans executed
-2878   */
-2879  public long getSplitPlanCount() {
-2880return splitPlanCount;
-2881  }
-2882
-2883  /*
-2884   * @return the count of region merge 
plans executed
-2885   */
-2886  public long getMergePlanCount() {
-2887return mergePlanCount;
-2888  }
-2889
-2890  @Override
-2891  public boolean registerService(Service 
instance) {
-2892/*
-2893 * No stacking of instances is 
allowed for a single service name
-2894 */
-2895Descriptors.ServiceDescriptor 
serviceDesc = instance.getDescriptorForType();
-2896String serviceName = 
CoprocessorRpcUtils.getServiceName(serviceDesc);
-2897if 
(coprocessorServiceHandlers.containsKey(serviceName)) {
-2898  LOG.error("Coprocessor service 
"+serviceName+
-2899  " already registered, 
rejecting request from "+instance
-2900  );
-2901  return false;
-2902}
-2903
-2904
coprocessorServiceHandlers.put(serviceName, instance);
-2905if (LOG.isDebugEnabled()) {
-2906  LOG.debug("Registered master 
coprocessor service: service="+serviceName);
-2907}
-2908return true;
-2909  }
-2910
-2911  /**
-2912   * Utility for constructing an 
instance of the passed HMaster class.
-2913   * @param masterClass
-2914   * @return HMaster instance.
-2915   */
-2916  public static HMaster 
constructMaster(Class? extends HMaster masterClass,
-2917  final Configuration conf)  {
-2918try {
-2919  Constructor? extends 
HMaster c = masterClass.getConstructor(Configuration.class);
-2920  return c.newInstance(conf);
-2921} catch(Exception e) {
-2922  Throwable error = e;
-2923  if (e instanceof 
InvocationTargetException 
-2924  
((InvocationTargetException)e).getTargetException() != null) {
-2925error = 
((InvocationTargetException)e).getTargetException();
-2926  }
-2927  throw new RuntimeException("Failed 
construction of Master: " + masterClass.toString() + ". "
-2928, error);
-2929}
-2930  }
-2931
-2932  /**
-2933   * @see 
org.apache.hadoop.hbase.master.HMasterCommandLine
-2934   */
-2935  public static void main(String [] 
args) {
-2936LOG.info("STARTING service " + 
HMaster.class.getSimpleName());
-2937VersionInfo.logVersion();
-2938new 
HMasterCommandLine(HMaster.class).doMain(args);
-2939  }
-2940
-2941  public HFileCleaner getHFileCleaner() 
{
-2942return this.hfileCleaner;
-2943  }
-2944
-2945  public LogCleaner getLogCleaner() {
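The hunk above walks through HMaster utility methods, ending with main(), which delegates to HMasterCommandLine, and constructMaster(), which reflectively invokes the (Configuration) constructor of the requested master class. A hedged sketch of that entry point (it assumes a full master runtime environment, ZooKeeper included; without one the constructor will fail at runtime):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.master.HMaster;

    public class ConstructMasterSketch {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Same path HMasterCommandLine takes: reflective call of masterClass(Configuration).
        HMaster master = HMaster.constructMaster(HMaster.class, conf);
        System.out.println("Constructed " + master.getClass().getName());
      }
    }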

[30/51] [partial] hbase-site git commit: Published site at 997747076d8ec0b4346d7cb99c4b0667a7c14905.

2018-05-30 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4df09ed9/devapidocs/src-html/org/apache/hadoop/hbase/net/Address.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/net/Address.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/net/Address.html
index c893aec..241c271 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/net/Address.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/net/Address.html
@@ -34,7 +34,7 @@
 026 * An immutable type to hold a hostname and port combo, like an Endpoint
 027 * or java.net.InetSocketAddress (but without danger of our calling
 028 * resolve -- we do NOT want a resolve happening every time we want
-029 * to hold a hostname and port combo). This class is also Comparable.
+029 * to hold a hostname and port combo). This class is also {@link Comparable}
 030 * <p>In implementation this class is a facade over Guava's {@link HostAndPort}.
 031 * We cannot have Guava classes in our API hence this Type.
 032 */
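Since the comment above stresses that Address deliberately never resolves the hostname, a short hedged usage sketch (factory names as commonly exposed by org.apache.hadoop.hbase.net.Address; treat them as assumptions):

    import org.apache.hadoop.hbase.net.Address;

    public class AddressSketch {
      public static void main(String[] args) {
        Address a = Address.fromParts("rs1.example.com", 16020); // no DNS lookup happens here
        Address b = Address.fromString("rs1.example.com:16020");
        System.out.println(a.equals(b));                         // true: same hostname and port
        System.out.println(a.getHostname() + ":" + a.getPort());
      }
    }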

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4df09ed9/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html
index 7654a0e..4ff5a79 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html
@@ -150,183 +150,188 @@
 142   * Add a child procedure to execute
 143   * @param subProcedure the child 
procedure
 144   */
-145  protected void 
addChildProcedure(ProcedureTEnvironment... subProcedure) {
-146if (subProcedure == null) return;
-147final int len = 
subProcedure.length;
-148if (len == 0) return;
-149if (subProcList == null) {
-150  subProcList = new 
ArrayList(len);
-151}
-152for (int i = 0; i < len; ++i) {
-153  Procedure<TEnvironment> proc = subProcedure[i];
-154  if (!proc.hasOwner()) 
proc.setOwner(getOwner());
-155  subProcList.add(proc);
+145  protected T extends 
ProcedureTEnvironment void addChildProcedure(
+146  @SuppressWarnings("unchecked") T... 
subProcedure) {
+147if (subProcedure == null) {
+148  return;
+149}
+150final int len = 
subProcedure.length;
+151if (len == 0) {
+152  return;
+153}
+154if (subProcList == null) {
+155  subProcList = new 
ArrayList(len);
 156}
-157  }
-158
-159  @Override
-160  protected Procedure[] execute(final 
TEnvironment env)
-161  throws ProcedureSuspendedException, 
ProcedureYieldException, InterruptedException {
-162updateTimestamp();
-163try {
-164  failIfAborted();
-165
-166  if (!hasMoreState() || isFailed()) 
return null;
-167  TState state = getCurrentState();
-168  if (stateCount == 0) {
-169
setNextState(getStateId(state));
-170  }
-171
-172  if (LOG.isTraceEnabled()) {
-173LOG.trace(state  + " " + this + 
"; cycles=" + this.cycles);
-174  }
-175  // Keep running count of cycles
-176  if (getStateId(state) != 
this.previousState) {
-177this.previousState = 
getStateId(state);
-178this.cycles = 0;
-179  } else {
-180this.cycles++;
-181  }
-182
-183  LOG.trace("{}", toString());
-184  stateFlow = executeFromState(env, 
state);
-185  if (!hasMoreState()) 
setNextState(EOF_STATE);
-186  if (subProcList != null && !subProcList.isEmpty()) {
-187Procedure[] subProcedures = 
subProcList.toArray(new Procedure[subProcList.size()]);
-188subProcList = null;
-189return subProcedures;
-190  }
-191  return (isWaiting() || isFailed() 
|| !hasMoreState()) ? null : new Procedure[] {this};
-192} finally {
-193  updateTimestamp();
-194}
-195  }
-196
-197  @Override
-198  protected void rollback(final 
TEnvironment env)
-199  throws IOException, 
InterruptedException {
-200if (isEofState()) stateCount--;
-201try {
-202  updateTimestamp();
-203  rollbackState(env, 
getCurrentState());
-204  stateCount--;
-205} finally {
-206  updateTimestamp();
-207}
-208  }
-209
-210  private boolean isEofState() {
-211return stateCount > 0 && states[stateCount-1] == EOF_STATE;
-212  }
-213
-214  @Override
-215  protected boolean abort(final 
TEnvironment env) {
-216LOG.debug("Abort requested for {}", 
this);
-217if (hasMoreState()) {
-218  aborted.set(true);
-219  return true;
-220}
-221LOG.debug("Ignoring abort request on 
{}", this);
-222return false;
-223  }
-224
-225  /**
-226   * If procedure has more states then 
abort it otherwise procedure is finished and abort can be
-227   * ignored.
-228   */
-229  protected final 
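The StateMachineProcedure hunk above generifies addChildProcedure into a bounded-type varargs method with an @SuppressWarnings("unchecked") parameter, so subclasses can pass concrete procedure types without raw-type warnings. A self-contained, hedged illustration of that signature pattern (stand-in types, not the real procedure classes):

    import java.util.ArrayList;
    import java.util.List;

    class Proc<TEnv> { }

    class StateMachineSketch<TEnv> {
      private List<Proc<TEnv>> subProcList;

      protected final <T extends Proc<TEnv>> void addChildProcedure(
          @SuppressWarnings("unchecked") T... subProcedure) {
        if (subProcedure == null || subProcedure.length == 0) {
          return;                                  // nothing to schedule
        }
        if (subProcList == null) {
          subProcList = new ArrayList<>(subProcedure.length);
        }
        for (T proc : subProcedure) {
          subProcList.add(proc);                   // children run before the parent resumes
        }
      }
    }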

[30/51] [partial] hbase-site git commit: Published site at f3d1c021de2264301f68eadb9ef126ff83d7ef53.

2018-05-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/883dde2f/devapidocs/src-html/org/apache/hadoop/hbase/CellComparatorImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/CellComparatorImpl.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/CellComparatorImpl.html
index 05d2c3c..6e02e32 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/CellComparatorImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/CellComparatorImpl.html
@@ -26,441 +26,376 @@
 018
 019package org.apache.hadoop.hbase;
 020
-021import 
org.apache.hadoop.hbase.KeyValue.Type;
-022import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-023import 
org.apache.hadoop.hbase.util.Bytes;
-024import 
org.apache.yetus.audience.InterfaceAudience;
-025import 
org.apache.yetus.audience.InterfaceStability;
-026import org.slf4j.Logger;
-027import org.slf4j.LoggerFactory;
-028
-029import 
org.apache.hbase.thirdparty.com.google.common.primitives.Longs;
-030
-031/**
-032 * Compare two HBase cells.  Do not use this method comparing <code>-ROOT-</code> or
-033 * <code>hbase:meta</code> cells.  Cells from these tables need a specialized comparator, one that
-034 * takes account of the special formatting of the row where we have commas to delimit table from
-035 * regionname, from row.  See KeyValue for how it has a special comparator to do hbase:meta cells
-036 * and yet another for -ROOT-.
-037 * While using this comparator for {@link #compareRows(Cell, Cell)} et al, the hbase:meta cells
-038 * format should be taken into consideration, for which the instance of this comparator
-039 * should be used.  In all other cases the static APIs in this comparator would be enough
-040 */
-041@edu.umd.cs.findbugs.annotations.SuppressWarnings(
-042value="UNKNOWN",
-043justification="Findbugs doesn't like 
the way we are negating the result of a compare in below")
-044@InterfaceAudience.Private
-045@InterfaceStability.Evolving
-046public class CellComparatorImpl 
implements CellComparator {
-047  static final Logger LOG = 
LoggerFactory.getLogger(CellComparatorImpl.class);
-048  /**
-049   * Comparator for plain key/values; 
i.e. non-catalog table key/values. Works on Key portion
-050   * of KeyValue only.
-051   */
-052  public static final CellComparatorImpl 
COMPARATOR = new CellComparatorImpl();
-053  /**
-054   * A {@link CellComparatorImpl} for 
codehbase:meta/code catalog table
-055   * {@link KeyValue}s.
-056   */
-057  public static final CellComparatorImpl 
META_COMPARATOR = new MetaCellComparator();
-058
-059  @Override
-060  public int compare(Cell a, Cell b) {
-061return compare(a, b, false);
-062  }
-063
-064  /**
-065   * Compare cells.
-066   * @param ignoreSequenceid True if we 
are to compare the key portion only and ignore
-067   *  the sequenceid. Set to false to 
compare key and consider sequenceid.
-068   * @return 0 if equal, -1 if a lt; 
b, and +1 if a gt; b.
-069   */
-070  @Override
-071  public int compare(final Cell a, final 
Cell b, boolean ignoreSequenceid) {
-072int diff = 0;
-073if (a instanceof ByteBufferKeyValue && b instanceof ByteBufferKeyValue) {
-074  diff = 
compareByteBufferKeyValue((ByteBufferKeyValue)a, (ByteBufferKeyValue)b);
-075  if (diff != 0) {
-076return diff;
-077  }
-078} else {
-079  diff = compareRows(a, b);
-080  if (diff != 0) {
-081return diff;
-082  }
-083
-084  diff = compareWithoutRow(a, b);
-085  if (diff != 0) {
-086return diff;
-087  }
-088}
-089
-090// Negate following comparisons so 
later edits show up first mvccVersion: later sorts first
-091return ignoreSequenceid? diff: 
Longs.compare(b.getSequenceId(), a.getSequenceId());
-092  }
-093
-094  /**
-095   * Specialized comparator for the 
ByteBufferKeyValue type exclusivesly.
-096   * Caches deserialized lengths of rows 
and families, etc., and reuses them where it can
-097   * (ByteBufferKeyValue has been changed 
to be amenable to our providing pre-made lengths, etc.)
-098   */
-099  private static final int 
compareByteBufferKeyValue(ByteBufferKeyValue left,
-100  ByteBufferKeyValue right) {
-101// Compare Rows. Cache row length.
-102int leftRowLength = 
left.getRowLength();
-103int rightRowLength = 
right.getRowLength();
-104int diff = 
ByteBufferUtils.compareTo(
-105left.getRowByteBuffer(), 
left.getRowPosition(), leftRowLength,
-106right.getRowByteBuffer(), 
right.getRowPosition(), rightRowLength);
-107if (diff != 0) {
-108  return diff;
-109}
-110
-111// If the column is not specified, 
the "minimum" key type appears the
-112// latest in the sorted order, 
regardless of the timestamp. This is used
-113// for specifying the last key/value 
in a given row, because there is no
-114// "lexicographically last column" 
(it would be infinitely long). The
-115// "maximum" key type does not need 

[30/51] [partial] hbase-site git commit: Published site at cf529f18a9959589fa635f78df4840472526ea2c.

2018-05-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7bcc960d/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
index a97dfdc..2b1b6c6 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
@@ -2370,1287 +2370,1292 @@
 2362  }
 2363
 2364  @Override
-2365  public long modifyTable(final 
TableName tableName, final TableDescriptor descriptor,
+2365  public long modifyTable(final 
TableName tableName, final TableDescriptor newDescriptor,
 2366  final long nonceGroup, final long 
nonce) throws IOException {
 2367checkInitialized();
-2368
sanityCheckTableDescriptor(descriptor);
+2368
sanityCheckTableDescriptor(newDescriptor);
 2369
 2370return 
MasterProcedureUtil.submitProcedure(
 2371new 
MasterProcedureUtil.NonceProcedureRunnable(this, nonceGroup, nonce) {
 2372  @Override
 2373  protected void run() throws 
IOException {
-2374
getMaster().getMasterCoprocessorHost().preModifyTable(tableName, descriptor);
-2375
-2376
LOG.info(getClientIdAuditPrefix() + " modify " + tableName);
+2374TableDescriptor oldDescriptor = 
getMaster().getTableDescriptors().get(tableName);
+2375
getMaster().getMasterCoprocessorHost()
+2376  .preModifyTable(tableName, 
oldDescriptor, newDescriptor);
 2377
-2378// Execute the operation 
synchronously - wait for the operation completes before continuing.
-2379//
-2380// We need to wait for the 
procedure to potentially fail due to "prepare" sanity
-2381// checks. This will block only 
the beginning of the procedure. See HBASE-19953.
-2382ProcedurePrepareLatch latch = 
ProcedurePrepareLatch.createBlockingLatch();
-2383submitProcedure(new 
ModifyTableProcedure(procedureExecutor.getEnvironment(),
-2384descriptor, latch));
-2385latch.await();
-2386
-2387
getMaster().getMasterCoprocessorHost().postModifyTable(tableName, 
descriptor);
-2388  }
-2389
-2390  @Override
-2391  protected String getDescription() 
{
-2392return "ModifyTableProcedure";
-2393  }
-2394});
-2395  }
-2396
-2397  public long restoreSnapshot(final 
SnapshotDescription snapshotDesc,
-2398  final long nonceGroup, final long 
nonce, final boolean restoreAcl) throws IOException {
-2399checkInitialized();
-2400
getSnapshotManager().checkSnapshotSupport();
-2401
-2402// Ensure namespace exists. Will 
throw exception if non-known NS.
-2403final TableName dstTable = 
TableName.valueOf(snapshotDesc.getTable());
-2404
getClusterSchema().getNamespace(dstTable.getNamespaceAsString());
-2405
-2406return 
MasterProcedureUtil.submitProcedure(
-2407new 
MasterProcedureUtil.NonceProcedureRunnable(this, nonceGroup, nonce) {
-2408  @Override
-2409  protected void run() throws 
IOException {
-2410  setProcId(
-2411
getSnapshotManager().restoreOrCloneSnapshot(snapshotDesc, getNonceKey(), 
restoreAcl));
-2412  }
-2413
-2414  @Override
-2415  protected String getDescription() 
{
-2416return 
"RestoreSnapshotProcedure";
-2417  }
-2418});
-2419  }
-2420
-2421  private void checkTableExists(final 
TableName tableName)
-2422  throws IOException, 
TableNotFoundException {
-2423if 
(!MetaTableAccessor.tableExists(getConnection(), tableName)) {
-2424  throw new 
TableNotFoundException(tableName);
-2425}
-2426  }
-2427
-2428  @Override
-2429  public void checkTableModifiable(final 
TableName tableName)
-2430  throws IOException, 
TableNotFoundException, TableNotDisabledException {
-2431if (isCatalogTable(tableName)) {
-2432  throw new IOException("Can't 
modify catalog tables");
-2433}
-2434checkTableExists(tableName);
-2435TableState ts = 
getTableStateManager().getTableState(tableName);
-2436if (!ts.isDisabled()) {
-2437  throw new 
TableNotDisabledException("Not DISABLED; " + ts);
-2438}
-2439  }
-2440
-2441  public ClusterMetrics 
getClusterMetricsWithoutCoprocessor() throws InterruptedIOException {
-2442return 
getClusterMetricsWithoutCoprocessor(EnumSet.allOf(Option.class));
-2443  }
-2444
-2445  public ClusterMetrics 
getClusterMetricsWithoutCoprocessor(EnumSetOption options)
-2446  throws InterruptedIOException {
-2447ClusterMetricsBuilder builder = 
ClusterMetricsBuilder.newBuilder();
-2448// given that hbase1 can't submit 
the request with Option,
-2449// we return all information to 
client if the list of Option is empty.
-2450if (options.isEmpty()) {
-2451  options = 
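The hunk above shows the master now fetching the current table descriptor so preModifyTable sees both the old and the new schema, and it also covers the restoreSnapshot submission path. A hedged client-side sketch of the restore that ends up in RestoreSnapshotProcedure (assumes a Connection conn, a snapshot named "snap1", and that its table is disabled):

    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;

    public class RestoreSnapshotSketch {
      static void run(Connection conn) throws Exception {
        try (Admin admin = conn.getAdmin()) {
          // The master resolves the target table from the snapshot and checks its namespace
          // before submitting the procedure, as in the hunk above.
          admin.restoreSnapshot("snap1");
        }
      }
    }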

[30/51] [partial] hbase-site git commit: Published site at 021f66d11d2cbb7308308093e29e69d6e7661ee9.

2018-05-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/92a26cfb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactingMemStore.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactingMemStore.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactingMemStore.html
index d4390be..1037b84 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactingMemStore.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactingMemStore.html
@@ -163,12 +163,12 @@
 155   */
 156  @Override
 157  public MemStoreSize size() {
-158MemStoreSizing memstoreSizing = new 
MemStoreSizing();
+158MemStoreSizing memstoreSizing = new 
NonThreadSafeMemStoreSizing();
 159
memstoreSizing.incMemStoreSize(active.getMemStoreSize());
 160for (Segment item : 
pipeline.getSegments()) {
 161  
memstoreSizing.incMemStoreSize(item.getMemStoreSize());
 162}
-163return memstoreSizing;
+163return 
memstoreSizing.getMemStoreSize();
 164  }
 165
 166  /**
@@ -224,369 +224,365 @@
 216return new 
MemStoreSnapshot(snapshotId, this.snapshot);
 217  }
 218
-219  /**
-220   * On flush, how much memory we will 
clear.
-221   * @return size of data that is going 
to be flushed
-222   */
-223  @Override
-224  public MemStoreSize getFlushableSize() 
{
-225MemStoreSizing snapshotSizing = 
getSnapshotSizing();
-226if (snapshotSizing.getDataSize() == 
0) {
-227  // if snapshot is empty the tail of 
the pipeline (or everything in the memstore) is flushed
-228  if (compositeSnapshot) {
-229snapshotSizing = 
pipeline.getPipelineSizing();
-230
snapshotSizing.incMemStoreSize(active.getMemStoreSize());
-231  } else {
-232snapshotSizing = 
pipeline.getTailSizing();
-233  }
-234}
-235return snapshotSizing.getDataSize() > 0 ? snapshotSizing
-236: new 
MemStoreSize(active.getMemStoreSize());
-237  }
-238
-239  @Override
-240  protected long keySize() {
-241// Need to consider keySize of all 
segments in pipeline and active
-242long k = this.active.keySize();
-243for (Segment segment : 
this.pipeline.getSegments()) {
-244  k += segment.keySize();
-245}
-246return k;
-247  }
-248
-249  @Override
-250  protected long heapSize() {
-251// Need to consider heapOverhead of 
all segments in pipeline and active
-252long h = this.active.heapSize();
-253for (Segment segment : 
this.pipeline.getSegments()) {
-254  h += segment.heapSize();
-255}
-256return h;
-257  }
-258
-259  @Override
-260  public void 
updateLowestUnflushedSequenceIdInWAL(boolean onlyIfGreater) {
-261long minSequenceId = 
pipeline.getMinSequenceId();
-262if(minSequenceId != Long.MAX_VALUE) 
{
-263  byte[] encodedRegionName = 
getRegionServices().getRegionInfo().getEncodedNameAsBytes();
-264  byte[] familyName = 
getFamilyNameInBytes();
-265  WAL WAL = 
getRegionServices().getWAL();
-266  if (WAL != null) {
-267
WAL.updateStore(encodedRegionName, familyName, minSequenceId, onlyIfGreater);
-268  }
-269}
-270  }
-271
-272  /**
-273   * This message intends to inform the 
MemStore that next coming updates
-274   * are going to be part of the 
replaying edits from WAL
-275   */
-276  @Override
-277  public void startReplayingFromWAL() {
-278inWalReplay = true;
-279  }
-280
-281  /**
-282   * This message intends to inform the 
MemStore that the replaying edits from WAL
-283   * are done
-284   */
-285  @Override
-286  public void stopReplayingFromWAL() {
-287inWalReplay = false;
-288  }
-289
-290  // the getSegments() method is used for 
tests only
-291  @VisibleForTesting
-292  @Override
-293  protected ListSegment 
getSegments() {
-294List? extends Segment 
pipelineList = pipeline.getSegments();
-295ListSegment list = new 
ArrayList(pipelineList.size() + 2);
-296list.add(this.active);
-297list.addAll(pipelineList);
-298
list.addAll(this.snapshot.getAllSegments());
-299
-300return list;
-301  }
-302
-303  // the following three methods allow to 
manipulate the settings of composite snapshot
-304  public void 
setCompositeSnapshot(boolean useCompositeSnapshot) {
-305this.compositeSnapshot = 
useCompositeSnapshot;
-306  }
-307
-308  public boolean 
swapCompactedSegments(VersionedSegmentsList versionedList, ImmutableSegment 
result,
-309  boolean merge) {
-310// last true stands for updating the 
region size
-311return pipeline.swap(versionedList, 
result, !merge, true);
-312  }
-313
-314  /**
-315   * @param requesterVersion The caller 
must hold the VersionedList of the pipeline
-316   *   with version taken 
earlier. This version must be passed as a parameter here.
-317   *   The flattening happens 
only if versions match.
-318   */
-319  public void flattenOneSegment(long 
requesterVersion,  
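CompactingMemStore, whose sizing and pipeline accounting the hunk above reworks, is only used when in-memory compaction is enabled for a column family. A hedged sketch of turning it on at table-creation time (API names as in the 2.x client builders; assumes an Admin handle):

    import org.apache.hadoop.hbase.MemoryCompactionPolicy;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
    import org.apache.hadoop.hbase.util.Bytes;

    public class InMemoryCompactionSketch {
      static void create(Admin admin) throws Exception {
        admin.createTable(TableDescriptorBuilder.newBuilder(TableName.valueOf("t2"))
            .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("f"))
                // BASIC keeps a pipeline of flat segments; EAGER also merges and drops duplicates.
                .setInMemoryCompaction(MemoryCompactionPolicy.BASIC)
                .build())
            .build());
      }
    }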

[30/51] [partial] hbase-site git commit: Published site at acd0d1e446c164d9c54bfb461b2d449c8d717c07.

2018-05-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f2065178/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncRandomWriteTest.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncRandomWriteTest.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncRandomWriteTest.html
index 2510283..418c60c 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncRandomWriteTest.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncRandomWriteTest.html
@@ -77,77 +77,77 @@
 069import 
org.apache.hadoop.hbase.client.RowMutations;
 070import 
org.apache.hadoop.hbase.client.Scan;
 071import 
org.apache.hadoop.hbase.client.Table;
-072import 
org.apache.hadoop.hbase.filter.BinaryComparator;
-073import 
org.apache.hadoop.hbase.filter.Filter;
-074import 
org.apache.hadoop.hbase.filter.FilterAllFilter;
-075import 
org.apache.hadoop.hbase.filter.FilterList;
-076import 
org.apache.hadoop.hbase.filter.PageFilter;
-077import 
org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
-078import 
org.apache.hadoop.hbase.filter.WhileMatchFilter;
-079import 
org.apache.hadoop.hbase.io.compress.Compression;
-080import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-081import 
org.apache.hadoop.hbase.io.hfile.RandomDistribution;
-082import 
org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
-083import 
org.apache.hadoop.hbase.regionserver.BloomType;
-084import 
org.apache.hadoop.hbase.regionserver.CompactingMemStore;
-085import 
org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
-086import 
org.apache.hadoop.hbase.trace.SpanReceiverHost;
-087import 
org.apache.hadoop.hbase.trace.TraceUtil;
-088import 
org.apache.hadoop.hbase.util.ByteArrayHashKey;
-089import 
org.apache.hadoop.hbase.util.Bytes;
-090import 
org.apache.hadoop.hbase.util.Hash;
-091import 
org.apache.hadoop.hbase.util.MurmurHash;
-092import 
org.apache.hadoop.hbase.util.Pair;
-093import 
org.apache.hadoop.hbase.util.YammerHistogramUtils;
-094import 
org.apache.hadoop.io.LongWritable;
-095import org.apache.hadoop.io.Text;
-096import org.apache.hadoop.mapreduce.Job;
-097import 
org.apache.hadoop.mapreduce.Mapper;
-098import 
org.apache.hadoop.mapreduce.lib.input.NLineInputFormat;
-099import 
org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-100import 
org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
-101import org.apache.hadoop.util.Tool;
-102import 
org.apache.hadoop.util.ToolRunner;
-103import 
org.apache.htrace.core.ProbabilitySampler;
-104import org.apache.htrace.core.Sampler;
-105import 
org.apache.htrace.core.TraceScope;
-106import 
org.apache.yetus.audience.InterfaceAudience;
-107import org.slf4j.Logger;
-108import org.slf4j.LoggerFactory;
-109import 
org.apache.hbase.thirdparty.com.google.common.base.MoreObjects;
-110import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-111
-112/**
-113 * Script used evaluating HBase performance and scalability.  Runs a HBase
-114 * client that steps through one of a set of hardcoded tests or 'experiments'
-115 * (e.g. a random reads test, a random writes test, etc.). Pass on the
-116 * command-line which test to run and how many clients are participating in
-117 * this experiment. Run {@code PerformanceEvaluation --help} to obtain usage.
-118 *
-119 * <p>This class sets up and runs the evaluation programs described in
-120 * Section 7, <i>Performance Evaluation</i>, of the <a
-121 * href="http://labs.google.com/papers/bigtable.html">Bigtable</a>
-122 * paper, pages 8-10.
-123 *
-124 * <p>By default, runs as a mapreduce job where each mapper runs a single test
-125 * client. Can also run as a non-mapreduce, multithreaded application by
-126 * specifying {@code --nomapred}. Each client does about 1GB of data, unless
-127 * specified otherwise.
-128 */
-129@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-130public class PerformanceEvaluation 
extends Configured implements Tool {
-131  static final String RANDOM_SEEK_SCAN = 
"randomSeekScan";
-132  static final String RANDOM_READ = 
"randomRead";
-133  private static final Logger LOG = 
LoggerFactory.getLogger(PerformanceEvaluation.class.getName());
-134  private static final ObjectMapper 
MAPPER = new ObjectMapper();
-135  static {
-136
MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
-137  }
-138
-139  public static final String TABLE_NAME = 
"TestTable";
-140  public static final byte[] FAMILY_NAME 
= Bytes.toBytes("info");
-141  public static final byte [] COLUMN_ZERO 
= Bytes.toBytes("" + 0);
-142  public static final byte [] 
QUALIFIER_NAME = COLUMN_ZERO;
+072import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+073import 
org.apache.hadoop.hbase.filter.BinaryComparator;
+074import 
org.apache.hadoop.hbase.filter.Filter;
+075import 

[30/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
index e1bc325..63e7421 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 
org.apache.hadoop.ipc.RemoteException;
-135import 

[30/51] [partial] hbase-site git commit: Published site at 2912c953551bedbfbf30c32c156ed7bb187d54c3.

2018-04-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d220bc5e/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
index e63cd50..d8c0d2b 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
@@ -422,7 +422,7 @@
 414  }
 415
 416  /**
-417   * {@link #listTables(boolean)}
+417   * {@link 
#listTableDescriptors(boolean)}
 418   */
 419  @Override
 420  public 
CompletableFutureListTableDescriptor 
listTableDescriptors(Pattern pattern,
@@ -3476,16 +3476,79 @@
 3468return future;
 3469  }
 3470
-3471  private 
CompletableFutureCacheEvictionStats clearBlockCache(ServerName 
serverName,
-3472  ListRegionInfo hris) {
-3473return 
this.CacheEvictionStats newAdminCaller().action((controller, stub) 
- this
-3474  .ClearRegionBlockCacheRequest, 
ClearRegionBlockCacheResponse, CacheEvictionStats adminCall(
-3475controller, stub, 
RequestConverter.buildClearRegionBlockCacheRequest(hris),
-3476(s, c, req, done) - 
s.clearRegionBlockCache(controller, req, done),
-3477resp - 
ProtobufUtil.toCacheEvictionStats(resp.getStats(
-3478  .serverName(serverName).call();
-3479  }
-3480}
+3471  @Override
+3472  public CompletableFutureVoid 
cloneTableSchema(TableName tableName, TableName newTableName,
+3473  boolean preserveSplits) {
+3474CompletableFutureVoid future 
= new CompletableFuture();
+3475
tableExists(tableName).whenComplete(
+3476  (exist, err) - {
+3477if (err != null) {
+3478  
future.completeExceptionally(err);
+3479  return;
+3480}
+3481if (!exist) {
+3482  
future.completeExceptionally(new TableNotFoundException(tableName));
+3483  return;
+3484}
+3485
tableExists(newTableName).whenComplete(
+3486  (exist1, err1) - {
+3487if (err1 != null) {
+3488  
future.completeExceptionally(err1);
+3489  return;
+3490}
+3491if (exist1) {
+3492  
future.completeExceptionally(new TableExistsException(newTableName));
+3493  return;
+3494}
+3495
getDescriptor(tableName).whenComplete(
+3496  (tableDesc, err2) - 
{
+3497if (err2 != null) {
+3498  
future.completeExceptionally(err2);
+3499  return;
+3500}
+3501TableDescriptor 
newTableDesc
+3502= 
TableDescriptorBuilder.copy(newTableName, tableDesc);
+3503if (preserveSplits) {
+3504  
getTableSplits(tableName).whenComplete((splits, err3) - {
+3505if (err3 != null) 
{
+3506  
future.completeExceptionally(err3);
+3507} else {
+3508  
createTable(newTableDesc, splits).whenComplete(
+3509(result, err4) 
- {
+3510  if (err4 != 
null) {
+3511
future.completeExceptionally(err4);
+3512  } else {
+3513
future.complete(result);
+3514  }
+3515});
+3516}
+3517  });
+3518} else {
+3519  
createTable(newTableDesc).whenComplete(
+3520(result, err5) - 
{
+3521  if (err5 != null) 
{
+3522
future.completeExceptionally(err5);
+3523  } else {
+3524
future.complete(result);
+3525  }
+3526});
+3527}
+3528  });
+3529  });
+3530  });
+3531return future;
+3532  }
+3533
+3534  private 
CompletableFutureCacheEvictionStats clearBlockCache(ServerName 
serverName,
+3535  ListRegionInfo hris) {
+3536return 
this.CacheEvictionStats newAdminCaller().action((controller, stub) 
- this
+3537  .ClearRegionBlockCacheRequest, 
ClearRegionBlockCacheResponse, CacheEvictionStats adminCall(
+3538controller, stub, 
RequestConverter.buildClearRegionBlockCacheRequest(hris),
+3539(s, c, req, done) - 
s.clearRegionBlockCache(controller, req, done),
+3540resp - 
ProtobufUtil.toCacheEvictionStats(resp.getStats(
+3541  .serverName(serverName).call();
+3542  }
+3543}
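The block above is the full RawAsyncHBaseAdmin implementation of cloneTableSchema: verify the source table exists, verify the target does not, copy the descriptor, and optionally reuse the split points. A hedged sketch of calling it through the public AsyncAdmin interface (assumes an AsyncConnection conn and an existing table "t1"):

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.AsyncAdmin;
    import org.apache.hadoop.hbase.client.AsyncConnection;

    public class CloneSchemaSketch {
      static void run(AsyncConnection conn) {
        AsyncAdmin admin = conn.getAdmin();
        // Copies t1's schema to t1_clone, preserving the original split points.
        admin.cloneTableSchema(TableName.valueOf("t1"), TableName.valueOf("t1_clone"), true)
            .whenComplete((v, err) -> {
              if (err != null) {
                err.printStackTrace(); // e.g. TableNotFoundException or TableExistsException, as above
              } else {
                System.out.println("schema cloned");
              }
            });
      }
    }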
 
 
 


[30/51] [partial] hbase-site git commit: Published site at 2a2258656b2fcd92b967131b6c1f037363553bc4.

2018-03-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e0fb1fde/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html 
b/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
index aec17e1..ee6838f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -180,7 +180,7 @@ implements 
-private https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+(package private) https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 regionsZNode
 
 
@@ -415,24 +415,30 @@ implements 
 void
+removeLastSequenceIds(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringpeerId)
+Remove all the max sequence id record for the given 
peer.
+
+
+
+void
 removePeerFromHFileRefs(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringpeerId)
 Remove a peer from hfile reference queue.
 
 
-
+
 void
 removeQueue(ServerNameserverName,
https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringqueueId)
 Remove a replication queue for a given regionserver.
 
 
-
+
 void
 removeReplicatorIfQueueIsEmpty(ServerNameserverName)
 Remove the record of region server if the queue is 
empty.
 
 
-
+
 void
 removeWAL(ServerNameserverName,
  https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringqueueId,
@@ -440,14 +446,14 @@ implements Remove an WAL file from the given queue for a given 
regionserver.
 
 
-
+
 void
 setLastSequenceIds(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringpeerId,
   https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">LonglastSeqIds)
 Set the max sequence id of a bunch of regions for a given 
peer.
 
 
-
+
 void
 setWALPosition(ServerNameserverName,
   https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringqueueId,
@@ -573,7 +579,7 @@ implements 
 
 regionsZNode
-private finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String regionsZNode
+finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String regionsZNode
 
 
 
@@ -590,7 +596,7 @@ implements 
 
 ZKReplicationQueueStorage
-publicZKReplicationQueueStorage(ZKWatcherzookeeper,
+publicZKReplicationQueueStorage(ZKWatcherzookeeper,
  
org.apache.hadoop.conf.Configurationconf)
 
 
@@ -608,7 +614,7 @@ implements 
 
 getRsNode
-privatehttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetRsNode(ServerNameserverName)
+privatehttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetRsNode(ServerNameserverName)
 
 
 
@@ -617,7 +623,7 @@ implements 
 
 getQueueNode
-privatehttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 

[30/51] [partial] hbase-site git commit: Published site at e468b4022f76688851b3e0c34722f01a56bd624f.

2018-03-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16541468/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
index 2f412f5..96c070a 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
@@ -121,7 +121,7 @@
 113  protected Mutation(Mutation clone) {
 114super(clone);
 115this.row = clone.getRow();
-116this.ts = clone.getTimeStamp();
+116this.ts = clone.getTimestamp();
 117this.familyMap = 
clone.getFamilyCellMap().entrySet().stream()
 118  .collect(Collectors.toMap(e - 
e.getKey(), e - new ArrayList(e.getValue()),
 119(k, v) - {
@@ -352,617 +352,629 @@
 344  /**
 345   * Method for retrieving the 
timestamp
 346   * @return timestamp
-347   */
-348  public long getTimeStamp() {
-349return this.ts;
-350  }
-351
-352  /**
-353   * Marks that the clusters with the 
given clusterIds have consumed the mutation
-354   * @param clusterIds of the clusters 
that have consumed the mutation
-355   */
-356  public Mutation 
setClusterIds(ListUUID clusterIds) {
-357ByteArrayDataOutput out = 
ByteStreams.newDataOutput();
-358out.writeInt(clusterIds.size());
-359for (UUID clusterId : clusterIds) {
-360  
out.writeLong(clusterId.getMostSignificantBits());
-361  
out.writeLong(clusterId.getLeastSignificantBits());
-362}
-363setAttribute(CONSUMED_CLUSTER_IDS, 
out.toByteArray());
-364return this;
-365  }
-366
-367  /**
-368   * @return the set of clusterIds that 
have consumed the mutation
-369   */
-370  public ListUUID getClusterIds() 
{
-371ListUUID clusterIds = new 
ArrayList();
-372byte[] bytes = 
getAttribute(CONSUMED_CLUSTER_IDS);
-373if(bytes != null) {
-374  ByteArrayDataInput in = 
ByteStreams.newDataInput(bytes);
-375  int numClusters = in.readInt();
-376  for(int i=0; inumClusters; 
i++){
-377clusterIds.add(new 
UUID(in.readLong(), in.readLong()));
-378  }
-379}
-380return clusterIds;
-381  }
-382
-383  /**
-384   * Sets the visibility expression 
associated with cells in this Mutation.
-385   * @param expression
-386   */
-387  public Mutation 
setCellVisibility(CellVisibility expression) {
-388
this.setAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY,
-389
toCellVisibility(expression).toByteArray());
-390return this;
-391  }
-392
-393  /**
-394   * @return CellVisibility associated 
with cells in this Mutation.
-395   * @throws DeserializationException
-396   */
-397  public CellVisibility 
getCellVisibility() throws DeserializationException {
-398byte[] cellVisibilityBytes = 
this.getAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY);
-399if (cellVisibilityBytes == null) 
return null;
-400return 
toCellVisibility(cellVisibilityBytes);
-401  }
-402
-403  /**
-404   * Create a protocol buffer 
CellVisibility based on a client CellVisibility.
-405   *
-406   * @param cellVisibility
-407   * @return a protocol buffer 
CellVisibility
+347   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0.
+348   * Use {@link 
#getTimestamp()} instead
+349   */
+350  @Deprecated
+351  public long getTimeStamp() {
+352return this.getTimestamp();
+353  }
+354
+355  /**
+356   * Method for retrieving the 
timestamp.
+357   *
+358   * @return timestamp
+359   */
+360  public long getTimestamp() {
+361return this.ts;
+362  }
+363
+364  /**
+365   * Marks that the clusters with the 
given clusterIds have consumed the mutation
+366   * @param clusterIds of the clusters 
that have consumed the mutation
+367   */
+368  public Mutation 
setClusterIds(List<UUID> clusterIds) {
+369ByteArrayDataOutput out = 
ByteStreams.newDataOutput();
+370out.writeInt(clusterIds.size());
+371for (UUID clusterId : clusterIds) {
+372  
out.writeLong(clusterId.getMostSignificantBits());
+373  
out.writeLong(clusterId.getLeastSignificantBits());
+374}
+375setAttribute(CONSUMED_CLUSTER_IDS, 
out.toByteArray());
+376return this;
+377  }
+378
+379  /**
+380   * @return the set of clusterIds that 
have consumed the mutation
+381   */
+382  public List<UUID> getClusterIds() {
+383List<UUID> clusterIds = new ArrayList<>();
+384byte[] bytes = 
getAttribute(CONSUMED_CLUSTER_IDS);
+385if(bytes != null) {
+386  ByteArrayDataInput in = 
ByteStreams.newDataInput(bytes);
+387  int numClusters = in.readInt();
+388  for(int i=0; i<numClusters; i++){
+389clusterIds.add(new 
UUID(in.readLong(), in.readLong()));
+390  }
+391}
+392return clusterIds;
+393  }
+394
+395  /**
+396   * Sets the visibility expression 
associated with cells in this Mutation.
+397   * @param expression
+398   */
+399  public 
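
The Mutation.html hunk above deprecates getTimeStamp() (slated for removal in HBase 3.0.0) and delegates it to the new getTimestamp() accessor. A minimal migration sketch, assuming a Put built with an explicit timestamp (class name and values are illustrative):

    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class TimestampMigration {
      public static void main(String[] args) {
        // Put is a Mutation, so the accessor rename applies to it as well.
        Put put = new Put(Bytes.toBytes("row-1"), 1234567890L);

        // Old spelling, deprecated by the change above:
        // long ts = put.getTimeStamp();

        // Replacement introduced in the same change:
        long ts = put.getTimestamp();
        System.out.println("mutation timestamp = " + ts);
      }
    }
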

[30/51] [partial] hbase-site git commit: Published site at 64061f896fe21512504e3886a400759e88b519da.

2018-03-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f1ebf5b6/devapidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html
index cabc286..e959408 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html
@@ -128,7 +128,7 @@
 
 
 boolean
-ClusterConnection.isMasterRunning()
+ConnectionImplementation.isMasterRunning()
 Deprecated.
 this has been deprecated 
without a replacement
 
@@ -136,7 +136,7 @@
 
 
 boolean
-ConnectionImplementation.isMasterRunning()
+ClusterConnection.isMasterRunning()
 Deprecated.
 this has been deprecated 
without a replacement
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f1ebf5b6/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
index 26611ed..bba209a 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
@@ -270,31 +270,31 @@
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureNamespaceDescriptor
-AsyncAdmin.getNamespaceDescriptor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
-Get a namespace descriptor by name
-
+AsyncHBaseAdmin.getNamespaceDescriptor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureNamespaceDescriptor
-RawAsyncHBaseAdmin.getNamespaceDescriptor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+AsyncAdmin.getNamespaceDescriptor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+Get a namespace descriptor by name
+
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureNamespaceDescriptor
-AsyncHBaseAdmin.getNamespaceDescriptor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+RawAsyncHBaseAdmin.getNamespaceDescriptor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListNamespaceDescriptor
-AsyncAdmin.listNamespaceDescriptors()
-List available namespace descriptors
-
+AsyncHBaseAdmin.listNamespaceDescriptors()
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListNamespaceDescriptor
-RawAsyncHBaseAdmin.listNamespaceDescriptors()
+AsyncAdmin.listNamespaceDescriptors()
+List available namespace descriptors
+
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListNamespaceDescriptor
-AsyncHBaseAdmin.listNamespaceDescriptors()
+RawAsyncHBaseAdmin.listNamespaceDescriptors()
 
 
 
@@ -307,9 +307,7 @@
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
-AsyncAdmin.createNamespace(NamespaceDescriptordescriptor)
-Create a new namespace.
-
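
The class-use rows above only reorder how AsyncAdmin, AsyncHBaseAdmin and RawAsyncHBaseAdmin are listed; the namespace API itself is unchanged. A hedged sketch of the AsyncAdmin calls named in the table (connection setup and the "default" namespace are assumptions for the example):

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.NamespaceDescriptor;
    import org.apache.hadoop.hbase.client.AsyncConnection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class NamespaceLookup {
      public static void main(String[] args) throws Exception {
        try (AsyncConnection conn =
            ConnectionFactory.createAsyncConnection(HBaseConfiguration.create()).get()) {
          // Get a namespace descriptor by name.
          NamespaceDescriptor ns = conn.getAdmin().getNamespaceDescriptor("default").get();
          System.out.println("namespace: " + ns.getName());

          // List available namespace descriptors.
          for (NamespaceDescriptor d : conn.getAdmin().listNamespaceDescriptors().get()) {
            System.out.println(" - " + d.getName());
          }
        }
      }
    }
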

[30/51] [partial] hbase-site git commit: Published site at 4cb40e6d846ce1f28ffb40d388c9efb753197813.

2018-03-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4dc2a2e8/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
index 1db7ad2..313a472 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
@@ -816,25 +816,30 @@
 
 
 void
-BaseLoadBalancer.initialize()
+FavoredStochasticBalancer.initialize()
 
 
 void
-FavoredStochasticBalancer.initialize()
+BaseLoadBalancer.initialize()
 
 
 ServerName
+FavoredStochasticBalancer.randomAssignment(RegionInforegionInfo,
+https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerNameservers)
+
+
+ServerName
 BaseLoadBalancer.randomAssignment(RegionInforegionInfo,
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerNameservers)
 Used to assign a single region to a random server.
 
 
-
-ServerName
-FavoredStochasticBalancer.randomAssignment(RegionInforegionInfo,
+
+https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
+FavoredStochasticBalancer.retainAssignment(https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapRegionInfo,ServerNameregions,
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerNameservers)
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
 BaseLoadBalancer.retainAssignment(https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapRegionInfo,ServerNameregions,
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerNameservers)
@@ -843,12 +848,12 @@
  available/online servers available for assignment.
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
-FavoredStochasticBalancer.retainAssignment(https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapRegionInfo,ServerNameregions,
-https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerNameservers)
+FavoredStochasticBalancer.roundRobinAssignment(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInforegions,
+https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerNameservers)
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
 BaseLoadBalancer.roundRobinAssignment(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInforegions,
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerNameservers)
@@ -856,11 +861,6 @@
  simple round-robin assignment.
 
 
-
-https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInfo
-FavoredStochasticBalancer.roundRobinAssignment(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionInforegions,
-https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerNameservers)
-
 
 private 
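
The reshuffled rows above cover the same LoadBalancer assignment methods (randomAssignment, retainAssignment, roundRobinAssignment) on BaseLoadBalancer and FavoredStochasticBalancer, all declared to throw HBaseIOException. A hedged sketch of a randomAssignment call with the region and candidate servers stubbed out (the helper class and host names are assumptions):

    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.hbase.HBaseIOException;
    import org.apache.hadoop.hbase.ServerName;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.client.RegionInfoBuilder;
    import org.apache.hadoop.hbase.master.LoadBalancer;

    public class AssignmentSketch {
      // Asks a balancer implementation to pick a server for one region,
      // mirroring the randomAssignment signature listed above.
      static ServerName placeRegion(LoadBalancer balancer) throws HBaseIOException {
        RegionInfo region = RegionInfoBuilder.newBuilder(TableName.valueOf("t1")).build();
        List<ServerName> servers = Arrays.asList(
            ServerName.valueOf("rs1.example.com", 16020, 1L),
            ServerName.valueOf("rs2.example.com", 16020, 1L));
        return balancer.randomAssignment(region, servers);
      }
    }
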

[30/51] [partial] hbase-site git commit: Published site at 8ab7b20f48951d77945181024f5e15842bc253c4.

2018-03-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6eb695c8/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
index ecf500c..0cd5a4e 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
@@ -238,8355 +238,8368 @@
 230  public static final String 
HBASE_MAX_CELL_SIZE_KEY = "hbase.server.keyvalue.maxsize";
 231  public static final int 
DEFAULT_MAX_CELL_SIZE = 10485760;
 232
-233  public static final String 
HBASE_REGIONSERVER_MINIBATCH_SIZE =
-234  
"hbase.regionserver.minibatch.size";
-235  public static final int 
DEFAULT_HBASE_REGIONSERVER_MINIBATCH_SIZE = 2;
-236
-237  /**
-238   * This is the global default value for 
durability. All tables/mutations not
-239   * defining a durability or using 
USE_DEFAULT will default to this value.
-240   */
-241  private static final Durability 
DEFAULT_DURABILITY = Durability.SYNC_WAL;
+233  /**
+234   * This is the global default value for 
durability. All tables/mutations not
+235   * defining a durability or using 
USE_DEFAULT will default to this value.
+236   */
+237  private static final Durability 
DEFAULT_DURABILITY = Durability.SYNC_WAL;
+238
+239  public static final String 
HBASE_REGIONSERVER_MINIBATCH_SIZE =
+240  
"hbase.regionserver.minibatch.size";
+241  public static final int 
DEFAULT_HBASE_REGIONSERVER_MINIBATCH_SIZE = 2;
 242
-243  final AtomicBoolean closed = new 
AtomicBoolean(false);
-244
-245  /* Closing can take some time; use the 
closing flag if there is stuff we don't
-246   * want to do while in closing state; 
e.g. like offer this region up to the
-247   * master as a region to close if the 
carrying regionserver is overloaded.
-248   * Once set, it is never cleared.
-249   */
-250  final AtomicBoolean closing = new 
AtomicBoolean(false);
-251
-252  /**
-253   * The max sequence id of flushed data 
on this region. There is no edit in memory that is
-254   * less that this sequence id.
-255   */
-256  private volatile long maxFlushedSeqId = 
HConstants.NO_SEQNUM;
-257
-258  /**
-259   * Record the sequence id of last flush 
operation. Can be in advance of
-260   * {@link #maxFlushedSeqId} when 
flushing a single column family. In this case,
-261   * {@link #maxFlushedSeqId} will be 
older than the oldest edit in memory.
-262   */
-263  private volatile long lastFlushOpSeqId 
= HConstants.NO_SEQNUM;
-264
-265  /**
-266   * The sequence id of the last replayed 
open region event from the primary region. This is used
-267   * to skip entries before this due to 
the possibility of replay edits coming out of order from
-268   * replication.
-269   */
-270  protected volatile long 
lastReplayedOpenRegionSeqId = -1L;
-271  protected volatile long 
lastReplayedCompactionSeqId = -1L;
-272
-273  
//
-274  // Members
-275  
//
-276
-277  // map from a locked row to the context 
for that lock including:
-278  // - CountDownLatch for threads waiting 
on that row
-279  // - the thread that owns the lock 
(allow reentrancy)
-280  // - reference count of (reentrant) 
locks held by the thread
-281  // - the row itself
-282  private final 
ConcurrentHashMap<HashedBytes, RowLockContext> lockedRows =
-283  new ConcurrentHashMap<>();
-284
-285  protected final Map<byte[], HStore> stores =
-286  new ConcurrentSkipListMap<>(Bytes.BYTES_RAWCOMPARATOR);
+243  public static final String 
WAL_HSYNC_CONF_KEY = "hbase.wal.hsync";
+244  public static final boolean 
DEFAULT_WAL_HSYNC = false;
+245
+246  final AtomicBoolean closed = new 
AtomicBoolean(false);
+247
+248  /* Closing can take some time; use the 
closing flag if there is stuff we don't
+249   * want to do while in closing state; 
e.g. like offer this region up to the
+250   * master as a region to close if the 
carrying regionserver is overloaded.
+251   * Once set, it is never cleared.
+252   */
+253  final AtomicBoolean closing = new 
AtomicBoolean(false);
+254
+255  /**
+256   * The max sequence id of flushed data 
on this region. There is no edit in memory that is
+257   * less that this sequence id.
+258   */
+259  private volatile long maxFlushedSeqId = 
HConstants.NO_SEQNUM;
+260
+261  /**
+262   * Record the sequence id of last flush 
operation. Can be in advance of
+263   * {@link #maxFlushedSeqId} when 
flushing a single column family. In this case,
+264   * {@link #maxFlushedSeqId} will be 
older than the oldest edit in memory.
+265   */
+266  private volatile long lastFlushOpSeqId 
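
Besides moving the DEFAULT_DURABILITY comment block, the HRegion hunk above adds the WAL_HSYNC_CONF_KEY constant ("hbase.wal.hsync", default false) next to the existing minibatch-size key. A minimal sketch of reading those settings from a Configuration, using the literal keys shown in the diff (the fallback for the minibatch size is only a placeholder):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class RegionSettings {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();

        // Key and default as declared in the hunk above.
        boolean walHsync = conf.getBoolean("hbase.wal.hsync", false);
        // Key from the same hunk; the fallback value here is arbitrary for the sketch.
        int miniBatchSize = conf.getInt("hbase.regionserver.minibatch.size", 1000);

        System.out.println("hbase.wal.hsync = " + walHsync);
        System.out.println("hbase.regionserver.minibatch.size = " + miniBatchSize);
      }
    }
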

[30/51] [partial] hbase-site git commit: Published site at 00095a2ef9442e3fd86c04876c9d91f2f8b23ad8.

2018-03-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bd675fa3/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html
index f47d627..c3d225c 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html
@@ -117,219 +117,219 @@
 109   */
 110  public static boolean 
archiveRegion(FileSystem fs, Path rootdir, Path tableDir, Path regionDir)
 111  throws IOException {
-112if (LOG.isDebugEnabled()) {
-113  LOG.debug("ARCHIVING " + 
regionDir.toString());
-114}
-115
-116// otherwise, we archive the files
-117// make sure we can archive
-118if (tableDir == null || regionDir == 
null) {
-119  LOG.error("No archive directory 
could be found because tabledir (" + tableDir
-120  + ") or regiondir (" + 
regionDir + "was null. Deleting files instead.");
-121  deleteRegionWithoutArchiving(fs, 
regionDir);
-122  // we should have archived, but 
failed to. Doesn't matter if we deleted
-123  // the archived files correctly or 
not.
-124  return false;
-125}
-126
-127// make sure the regiondir lives 
under the tabledir
-128
Preconditions.checkArgument(regionDir.toString().startsWith(tableDir.toString()));
-129Path regionArchiveDir = 
HFileArchiveUtil.getRegionArchiveDir(rootdir,
-130FSUtils.getTableName(tableDir),
-131regionDir.getName());
-132
-133FileStatusConverter getAsFile = new 
FileStatusConverter(fs);
-134// otherwise, we attempt to archive 
the store files
-135
-136// build collection of just the store 
directories to archive
-137Collection<File> toArchive = new ArrayList<>();
-138final PathFilter dirFilter = new 
FSUtils.DirFilter(fs);
-139PathFilter nonHidden = new 
PathFilter() {
-140  @Override
-141  public boolean accept(Path file) 
{
-142return dirFilter.accept(file) && !file.getName().toString().startsWith(".");
-143  }
-144};
-145FileStatus[] storeDirs = 
FSUtils.listStatus(fs, regionDir, nonHidden);
-146// if there no files, we can just 
delete the directory and return;
-147if (storeDirs == null) {
-148  LOG.debug("Region directory " + 
regionDir + " empty.");
-149  return 
deleteRegionWithoutArchiving(fs, regionDir);
-150}
-151
-152// convert the files in the region to 
a File
-153
toArchive.addAll(Lists.transform(Arrays.asList(storeDirs), getAsFile));
-154LOG.debug("Archiving " + 
toArchive);
-155List<File> failedArchive = 
resolveAndArchive(fs, regionArchiveDir, toArchive,
-156
EnvironmentEdgeManager.currentTime());
-157if (!failedArchive.isEmpty()) {
-158  throw new 
FailedArchiveException("Failed to archive/delete all the files for region:"
-159  + regionDir.getName() + " into 
" + regionArchiveDir
-160  + ". Something is probably awry 
on the filesystem.",
-161  
Collections2.transform(failedArchive, FUNC_FILE_TO_PATH));
-162}
-163// if that was successful, then we 
delete the region
-164return 
deleteRegionWithoutArchiving(fs, regionDir);
-165  }
-166
-167  /**
-168   * Remove from the specified region the 
store files of the specified column family,
-169   * either by archiving them or outright 
deletion
-170   * @param fs the filesystem where the 
store files live
-171   * @param conf {@link Configuration} to 
examine to determine the archive directory
-172   * @param parent Parent region hosting 
the store files
-173   * @param tableDir {@link Path} to 
where the table is being stored (for building the archive path)
-174   * @param family the family hosting the 
store files
-175   * @throws IOException if the files 
could not be correctly disposed.
-176   */
-177  public static void 
archiveFamily(FileSystem fs, Configuration conf,
-178  RegionInfo parent, Path tableDir, 
byte[] family) throws IOException {
-179Path familyDir = new Path(tableDir, 
new Path(parent.getEncodedName(), Bytes.toString(family)));
-180archiveFamilyByFamilyDir(fs, conf, 
parent, familyDir, family);
-181  }
-182
-183  /**
-184   * Removes from the specified region 
the store files of the specified column family,
-185   * either by archiving them or outright 
deletion
-186   * @param fs the filesystem where the 
store files live
-187   * @param conf {@link Configuration} to 
examine to determine the archive directory
-188   * @param parent Parent region hosting 
the store files
-189   * @param familyDir {@link Path} to 
where the family is being stored
-190   * @param family the family hosting the 
store files
-191   * @throws IOException if the files 
could 
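
The removed block above is the old body of HFileArchiver.archiveRegion: validate tableDir/regionDir, compute the region's archive directory, list the non-hidden store directories, archive them, and only then delete the region directory, raising FailedArchiveException if anything could not be archived. A small self-contained sketch of the "skip hidden entries" listing step using plain Hadoop FileSystem APIs (the path in main is a placeholder):

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.PathFilter;

    public class StoreDirLister {
      // Lists directories under a region directory while skipping hidden
      // entries (names starting with "."), mirroring the nonHidden filter above.
      static List<Path> listStoreDirs(FileSystem fs, Path regionDir) throws IOException {
        PathFilter nonHidden = path -> !path.getName().startsWith(".");
        List<Path> result = new ArrayList<>();
        for (FileStatus child : fs.listStatus(regionDir, nonHidden)) {
          if (child.isDirectory()) {
            result.add(child.getPath());
          }
        }
        return result;
      }

      public static void main(String[] args) throws IOException {
        FileSystem fs = FileSystem.get(new Configuration());
        for (Path p : listStoreDirs(fs, new Path("/hbase/data/default/t1/region-x"))) {
          System.out.println(p);
        }
      }
    }
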

[30/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html 
b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
index 7e51664..aa9427f 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":9,"i6":9,"i7":10,"i8":9,"i9":9,"i10":9,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":42,"i22":42,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":9};
+var methods = 
{"i0":10,"i1":9,"i2":9,"i3":10,"i4":9,"i5":9,"i6":9,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":42,"i23":42,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":9,"i34":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class TableDescriptorBuilder
+public class TableDescriptorBuilder
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 
 Since:
@@ -167,93 +167,116 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 COMPACTION_ENABLED_KEY
 
 
+static https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
+CP_HTD_ATTR_KEY_PATTERN
+
+
+private static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
+
+
+private static https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
+CP_HTD_ATTR_VALUE_PARAM_PATTERN
+
+
+private static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN
+
+
+private static https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
+CP_HTD_ATTR_VALUE_PATTERN
+
+ Pattern that matches a coprocessor specification.
+
+
+
 static boolean
 DEFAULT_COMPACTION_ENABLED
 Constant that denotes whether the table is compaction 
enabled by default
 
 
-
+
 private static Durability
 DEFAULT_DURABLITY
 Default durability for HTD is USE_DEFAULT, which defaults 
to HBase-global
  default value
 
 
-
+
 static long
 DEFAULT_MEMSTORE_FLUSH_SIZE
 Constant that denotes the maximum default size of the 
memstore after which
  the contents are flushed to the store files
 
 
-
+
 static boolean
 DEFAULT_NORMALIZATION_ENABLED
 Constant that denotes whether the table is normalized by 
default.
 
 
-
+
 private static int
 DEFAULT_PRIORITY
 Relative priority of the table used for rpc scheduling
 
 
-
+
 static boolean
 DEFAULT_READONLY
 Constant that denotes whether the table is READONLY by 
default and is false
 
 
-
+
 static boolean
 DEFAULT_REGION_MEMSTORE_REPLICATION
 
-
+
 static int
 DEFAULT_REGION_REPLICATION
 
-
+
 private static https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 DEFAULT_VALUES
 
-
+
 private TableDescriptorBuilder.ModifyableTableDescriptor
 desc
 
-
+
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 DURABILITY
 Durability 
setting for the table.
 
 
-
+
 private static Bytes
 DURABILITY_KEY
 
-
+
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 FLUSH_POLICY
 
-
+
 private static Bytes
 FLUSH_POLICY_KEY
 
-
+
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 IS_META
 Used by rest interface to access this metadata attribute
  which denotes if it is a catalog table, either  hbase:meta 
.
 
 
-
+
 private static Bytes
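
The field-summary rows above shift anchors because the CP_HTD_ATTR_* coprocessor-specification patterns were added to TableDescriptorBuilder; the builder API itself is unchanged. A minimal, hedged sketch of the builder with two of the settings named in the table (table and family names are examples):

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.Durability;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

    public class BuildTableDescriptor {
      public static void main(String[] args) {
        TableDescriptor desc = TableDescriptorBuilder.newBuilder(TableName.valueOf("demo"))
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf"))
            // Backed by the COMPACTION_ENABLED and DURABILITY keys listed above.
            .setCompactionEnabled(true)
            .setDurability(Durability.SYNC_WAL)
            .build();
        System.out.println(desc.getTableName() + " families=" + desc.getColumnFamilyCount());
      }
    }
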
 

[30/51] [partial] hbase-site git commit: Published site at 31da4d0bce69b3a47066a5df675756087ce4dc60.

2018-03-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a754d895/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html 
b/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
index 091cc10..e5b6270 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -356,39 +356,43 @@ extends 
 boolean
-isInitialized()
+isClusterUp()
 
 
 boolean
-isInMaintenanceMode()
+isInitialized()
 
 
 boolean
-isServerCrashProcessingEnabled()
+isInMaintenanceMode()
 
 
 boolean
-isSplitOrMergeEnabled(MasterSwitchTypeswitchType)
+isServerCrashProcessingEnabled()
 
 
+boolean
+isSplitOrMergeEnabled(MasterSwitchTypeswitchType)
+
+
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListReplicationPeerDescription
 listReplicationPeers(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringregex)
 Return a list of replication peers.
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
 listTableDescriptorsByNamespace(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Get list of table descriptors by namespace
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableName
 listTableNamesByNamespace(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Get list of table names by namespace
 
 
-
+
 long
 mergeRegions(RegionInfo[]regionsToMerge,
 booleanforcible,
@@ -397,7 +401,7 @@ extends Merge regions in a table.
 
 
-
+
 long
 modifyColumn(TableNametableName,
 ColumnFamilyDescriptordescriptor,
@@ -406,7 +410,7 @@ extends Modify the column descriptor of an existing column in an 
existing table
 
 
-
+
 long
 modifyTable(TableNametableName,
TableDescriptordescriptor,
@@ -415,25 +419,25 @@ extends Modify the descriptor of an existing table
 
 
-
+
 boolean
 recoverMeta()
 Recover meta table.
 
 
-
+
 boolean
 registerService(com.google.protobuf.Serviceinstance)
 Registers a new protocol buffer Service 
subclass as a master coprocessor endpoint.
 
 
-
+
 long
 removeReplicationPeer(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringpeerId)
 Removes a peer and stops the replication
 
 
-
+
 long
 splitRegion(RegionInforegionInfo,
byte[]splitRow,
@@ -442,7 +446,7 @@ extends Split a region.
 
 
-
+
 long
 truncateTable(TableNametableName,
  booleanpreserveSplits,
@@ -451,7 +455,7 @@ extends Truncate a table
 
 
-
+
 long
 updateReplicationPeerConfig(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringpeerId,
ReplicationPeerConfigpeerConfig)
@@ -1454,12 +1458,25 @@ extends 
 
 
-
+
 
 getClientIdAuditPrefix
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetClientIdAuditPrefix()
 
 
+
+
+
+
+
+isClusterUp
+boolean isClusterUp()
+
+Returns:
+True if cluster is up; false if cluster is not up (we are shutting 
down).
+
+
+
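
The method detail just above is the newly added MasterServices#isClusterUp(), returning true while the cluster is up and false once it is shutting down. A hedged sketch of how a component holding a MasterServices reference might combine it with isInitialized() from the same interface (the wrapper class is hypothetical):

    import org.apache.hadoop.hbase.master.MasterServices;

    public class MasterTaskGate {
      private final MasterServices master;

      public MasterTaskGate(MasterServices master) {
        this.master = master;
      }

      // Only schedule background work once startup has finished and before shutdown begins.
      boolean canScheduleBackgroundWork() {
        return master.isClusterUp() && master.isInitialized();
      }
    }
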
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a754d895/devapidocs/org/apache/hadoop/hbase/master/TableNamespaceManager.html

[30/51] [partial] hbase-site git commit: Published site at 6b77786dfc46d25ac5bb5f1c8a4a9eb47b52a604.

2018-03-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/81cde4ce/devapidocs/org/apache/hadoop/hbase/class-use/Size.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Size.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Size.html
index e8d1010..3f1b032 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Size.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Size.html
@@ -189,130 +189,130 @@
 
 
 Size
+RegionLoad.getBloomFilterSize()
+Deprecated.
+
+
+
+Size
 RegionMetrics.getBloomFilterSize()
 
+
+Size
+RegionMetricsBuilder.RegionMetricsImpl.getBloomFilterSize()
+
 
 Size
-RegionLoad.getBloomFilterSize()
+ServerLoad.getMaxHeapSize()
 Deprecated.
 
 
 
 Size
-RegionMetricsBuilder.RegionMetricsImpl.getBloomFilterSize()
+ServerMetrics.getMaxHeapSize()
 
 
 Size
-ServerMetrics.getMaxHeapSize()
+ServerMetricsBuilder.ServerMetricsImpl.getMaxHeapSize()
 
 
 Size
-ServerLoad.getMaxHeapSize()
+RegionLoad.getMemStoreSize()
 Deprecated.
 
 
 
 Size
-ServerMetricsBuilder.ServerMetricsImpl.getMaxHeapSize()
+RegionMetrics.getMemStoreSize()
 
 
 Size
-RegionMetrics.getMemStoreSize()
+RegionMetricsBuilder.RegionMetricsImpl.getMemStoreSize()
 
 
 Size
-RegionLoad.getMemStoreSize()
+RegionLoad.getStoreFileIndexSize()
 Deprecated.
 
 
 
 Size
-RegionMetricsBuilder.RegionMetricsImpl.getMemStoreSize()
-
-
-Size
 RegionMetrics.getStoreFileIndexSize()
 TODO: why we pass the same value to different counters? 
Currently, the value from
  getStoreFileIndexSize() is same with getStoreFileRootLevelIndexSize()
  see HRegionServer#createRegionLoad.
 
 
+
+Size
+RegionMetricsBuilder.RegionMetricsImpl.getStoreFileIndexSize()
+
 
 Size
-RegionLoad.getStoreFileIndexSize()
+RegionLoad.getStoreFileRootLevelIndexSize()
 Deprecated.
 
 
 
 Size
-RegionMetricsBuilder.RegionMetricsImpl.getStoreFileIndexSize()
+RegionMetrics.getStoreFileRootLevelIndexSize()
 
 
 Size
-RegionMetrics.getStoreFileRootLevelIndexSize()
+RegionMetricsBuilder.RegionMetricsImpl.getStoreFileRootLevelIndexSize()
 
 
 Size
-RegionLoad.getStoreFileRootLevelIndexSize()
+RegionLoad.getStoreFileSize()
 Deprecated.
 
 
 
 Size
-RegionMetricsBuilder.RegionMetricsImpl.getStoreFileRootLevelIndexSize()
+RegionMetrics.getStoreFileSize()
 
 
 Size
-RegionMetrics.getStoreFileSize()
+RegionMetricsBuilder.RegionMetricsImpl.getStoreFileSize()
 
 
 Size
-RegionLoad.getStoreFileSize()
+RegionLoad.getStoreFileUncompressedDataIndexSize()
 Deprecated.
 
 
 
 Size
-RegionMetricsBuilder.RegionMetricsImpl.getStoreFileSize()
+RegionMetrics.getStoreFileUncompressedDataIndexSize()
 
 
 Size
-RegionMetrics.getStoreFileUncompressedDataIndexSize()
+RegionMetricsBuilder.RegionMetricsImpl.getStoreFileUncompressedDataIndexSize()
 
 
 Size
-RegionLoad.getStoreFileUncompressedDataIndexSize()
+RegionLoad.getUncompressedStoreFileSize()
 Deprecated.
 
 
 
 Size
-RegionMetricsBuilder.RegionMetricsImpl.getStoreFileUncompressedDataIndexSize()
+RegionMetrics.getUncompressedStoreFileSize()
 
 
 Size
-RegionMetrics.getUncompressedStoreFileSize()
+RegionMetricsBuilder.RegionMetricsImpl.getUncompressedStoreFileSize()
 
 
 Size
-RegionLoad.getUncompressedStoreFileSize()
+ServerLoad.getUsedHeapSize()
 Deprecated.
 
 
 
 Size
-RegionMetricsBuilder.RegionMetricsImpl.getUncompressedStoreFileSize()
-
-
-Size
 ServerMetrics.getUsedHeapSize()
 
-
-Size
-ServerLoad.getUsedHeapSize()
-Deprecated.
-
-
 
 Size
 ServerMetricsBuilder.ServerMetricsImpl.getUsedHeapSize()

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/81cde4ce/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
index 72d579d..63833f7 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
@@ -122,11 +122,11 @@
 
 
 TableDescriptors
-MasterServices.getTableDescriptors()
+HMaster.getTableDescriptors()
 
 
 TableDescriptors
-HMaster.getTableDescriptors()
+MasterServices.getTableDescriptors()
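
The Size class-use table earlier in this message reflects the HBase 2.0 split between the deprecated ServerLoad/RegionLoad classes and their ServerMetrics/RegionMetrics replacements. A hedged sketch of reading those sizes through Admin (connection setup and the unit choice are assumptions):

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.RegionMetrics;
    import org.apache.hadoop.hbase.ServerName;
    import org.apache.hadoop.hbase.Size;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class RegionSizeReport {
      public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
            Admin admin = conn.getAdmin()) {
          for (ServerName server : admin.getClusterMetrics().getLiveServerMetrics().keySet()) {
            for (RegionMetrics rm : admin.getRegionMetrics(server)) {
              // Size carries its own unit; convert explicitly when reporting.
              double storeFileMb = rm.getStoreFileSize().get(Size.Unit.MEGABYTE);
              double memStoreMb = rm.getMemStoreSize().get(Size.Unit.MEGABYTE);
              System.out.printf("%s store=%.1fMB memstore=%.1fMB%n",
                  rm.getNameAsString(), storeFileMb, memStoreMb);
            }
          }
        }
      }
    }
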
 
 
 



[30/51] [partial] hbase-site git commit: Published site at 1384da71375427b522b09f06862bb5d629cef52f.

2018-03-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d347bde8/devapidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html
index e959408..cabc286 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html
@@ -128,7 +128,7 @@
 
 
 boolean
-ConnectionImplementation.isMasterRunning()
+ClusterConnection.isMasterRunning()
 Deprecated.
 this has been deprecated 
without a replacement
 
@@ -136,7 +136,7 @@
 
 
 boolean
-ClusterConnection.isMasterRunning()
+ConnectionImplementation.isMasterRunning()
 Deprecated.
 this has been deprecated 
without a replacement
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d347bde8/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
index bba209a..26611ed 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
@@ -270,32 +270,32 @@
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureNamespaceDescriptor
-AsyncHBaseAdmin.getNamespaceDescriptor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
-
-
-https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureNamespaceDescriptor
 AsyncAdmin.getNamespaceDescriptor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Get a namespace descriptor by name
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureNamespaceDescriptor
 RawAsyncHBaseAdmin.getNamespaceDescriptor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 
-
-https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListNamespaceDescriptor
-AsyncHBaseAdmin.listNamespaceDescriptors()
-
 
+https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFutureNamespaceDescriptor
+AsyncHBaseAdmin.getNamespaceDescriptor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+
+
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListNamespaceDescriptor
 AsyncAdmin.listNamespaceDescriptors()
 List available namespace descriptors
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListNamespaceDescriptor
 RawAsyncHBaseAdmin.listNamespaceDescriptors()
 
+
+https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListNamespaceDescriptor
+AsyncHBaseAdmin.listNamespaceDescriptors()
+
 
 
 
@@ -307,7 +307,9 @@
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttps://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
-AsyncHBaseAdmin.createNamespace(NamespaceDescriptordescriptor)
+AsyncAdmin.createNamespace(NamespaceDescriptordescriptor)

[30/51] [partial] hbase-site git commit: Published site at b7b86839250bf9b295ebc1948826f43a88736d6c.

2018-03-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6b94a2f2/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html
index d5857c6..d9f30b8 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class HRegionFileSystem
+public class HRegionFileSystem
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 View to an on-disk Region.
  Provides the set of methods necessary to interact with the on-disk region 
data.
@@ -188,14 +188,18 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 
+private org.apache.hadoop.fs.Path
+regionDir
+
+
 private RegionInfo
 regionInfo
 
-
+
 private RegionInfo
 regionInfoForFs
 
-
+
 private org.apache.hadoop.fs.Path
 tableDir
 
@@ -657,7 +661,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 LOG
-private static finalorg.slf4j.Logger LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -666,7 +670,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 REGION_INFO_FILE
-public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String REGION_INFO_FILE
+public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String REGION_INFO_FILE
 Name of the region info file that resides just under the 
region directory.
 
 See Also:
@@ -680,7 +684,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 REGION_MERGES_DIR
-public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String REGION_MERGES_DIR
+public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String REGION_MERGES_DIR
 Temporary subdirectory of the region directory used for 
merges.
 
 See Also:
@@ -694,7 +698,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 REGION_SPLITS_DIR
-public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String REGION_SPLITS_DIR
+public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String REGION_SPLITS_DIR
 Temporary subdirectory of the region directory used for 
splits.
 
 See Also:
@@ -708,7 +712,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 REGION_TEMP_DIR
-static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String REGION_TEMP_DIR
+static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String REGION_TEMP_DIR
 Temporary subdirectory of the region directory used for 
compaction output.
 
 See Also:
@@ -722,7 +726,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 regionInfo
-private finalRegionInfo regionInfo
+private finalRegionInfo regionInfo
 
 
 
@@ -731,7 +735,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 regionInfoForFs
-private finalRegionInfo regionInfoForFs
+private finalRegionInfo regionInfoForFs
 
 
 
@@ -740,7 +744,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 conf
-private finalorg.apache.hadoop.conf.Configuration conf
+private finalorg.apache.hadoop.conf.Configuration conf
 
 
 
@@ -749,7 +753,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 tableDir
-private finalorg.apache.hadoop.fs.Path tableDir
+private finalorg.apache.hadoop.fs.Path tableDir
 
 
 
@@ -758,7 +762,16 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 fs
-private finalorg.apache.hadoop.fs.FileSystem fs
+private finalorg.apache.hadoop.fs.FileSystem fs
+
+
+
+
+
+
+
+regionDir
+private finalorg.apache.hadoop.fs.Path regionDir
 
 
 
@@ -849,7 +862,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getFileSystem
-publicorg.apache.hadoop.fs.FileSystemgetFileSystem()
+publicorg.apache.hadoop.fs.FileSystemgetFileSystem()
 
 Returns:
 the underlying FileSystem
@@ -862,7 +875,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 

[30/51] [partial] hbase-site git commit: Published site at 1d25b60831b8cc8f7ad5fd366f1867de5c20d2f3.

2018-03-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb05e3e3/apidocs/org/apache/hadoop/hbase/TableNotEnabledException.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/TableNotEnabledException.html 
b/apidocs/org/apache/hadoop/hbase/TableNotEnabledException.html
index ead98f8..3387ad0 100644
--- a/apidocs/org/apache/hadoop/hbase/TableNotEnabledException.html
+++ b/apidocs/org/apache/hadoop/hbase/TableNotEnabledException.html
@@ -91,16 +91,16 @@
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">java.lang.Throwable
+https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">java.lang.Throwable
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">java.lang.Exception
+https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">java.lang.Exception
 
 
-http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">java.io.IOException
+https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">java.io.IOException
 
 
 org.apache.hadoop.hbase.HBaseIOException
@@ -128,7 +128,7 @@
 
 
 All Implemented Interfaces:
-http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable
+https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable
 
 
 
@@ -166,7 +166,7 @@ extends TableNotEnabledException(byte[]tableName)
 
 
-TableNotEnabledException(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Strings)
+TableNotEnabledException(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Strings)
 Constructor
 
 
@@ -186,15 +186,15 @@ extends 
 
 
-Methods inherited from classjava.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">Throwable
-http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true#addSuppressed-java.lang.Throwable-;
 title="class or interface in java.lang">addSuppressed, http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true#fillInStackTrace--;
 title="class or interface in java.lang">fillInStackTrace, http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true#getCause--;
 title="class or interface in java.lang">getCause, http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true#getLocalizedMessage--;
 title="class or interface in java.lang">getLocalizedMessage, http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true#getMessage--;
 title="class or interface in java.lang">getMessage, http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true#getStackTrace--;
 title="class or inter
 face in java.lang">getStackTrace, http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true#getSuppressed--;
 title="class or interface in java.lang">getSuppressed, http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true#initCause-java.lang.Throwable-;
 title="class or interface in java.lang">initCause, http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true#printStackTrace--;
 title="class or interface in java.lang">printStackTrace, http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true#printStackTrace-java.io.PrintStream-;
 title="class or interface in java.lang">printStackTrace, http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true#printStackTrace-java.io.PrintWriter-;
 title="class or interface in java.lang">printStackTrace, http://docs.oracle.com/javase/8/docs/api/java/lan
 
g/Throwable.html?is-external=true#setStackTrace-java.lang.StackTraceElement:A-" 
title="class or interface in java.lang">setStackTrace, http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true#toString--;
 title="class or interface in java.lang">toString
+Methods inherited from classjava.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in 

[30/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html 
b/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
index 1f9cc78..2a42011 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
  @InterfaceAudience.Public
-public static enum CompareFilter.CompareOp
+public static enum CompareFilter.CompareOp
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumCompareFilter.CompareOp
 Comparison operators. For filters only!
  Use CompareOperator 
otherwise.
@@ -256,7 +256,7 @@ the order they are declared.
 
 
 LESS
-public static finalCompareFilter.CompareOp LESS
+public static finalCompareFilter.CompareOp LESS
 Deprecated.
 less than
 
@@ -267,7 +267,7 @@ the order they are declared.
 
 
 LESS_OR_EQUAL
-public static finalCompareFilter.CompareOp LESS_OR_EQUAL
+public static finalCompareFilter.CompareOp LESS_OR_EQUAL
 Deprecated.
 less than or equal to
 
@@ -278,7 +278,7 @@ the order they are declared.
 
 
 EQUAL
-public static finalCompareFilter.CompareOp EQUAL
+public static finalCompareFilter.CompareOp EQUAL
 Deprecated.
 equals
 
@@ -289,7 +289,7 @@ the order they are declared.
 
 
 NOT_EQUAL
-public static finalCompareFilter.CompareOp NOT_EQUAL
+public static finalCompareFilter.CompareOp NOT_EQUAL
 Deprecated.
 not equal
 
@@ -300,7 +300,7 @@ the order they are declared.
 
 
 GREATER_OR_EQUAL
-public static finalCompareFilter.CompareOp GREATER_OR_EQUAL
+public static finalCompareFilter.CompareOp GREATER_OR_EQUAL
 Deprecated.
 greater than or equal to
 
@@ -311,7 +311,7 @@ the order they are declared.
 
 
 GREATER
-public static finalCompareFilter.CompareOp GREATER
+public static finalCompareFilter.CompareOp GREATER
 Deprecated.
 greater than
 
@@ -322,7 +322,7 @@ the order they are declared.
 
 
 NO_OP
-public static finalCompareFilter.CompareOp NO_OP
+public static finalCompareFilter.CompareOp NO_OP
 Deprecated.
 no operation
 
@@ -341,7 +341,7 @@ the order they are declared.
 
 
 values
-public staticCompareFilter.CompareOp[]values()
+public staticCompareFilter.CompareOp[]values()
 Deprecated.
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
@@ -362,7 +362,7 @@ for (CompareFilter.CompareOp c : 
CompareFilter.CompareOp.values())
 
 
 valueOf
-public staticCompareFilter.CompareOpvalueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+public staticCompareFilter.CompareOpvalueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Deprecated.
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.html 
b/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.html
index fec4140..ef9bbfa 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.html
@@ -49,7 +49,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-PrevClass
+PrevClass
 NextClass
 
 
@@ -124,16 +124,18 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public abstract class CompareFilter
+public abstract class CompareFilter
 extends FilterBase
 This is a generic filter to be used to filter by 
comparison.  It takes an
  operator (equal, greater, not equal, etc) and a byte [] comparator.
  
  To filter by row key, use RowFilter.
  
+ To filter by column family, use FamilyFilter.
+ 
  To filter by column qualifier, use QualifierFilter.
  
- To filter by value, use SingleColumnValueFilter.
+ To filter by value, use ValueFilter.
  
  These filters can be wrapped with SkipFilter and 
WhileMatchFilter
  to add more control.
@@ -407,7 +409,7 @@ extends 
 
 op
-protectedCompareOperator op
+protectedCompareOperator op
 
 
 
@@ -416,7 +418,7 @@ extends 
 
 comparator
-protectedByteArrayComparable comparator
+protectedByteArrayComparable comparator
 
 
 
@@ -434,7 +436,7 @@ extends 
 CompareFilter
 

[30/51] [partial] hbase-site git commit: Published site at .

2018-02-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/991224b9/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSByteBufferExtendedCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSByteBufferExtendedCell.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSByteBufferExtendedCell.html
index d143ef8..4583895 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSByteBufferExtendedCell.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSByteBufferExtendedCell.html
@@ -258,7 +258,7 @@
 250
 251@Override
 252public long heapSize() {
-253  long sum = HEAP_SIZE_OVERHEAD + 
estimatedHeapSizeOf(cell);
+253  long sum = HEAP_SIZE_OVERHEAD + 
estimatedSizeOfCell(cell);
 254  if (this.tags != null) {
 255sum += 
ClassSize.sizeOf(this.tags);
 256  }
@@ -454,7 +454,7 @@
 446
 447@Override
 448public long heapSize() {
-449  long sum = HEAP_SIZE_OVERHEAD + 
estimatedHeapSizeOf(cell);
+449  long sum = HEAP_SIZE_OVERHEAD + 
estimatedSizeOfCell(cell);
 450  // this.tags is on heap byte[]
 451  if (this.tags != null) {
 452sum += 
ClassSize.sizeOf(this.tags);
@@ -2791,192 +2791,193 @@
 2783   * {@link HeapSize} we call {@link 
HeapSize#heapSize()} so cell can give a correct value. In other
 2784   * cases we just consider the bytes 
occupied by the cell components ie. row, CF, qualifier,
 2785   * timestamp, type, value and tags.
-2786   * @param cell
-2787   * @return estimate of the heap 
space
-2788   */
-2789  public static long 
estimatedHeapSizeOf(final Cell cell) {
-2790if (cell instanceof HeapSize) {
-2791  return ((HeapSize) 
cell).heapSize();
-2792}
-2793// TODO: Add sizing of references 
that hold the row, family, etc., arrays.
-2794return 
estimatedSerializedSizeOf(cell);
-2795  }
-2796
-2797  /**
-2798   * This method exists just to 
encapsulate how we serialize keys. To be replaced by a factory that
-2799   * we query to figure what the Cell 
implementation is and then, what serialization engine to use
-2800   * and further, how to serialize the 
key for inclusion in hfile index. TODO.
-2801   * @param cell
-2802   * @return The key portion of the Cell 
serialized in the old-school KeyValue way or null if passed
-2803   * a null 
codecell/code
-2804   */
-2805  public static byte[] 
getCellKeySerializedAsKeyValueKey(final Cell cell) {
-2806if (cell == null) return null;
-2807byte[] b = new 
byte[KeyValueUtil.keyLength(cell)];
-2808KeyValueUtil.appendKeyTo(cell, b, 
0);
-2809return b;
-2810  }
-2811
-2812  /**
-2813   * Create a Cell that is smaller than 
all other possible Cells for the given Cell's row.
-2814   * @param cell
-2815   * @return First possible Cell on 
passed Cell's row.
-2816   */
-2817  public static Cell 
createFirstOnRow(final Cell cell) {
-2818if (cell instanceof 
ByteBufferExtendedCell) {
-2819  return new 
FirstOnRowByteBufferExtendedCell(
-2820  ((ByteBufferExtendedCell) 
cell).getRowByteBuffer(),
-2821  ((ByteBufferExtendedCell) 
cell).getRowPosition(), cell.getRowLength());
-2822}
-2823return new 
FirstOnRowCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
-2824  }
-2825
-2826  public static Cell 
createFirstOnRow(final byte[] row, int roffset, short rlength) {
-2827return new FirstOnRowCell(row, 
roffset, rlength);
-2828  }
-2829
-2830  public static Cell 
createFirstOnRow(final byte[] row, final byte[] family, final byte[] col) {
-2831return createFirstOnRow(row, 0, 
(short) row.length, family, 0, (byte) family.length, col, 0,
-2832col.length);
-2833  }
-2834
-2835  public static Cell 
createFirstOnRow(final byte[] row, int roffset, short rlength,
-2836  final byte[] family, int foffset, 
byte flength, final byte[] col, int coffset, int clength) {
-2837return new FirstOnRowColCell(row, 
roffset, rlength, family, foffset, flength, col, coffset,
-2838clength);
-2839  }
-2840
-2841  public static Cell 
createFirstOnRow(final byte[] row) {
-2842return createFirstOnRow(row, 0, 
(short) row.length);
-2843  }
-2844
-2845  public static Cell 
createFirstOnRowFamily(Cell cell, byte[] fArray, int foff, int flen) {
-2846if (cell instanceof 
ByteBufferExtendedCell) {
-2847  return new 
FirstOnRowColByteBufferExtendedCell(
-2848  ((ByteBufferExtendedCell) 
cell).getRowByteBuffer(),
-2849  ((ByteBufferExtendedCell) 
cell).getRowPosition(), cell.getRowLength(),
-2850  ByteBuffer.wrap(fArray), foff, 
(byte) flen, HConstants.EMPTY_BYTE_BUFFER, 0, 0);
-2851}
-2852return new 
FirstOnRowColCell(cell.getRowArray(), cell.getRowOffset(), 
cell.getRowLength(),
-2853fArray, foff, (byte) flen, 
HConstants.EMPTY_BYTE_ARRAY, 0, 0);

[30/51] [partial] hbase-site git commit: Published site at .

2018-02-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/193b4259/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
index 00483be..c27b109 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
@@ -396,888 +396,887 @@
 388new 
ConcurrentHashMapCancellableRegionServerCallable, Boolean());
 389this.asyncProcess = asyncProcess;
 390this.errorsByServer = 
createServerErrorTracker();
-391this.errors = 
(asyncProcess.globalErrors != null)
-392? asyncProcess.globalErrors : new 
BatchErrors();
-393this.operationTimeout = 
task.getOperationTimeout();
-394this.rpcTimeout = 
task.getRpcTimeout();
-395this.currentCallable = 
task.getCallable();
-396if (task.getCallable() == null) {
-397  tracker = new 
RetryingTimeTracker().start();
-398}
-399  }
-400
-401  @VisibleForTesting
-402  protected 
SetCancellableRegionServerCallable getCallsInProgress() {
-403return callsInProgress;
-404  }
-405
-406  @VisibleForTesting
-407  SingleServerRequestRunnable 
createSingleServerRequest(MultiAction multiAction, int numAttempt, ServerName 
server,
-408
SetCancellableRegionServerCallable callsInProgress) {
-409return new 
SingleServerRequestRunnable(multiAction, numAttempt, server, 
callsInProgress);
-410  }
-411
-412  /**
-413   * Group a list of actions per region 
servers, and send them.
-414   *
-415   * @param currentActions - the list of 
row to submit
-416   * @param numAttempt - the current 
numAttempt (first attempt is 1)
-417   */
-418  void 
groupAndSendMultiAction(ListAction currentActions, int numAttempt) {
-419MapServerName, MultiAction 
actionsByServer = new HashMap();
-420
-421boolean isReplica = false;
-422ListAction 
unknownReplicaActions = null;
-423for (Action action : currentActions) 
{
-424  RegionLocations locs = 
findAllLocationsOrFail(action, true);
-425  if (locs == null) continue;
-426  boolean isReplicaAction = 
!RegionReplicaUtil.isDefaultReplica(action.getReplicaId());
-427  if (isReplica  
!isReplicaAction) {
-428// This is the property of the 
current implementation, not a requirement.
-429throw new AssertionError("Replica 
and non-replica actions in the same retry");
-430  }
-431  isReplica = isReplicaAction;
-432  HRegionLocation loc = 
locs.getRegionLocation(action.getReplicaId());
-433  if (loc == null || 
loc.getServerName() == null) {
-434if (isReplica) {
-435  if (unknownReplicaActions == 
null) {
-436unknownReplicaActions = new 
ArrayList(1);
-437  }
-438  
unknownReplicaActions.add(action);
-439} else {
-440  // TODO: relies on primary 
location always being fetched
-441  manageLocationError(action, 
null);
-442}
-443  } else {
-444byte[] regionName = 
loc.getRegionInfo().getRegionName();
-445
AsyncProcess.addAction(loc.getServerName(), regionName, action, 
actionsByServer, nonceGroup);
-446  }
-447}
-448boolean doStartReplica = (numAttempt 
== 1  !isReplica  hasAnyReplicaGets);
-449boolean hasUnknown = 
unknownReplicaActions != null  !unknownReplicaActions.isEmpty();
-450
-451if (!actionsByServer.isEmpty()) {
-452  // If this is a first attempt to 
group and send, no replicas, we need replica thread.
-453  sendMultiAction(actionsByServer, 
numAttempt, (doStartReplica  !hasUnknown)
-454  ? currentActions : null, 
numAttempt  1  !hasUnknown);
-455}
-456
-457if (hasUnknown) {
-458  actionsByServer = new 
HashMap();
-459  for (Action action : 
unknownReplicaActions) {
-460HRegionLocation loc = 
getReplicaLocationOrFail(action);
-461if (loc == null) continue;
-462byte[] regionName = 
loc.getRegionInfo().getRegionName();
-463
AsyncProcess.addAction(loc.getServerName(), regionName, action, 
actionsByServer, nonceGroup);
-464  }
-465  if (!actionsByServer.isEmpty()) {
-466sendMultiAction(
-467actionsByServer, numAttempt, 
doStartReplica ? currentActions : null, true);
-468  }
-469}
-470  }
-471
-472  private HRegionLocation 
getReplicaLocationOrFail(Action action) {
-473// We are going to try get location 
once again. For each action, we'll do it once
-474// from cache, because the previous 
calls in the loop might populate it.
-475int replicaId = 
action.getReplicaId();
-476RegionLocations locs = 
findAllLocationsOrFail(action, true);
-477if (locs == null) return null; // 
manageError already called
-478HRegionLocation loc = 

[30/51] [partial] hbase-site git commit: Published site at .
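The groupAndSendMultiAction javadoc in the AsyncRequestFutureImpl diff above describes bucketing a batch of row actions by the region server hosting each region, then issuing one multi-request per server. Below is a reduced sketch of that grouping step only; the location lookup and the actual dispatch are replaced by a hypothetical locate function and left to the caller, so this is illustrative rather than the client's real code path.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.Action;

static Map<ServerName, List<Action>> groupByServer(List<Action> currentActions,
    Function<Action, HRegionLocation> locate) {   // stand-in for the real region lookup
  Map<ServerName, List<Action>> actionsByServer = new HashMap<>();
  for (Action action : currentActions) {
    HRegionLocation loc = locate.apply(action);
    if (loc == null || loc.getServerName() == null) {
      continue;                                   // location errors handled elsewhere
    }
    actionsByServer
        .computeIfAbsent(loc.getServerName(), s -> new ArrayList<>())
        .add(action);
  }
  return actionsByServer;                         // caller sends one multi-request per entry
}
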

2018-02-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/94208cfe/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
index 3c2959e..c233c17 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
@@ -405,16 +405,6 @@ service.
 
 
 boolean
-HTable.checkAndDelete(byte[]row,
-  byte[]family,
-  byte[]qualifier,
-  byte[]value,
-  Deletedelete)
-Deprecated.
-
-
-
-boolean
 Table.checkAndDelete(byte[]row,
   byte[]family,
   byte[]qualifier,
@@ -425,18 +415,17 @@ service.
 
 
 
-
+
 boolean
-HTable.checkAndDelete(byte[]row,
+HTable.checkAndDelete(byte[]row,
   byte[]family,
   byte[]qualifier,
-  CompareFilter.CompareOpcompareOp,
   byte[]value,
   Deletedelete)
 Deprecated.
 
 
-
+
 boolean
 Table.checkAndDelete(byte[]row,
   byte[]family,
@@ -449,18 +438,18 @@ service.
 
 
 
-
+
 boolean
-HTable.checkAndDelete(byte[]row,
+HTable.checkAndDelete(byte[]row,
   byte[]family,
   byte[]qualifier,
-  CompareOperatorop,
+  CompareFilter.CompareOpcompareOp,
   byte[]value,
   Deletedelete)
 Deprecated.
 
 
-
+
 boolean
 Table.checkAndDelete(byte[]row,
   byte[]family,
@@ -473,29 +462,40 @@ service.
 
 
 
+
+boolean
+HTable.checkAndDelete(byte[]row,
+  byte[]family,
+  byte[]qualifier,
+  CompareOperatorop,
+  byte[]value,
+  Deletedelete)
+Deprecated.
+
+
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
-RawAsyncTableImpl.delete(Deletedelete)
+AsyncTable.delete(Deletedelete)
+Deletes the specified cells/row.
+
 
 
 void
-HTable.delete(Deletedelete)
-
-
-void
 Table.delete(Deletedelete)
 Deletes the specified cells/row.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
 AsyncTableImpl.delete(Deletedelete)
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
-AsyncTable.delete(Deletedelete)
-Deletes the specified cells/row.
-
+RawAsyncTableImpl.delete(Deletedelete)
+
+
+void
+HTable.delete(Deletedelete)
 
 
 private boolean
@@ -508,19 +508,19 @@ service.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
-RawAsyncTableImpl.CheckAndMutateBuilderImpl.thenDelete(Deletedelete)
+AsyncTable.CheckAndMutateBuilder.thenDelete(Deletedelete)
 
 
 boolean
-HTable.CheckAndMutateBuilderImpl.thenDelete(Deletedelete)
+Table.CheckAndMutateBuilder.thenDelete(Deletedelete)
 
 
-boolean
-Table.CheckAndMutateBuilder.thenDelete(Deletedelete)
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+RawAsyncTableImpl.CheckAndMutateBuilderImpl.thenDelete(Deletedelete)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
-AsyncTable.CheckAndMutateBuilder.thenDelete(Deletedelete)
+boolean
+HTable.CheckAndMutateBuilderImpl.thenDelete(Deletedelete)
 
 
 
@@ -533,27 +533,27 @@ service.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in 

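The class-use table above pairs each deprecated HTable.checkAndDelete overload with the builder-style replacement exposed on Table. A minimal sketch of that replacement follows; the table name, row, family, qualifier, and expected value are invented, and the surrounding Connection is assumed to be open.

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

static boolean deleteIfExpected(Connection connection) throws IOException {
  try (Table table = connection.getTable(TableName.valueOf("demo"))) {
    Delete delete = new Delete(Bytes.toBytes("row1"));
    // Apply the delete only when demo cf:q currently holds "expected".
    return table.checkAndMutate(Bytes.toBytes("row1"), Bytes.toBytes("cf"))
        .qualifier(Bytes.toBytes("q"))
        .ifEquals(Bytes.toBytes("expected"))
        .thenDelete(delete);
  }
}
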
[30/51] [partial] hbase-site git commit: Published site at .

2018-02-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cd17dc5/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 0c9079d..f713144 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -2055,119 +2055,119 @@ service.
 
 
 private TableName
-RegionCoprocessorRpcChannel.table
+SnapshotDescription.table
 
 
 private TableName
-SnapshotDescription.table
+RegionCoprocessorRpcChannel.table
 
 
 private TableName
-HRegionLocator.tableName
+RawAsyncTableImpl.tableName
 
 
 private TableName
-ScannerCallableWithReplicas.tableName
+RegionServerCallable.tableName
 
 
 protected TableName
-ClientScanner.tableName
+RegionAdminServiceCallable.tableName
 
 
 private TableName
-AsyncClientScanner.tableName
+BufferedMutatorImpl.tableName
 
 
 private TableName
-AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName
+AsyncProcessTask.tableName
 
 
 private TableName
-AsyncRpcRetryingCallerFactory.BatchCallerBuilder.tableName
+AsyncProcessTask.Builder.tableName
 
 
 private TableName
-RegionInfoBuilder.tableName
+AsyncRequestFutureImpl.tableName
 
 
-private TableName
-RegionInfoBuilder.MutableRegionInfo.tableName
+protected TableName
+TableBuilderBase.tableName
 
 
 private TableName
-RawAsyncTableImpl.tableName
+AsyncBatchRpcRetryingCaller.tableName
 
 
 private TableName
-RegionCoprocessorRpcChannelImpl.tableName
+RegionInfoBuilder.tableName
 
 
 private TableName
-AsyncTableRegionLocatorImpl.tableName
+RegionInfoBuilder.MutableRegionInfo.tableName
 
 
-protected TableName
-RegionAdminServiceCallable.tableName
+private TableName
+HTable.tableName
 
 
 private TableName
-HTable.tableName
+TableState.tableName
 
 
-private TableName
-BufferedMutatorImpl.tableName
+protected TableName
+RpcRetryingCallerWithReadReplicas.tableName
 
 
-private TableName
-AsyncBatchRpcRetryingCaller.tableName
+protected TableName
+AsyncTableBuilderBase.tableName
 
 
 private TableName
-BufferedMutatorParams.tableName
+AsyncSingleRequestRpcRetryingCaller.tableName
 
 
 private TableName
-HBaseAdmin.TableFuture.tableName
+ScannerCallableWithReplicas.tableName
 
 
-private TableName
-AsyncRequestFutureImpl.tableName
+protected TableName
+RawAsyncHBaseAdmin.TableProcedureBiConsumer.tableName
 
 
 private TableName
-AsyncProcessTask.tableName
+AsyncTableRegionLocatorImpl.tableName
 
 
 private TableName
-AsyncProcessTask.Builder.tableName
+HBaseAdmin.TableFuture.tableName
 
 
-protected TableName
-RawAsyncHBaseAdmin.TableProcedureBiConsumer.tableName
+private TableName
+RegionCoprocessorRpcChannelImpl.tableName
 
 
-private TableName
-RegionServerCallable.tableName
+protected TableName
+ClientScanner.tableName
 
 
 private TableName
-AsyncSingleRequestRpcRetryingCaller.tableName
+BufferedMutatorParams.tableName
 
 
-protected TableName
-TableBuilderBase.tableName
+private TableName
+AsyncClientScanner.tableName
 
 
-protected TableName
-RpcRetryingCallerWithReadReplicas.tableName
+private TableName
+AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName
 
 
-protected TableName
-AsyncTableBuilderBase.tableName
+private TableName
+AsyncRpcRetryingCallerFactory.BatchCallerBuilder.tableName
 
 
 private TableName
-TableState.tableName
+HRegionLocator.tableName
 
 
 
@@ -2209,83 +2209,83 @@ service.
 
 
 TableName
-AsyncTable.getName()
-Gets the fully qualified table name instance of this 
table.
-
+RawAsyncTableImpl.getName()
 
 
 TableName
-Table.getName()
+RegionLocator.getName()
 Gets the fully qualified table name instance of this 
table.
 
 
 
 TableName
-HRegionLocator.getName()
+BufferedMutatorImpl.getName()
 
 
 TableName
-AsyncTableRegionLocator.getName()
-Gets the fully qualified table name instance of the table 
whose region we want to locate.
+BufferedMutator.getName()
+Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
 
 
 
 TableName
-AsyncTableImpl.getName()
+HTable.getName()
 
 
 TableName
-RawAsyncTableImpl.getName()
+AsyncBufferedMutator.getName()
+Gets the fully qualified table name instance of the table 
that this
+ AsyncBufferedMutator writes to.
+
 
 
 TableName
-AsyncTableRegionLocatorImpl.getName()
+Table.getName()
+Gets the fully qualified table name instance of this 
table.
+
 
 
 TableName
-BufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
-
+AsyncTableImpl.getName()
 
 
 TableName
-RegionLocator.getName()
+AsyncTable.getName()
 Gets the fully qualified table name instance of this 
table.
 
 
 
 TableName
-AsyncBufferedMutatorImpl.getName()
+AsyncTableRegionLocatorImpl.getName()
 
 
 TableName
-HTable.getName()
+AsyncTableRegionLocator.getName()
+Gets the fully qualified table 

[30/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
index 5ba2deb..024eca4 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
@@ -106,11 +106,11 @@
 
 
 private AsyncConnectionImpl
-RawAsyncTableImpl.conn
+AsyncClientScanner.conn
 
 
 private AsyncConnectionImpl
-AsyncBatchRpcRetryingCaller.conn
+AsyncRpcRetryingCallerFactory.conn
 
 
 private AsyncConnectionImpl
@@ -118,19 +118,19 @@
 
 
 private AsyncConnectionImpl
-RegionCoprocessorRpcChannelImpl.conn
+RawAsyncTableImpl.conn
 
 
-protected AsyncConnectionImpl
-AsyncRpcRetryingCaller.conn
+private AsyncConnectionImpl
+RegionCoprocessorRpcChannelImpl.conn
 
 
 private AsyncConnectionImpl
-AsyncClientScanner.conn
+AsyncBatchRpcRetryingCaller.conn
 
 
-private AsyncConnectionImpl
-AsyncRpcRetryingCallerFactory.conn
+protected AsyncConnectionImpl
+AsyncRpcRetryingCaller.conn
 
 
 private AsyncConnectionImpl

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
index e71ca45..d6b1759 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
@@ -105,13 +105,13 @@
 
 
 
-private AsyncMasterRequestRpcRetryingCaller.CallableT
-AsyncMasterRequestRpcRetryingCaller.callable
-
-
 private AsyncMasterRequestRpcRetryingCaller.CallableT
 AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder.callable
 
+
+private AsyncMasterRequestRpcRetryingCaller.CallableT
+AsyncMasterRequestRpcRetryingCaller.callable
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
index 60fbcff..f31564e 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
@@ -106,11 +106,11 @@
 
 
 private AsyncProcess
-BufferedMutatorImpl.ap
+HTableMultiplexer.FlushWorker.ap
 
 
 private AsyncProcess
-HTableMultiplexer.FlushWorker.ap
+BufferedMutatorImpl.ap
 
 
 private AsyncProcess
@@ -137,11 +137,11 @@
 
 
 AsyncProcess
-ClusterConnection.getAsyncProcess()
+ConnectionImplementation.getAsyncProcess()
 
 
 AsyncProcess
-ConnectionImplementation.getAsyncProcess()
+ClusterConnection.getAsyncProcess()
 
 
 (package private) AsyncProcess

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html
index c610e19..9a8d746 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html
@@ -106,11 +106,11 @@
 
 
 private AsyncRegionLocator
-AsyncConnectionImpl.locator
+AsyncTableRegionLocatorImpl.locator
 
 
 private AsyncRegionLocator
-AsyncTableRegionLocatorImpl.locator
+AsyncConnectionImpl.locator
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html
index a970ce5..06fd193 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html
@@ -126,13 +126,13 @@
 
 
 
-(package private) AsyncRegistry
-AsyncConnectionImpl.registry
-
-
 private AsyncRegistry
 AsyncMetaRegionLocator.registry
 
+
+(package private) AsyncRegistry
+AsyncConnectionImpl.registry
+
 
 
 


[30/51] [partial] hbase-site git commit: Published site at .

2018-02-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f272b0e8/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index 6410159..428af8c 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -239,15 +239,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ServerName
-ServerMetricsBuilder.serverName
+HRegionLocation.serverName
 
 
 private ServerName
-ServerMetricsBuilder.ServerMetricsImpl.serverName
+ServerMetricsBuilder.serverName
 
 
 private ServerName
-HRegionLocation.serverName
+ServerMetricsBuilder.ServerMetricsImpl.serverName
 
 
 
@@ -306,7 +306,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ServerName
-ClusterMetricsBuilder.ClusterMetricsImpl.getMasterName()
+ClusterMetrics.getMasterName()
+Returns detailed information about the current master ServerName.
+
 
 
 ServerName
@@ -316,15 +318,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ServerName
-ClusterMetrics.getMasterName()
-Returns detailed information about the current master ServerName.
-
+ClusterMetricsBuilder.ClusterMetricsImpl.getMasterName()
 
 
 ServerName
-ServerLoad.getServerName()
-Deprecated.
-
+HRegionLocation.getServerName()
 
 
 ServerName
@@ -332,11 +330,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ServerName
-ServerMetricsBuilder.ServerMetricsImpl.getServerName()
+ServerLoad.getServerName()
+Deprecated.
+
 
 
 ServerName
-HRegionLocation.getServerName()
+ServerMetricsBuilder.ServerMetricsImpl.getServerName()
 
 
 ServerName
@@ -405,7 +405,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterMetricsBuilder.ClusterMetricsImpl.getBackupMasterNames()
+ClusterMetrics.getBackupMasterNames()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
@@ -415,7 +415,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterMetrics.getBackupMasterNames()
+ClusterMetricsBuilder.ClusterMetricsImpl.getBackupMasterNames()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
@@ -428,7 +428,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterMetricsBuilder.ClusterMetricsImpl.getDeadServerNames()
+ClusterMetrics.getDeadServerNames()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
@@ -438,7 +438,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
-ClusterMetrics.getDeadServerNames()
+ClusterMetricsBuilder.ClusterMetricsImpl.getDeadServerNames()
 
 
 private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerLoad
@@ -448,7 +448,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerMetrics
-ClusterMetricsBuilder.ClusterMetricsImpl.getLiveServerMetrics()
+ClusterMetrics.getLiveServerMetrics()
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerMetrics
@@ -458,7 +458,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapServerName,ServerMetrics
-ClusterMetrics.getLiveServerMetrics()
+ClusterMetricsBuilder.ClusterMetricsImpl.getLiveServerMetrics()
 
 
 static PairRegionInfo,ServerName
@@ -858,31 +858,31 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ServerName
-FastFailInterceptorContext.server
+AsyncRequestFutureImpl.SingleServerRequestRunnable.server
 
 
 private ServerName

[30/51] [partial] hbase-site git commit: Published site at .

2018-02-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c83a37c8/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
index 5ba2deb..024eca4 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncConnectionImpl.html
@@ -106,11 +106,11 @@
 
 
 private AsyncConnectionImpl
-RawAsyncTableImpl.conn
+AsyncClientScanner.conn
 
 
 private AsyncConnectionImpl
-AsyncBatchRpcRetryingCaller.conn
+AsyncRpcRetryingCallerFactory.conn
 
 
 private AsyncConnectionImpl
@@ -118,19 +118,19 @@
 
 
 private AsyncConnectionImpl
-RegionCoprocessorRpcChannelImpl.conn
+RawAsyncTableImpl.conn
 
 
-protected AsyncConnectionImpl
-AsyncRpcRetryingCaller.conn
+private AsyncConnectionImpl
+RegionCoprocessorRpcChannelImpl.conn
 
 
 private AsyncConnectionImpl
-AsyncClientScanner.conn
+AsyncBatchRpcRetryingCaller.conn
 
 
-private AsyncConnectionImpl
-AsyncRpcRetryingCallerFactory.conn
+protected AsyncConnectionImpl
+AsyncRpcRetryingCaller.conn
 
 
 private AsyncConnectionImpl

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c83a37c8/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
index e71ca45..d6b1759 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncMasterRequestRpcRetryingCaller.Callable.html
@@ -105,13 +105,13 @@
 
 
 
-private AsyncMasterRequestRpcRetryingCaller.CallableT
-AsyncMasterRequestRpcRetryingCaller.callable
-
-
 private AsyncMasterRequestRpcRetryingCaller.CallableT
 AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder.callable
 
+
+private AsyncMasterRequestRpcRetryingCaller.CallableT
+AsyncMasterRequestRpcRetryingCaller.callable
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c83a37c8/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
index 60fbcff..f31564e 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncProcess.html
@@ -106,11 +106,11 @@
 
 
 private AsyncProcess
-BufferedMutatorImpl.ap
+HTableMultiplexer.FlushWorker.ap
 
 
 private AsyncProcess
-HTableMultiplexer.FlushWorker.ap
+BufferedMutatorImpl.ap
 
 
 private AsyncProcess
@@ -137,11 +137,11 @@
 
 
 AsyncProcess
-ClusterConnection.getAsyncProcess()
+ConnectionImplementation.getAsyncProcess()
 
 
 AsyncProcess
-ConnectionImplementation.getAsyncProcess()
+ClusterConnection.getAsyncProcess()
 
 
 (package private) AsyncProcess

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c83a37c8/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html
index c610e19..9a8d746 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegionLocator.html
@@ -106,11 +106,11 @@
 
 
 private AsyncRegionLocator
-AsyncConnectionImpl.locator
+AsyncTableRegionLocatorImpl.locator
 
 
 private AsyncRegionLocator
-AsyncTableRegionLocatorImpl.locator
+AsyncConnectionImpl.locator
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c83a37c8/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html
index a970ce5..06fd193 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/AsyncRegistry.html
@@ -126,13 +126,13 @@
 
 
 
-(package private) AsyncRegistry
-AsyncConnectionImpl.registry
-
-
 private AsyncRegistry
 AsyncMetaRegionLocator.registry
 
+
+(package private) AsyncRegistry
+AsyncConnectionImpl.registry
+
 
 
 


[30/51] [partial] hbase-site git commit: Published site at .

2018-02-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
index f9a42eb..de8b65e 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
@@ -849,23 +849,23 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private Connection
-RestoreTablesClient.conn
+BackupAdminImpl.conn
 
 
-protected Connection
-TableBackupClient.conn
-
-
 (package private) Connection
 BackupCommands.Command.conn
 
+
+private Connection
+RestoreTablesClient.conn
+
 
 protected Connection
-BackupManager.conn
+TableBackupClient.conn
 
 
-private Connection
-BackupAdminImpl.conn
+protected Connection
+BackupManager.conn
 
 
 private Connection
@@ -1179,13 +1179,13 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 
-(package private) Connection
-ConnectionImplementation.MasterServiceState.connection
-
-
 private Connection
 RegionServerCallable.connection
 
+
+(package private) Connection
+ConnectionImplementation.MasterServiceState.connection
+
 
 
 
@@ -1230,20 +1230,20 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 
-Connection
-Admin.getConnection()
-
-
 (package private) Connection
 RegionAdminServiceCallable.getConnection()
 
-
+
 protected Connection
 HTable.getConnection()
 INTERNAL Used by unit tests and tools to do 
low-level
  manipulations.
 
 
+
+Connection
+Admin.getConnection()
+
 
 Connection
 HBaseAdmin.getConnection()
@@ -1557,11 +1557,11 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 private Connection
-TableInputFormatBase.connection
+HRegionPartitioner.connection
 
 
 private Connection
-HRegionPartitioner.connection
+TableInputFormatBase.connection
 
 
 
@@ -1594,22 +1594,22 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 
-private Connection
-TableOutputFormat.TableRecordWriter.connection
-
-
 (package private) Connection
 MultiTableOutputFormat.MultiTableRecordWriter.connection
 
+
+private Connection
+HRegionPartitioner.connection
+
 
 private Connection
-TableInputFormatBase.connection
-The underlying Connection 
of the table.
-
+TableOutputFormat.TableRecordWriter.connection
 
 
 private Connection
-HRegionPartitioner.connection
+TableInputFormatBase.connection
+The underlying Connection 
of the table.
+
 
 
 (package private) Connection
@@ -1694,15 +1694,15 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 private Connection
-CatalogJanitor.connection
+RegionPlacementMaintainer.connection
 
 
 private Connection
-SnapshotOfRegionAssignmentFromMeta.connection
+CatalogJanitor.connection
 
 
 private Connection
-RegionPlacementMaintainer.connection
+SnapshotOfRegionAssignmentFromMeta.connection
 
 
 
@@ -1839,31 +1839,31 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 private Connection
-TableQuotaSnapshotStore.conn
+QuotaObserverChore.conn
 
 
 private Connection
-SpaceQuotaRefresherChore.conn
+QuotaObserverChore.TablesWithQuotas.conn
 
 
 private Connection
-NamespaceQuotaSnapshotStore.conn
+SnapshotQuotaObserverChore.conn
 
 
 private Connection
-SnapshotQuotaObserverChore.conn
+NamespaceQuotaSnapshotStore.conn
 
 
 private Connection
-QuotaObserverChore.conn
+TableQuotaSnapshotStore.conn
 
 
 private Connection
-QuotaObserverChore.TablesWithQuotas.conn
+TableSpaceQuotaSnapshotNotifier.conn
 
 
 private Connection
-TableSpaceQuotaSnapshotNotifier.conn
+SpaceQuotaRefresherChore.conn
 
 
 private Connection
@@ -2197,11 +2197,11 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 Connection
-RegionCoprocessorHost.RegionEnvironment.createConnection(org.apache.hadoop.conf.Configurationconf)
+HRegionServer.createConnection(org.apache.hadoop.conf.Configurationconf)
 
 
 Connection
-HRegionServer.createConnection(org.apache.hadoop.conf.Configurationconf)
+RegionCoprocessorHost.RegionEnvironment.createConnection(org.apache.hadoop.conf.Configurationconf)
 
 
 Connection
@@ -2209,11 +2209,11 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 Connection
-RegionCoprocessorHost.RegionEnvironment.getConnection()
+HRegionServer.getConnection()
 
 
 Connection
-HRegionServer.getConnection()
+RegionCoprocessorHost.RegionEnvironment.getConnection()
 
 
 
@@ -2247,11 +2247,11 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 private Connection
-HFileReplicator.connection
+ReplicationSourceManager.connection
 
 
 private Connection
-ReplicationSourceManager.connection
+HFileReplicator.connection
 
 
 private 

[30/51] [partial] hbase-site git commit: Published site at .

2018-02-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6674e3ab/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
index ad601c4..53e455f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
@@ -1117,1183 +1117,1186 @@
 1109  @Nullable
 1110  public static TableState 
getTableState(Connection conn, TableName tableName)
   throws IOException {
-1112Table metaHTable = 
getMetaHTable(conn);
-1113Get get = new 
Get(tableName.getName()).addColumn(getTableFamily(), getTableStateColumn());
-1114long time = 
EnvironmentEdgeManager.currentTime();
-1115get.setTimeRange(0, time);
-1116Result result =
-1117metaHTable.get(get);
-1118return getTableState(result);
-1119  }
-1120
-1121  /**
-1122   * Fetch table states from META 
table
-1123   * @param conn connection to use
-1124   * @return map {tableName -gt; 
state}
-1125   * @throws IOException
-1126   */
-1127  public static MapTableName, 
TableState getTableStates(Connection conn)
-1128  throws IOException {
-1129final MapTableName, 
TableState states = new LinkedHashMap();
-1130Visitor collector = new Visitor() 
{
-1131  @Override
-1132  public boolean visit(Result r) 
throws IOException {
-1133TableState state = 
getTableState(r);
-1134if (state != null)
-1135  
states.put(state.getTableName(), state);
-1136return true;
-1137  }
-1138};
-1139fullScanTables(conn, collector);
-1140return states;
-1141  }
-1142
-1143  /**
-1144   * Updates state in META
-1145   * @param conn connection to use
-1146   * @param tableName table to look 
for
-1147   * @throws IOException
-1148   */
-1149  public static void 
updateTableState(Connection conn, TableName tableName,
-1150  TableState.State actual) throws 
IOException {
-1151updateTableState(conn, new 
TableState(tableName, actual));
-1152  }
-1153
-1154  /**
-1155   * Decode table state from META 
Result.
-1156   * Should contain cell from 
HConstants.TABLE_FAMILY
-1157   * @param r result
-1158   * @return null if not found
-1159   * @throws IOException
-1160   */
-1161  @Nullable
-1162  public static TableState 
getTableState(Result r)
-1163  throws IOException {
-1164Cell cell = 
r.getColumnLatestCell(getTableFamily(), getTableStateColumn());
-1165if (cell == null) return null;
-1166try {
-1167  return 
TableState.parseFrom(TableName.valueOf(r.getRow()),
-1168  
Arrays.copyOfRange(cell.getValueArray(),
-1169  cell.getValueOffset(), 
cell.getValueOffset() + cell.getValueLength()));
-1170} catch (DeserializationException e) 
{
-1171  throw new IOException(e);
-1172}
-1173
-1174  }
-1175
-1176  /**
-1177   * Implementations 'visit' a catalog 
table row.
-1178   */
-1179  public interface Visitor {
-1180/**
-1181 * Visit the catalog table row.
-1182 * @param r A row from catalog 
table
-1183 * @return True if we are to proceed 
scanning the table, else false if
-1184 * we are to stop now.
-1185 */
-1186boolean visit(final Result r) throws 
IOException;
-1187  }
-1188
-1189  /**
-1190   * Implementations 'visit' a catalog 
table row but with close() at the end.
-1191   */
-1192  public interface CloseableVisitor 
extends Visitor, Closeable {
-1193  }
-1194
-1195  /**
-1196   * A {@link Visitor} that collects 
content out of passed {@link Result}.
-1197   */
-1198  static abstract class 
CollectingVisitorT implements Visitor {
-1199final ListT results = new 
ArrayList();
-1200@Override
-1201public boolean visit(Result r) 
throws IOException {
-1202  if (r ==  null || r.isEmpty()) 
return true;
-1203  add(r);
-1204  return true;
-1205}
-1206
-1207abstract void add(Result r);
-1208
-1209/**
-1210 * @return Collected results; wait 
till visits complete to collect all
-1211 * possible results
-1212 */
-1213ListT getResults() {
-1214  return this.results;
-1215}
-1216  }
-1217
-1218  /**
-1219   * Collects all returned.
-1220   */
-1221  static class CollectAllVisitor extends 
CollectingVisitorResult {
-1222@Override
-1223void add(Result r) {
-1224  this.results.add(r);
-1225}
-1226  }
-1227
-1228  /**
-1229   * A Visitor that skips offline 
regions and split parents
-1230   */
-1231  public static abstract class 
DefaultVisitorBase implements Visitor {
-1232
-1233public DefaultVisitorBase() {
-1234  super();
-1235}
-1236
-1237public abstract boolean 
visitInternal(Result rowResult) throws IOException;
-1238
-1239@Override
-1240   

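The MetaTableAccessor diff above spells out the Visitor contract: visit(Result) returns true to continue scanning the catalog table and false to stop, and fullScanTables drives the visitor over the table rows. Here is a minimal sketch of a collecting visitor modelled on the getTableStates pattern shown in the removed lines; MetaTableAccessor is @InterfaceAudience.Private, so this is illustrative internal-API usage, not a public client recipe.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;

static List<TableName> tableRowsInMeta(Connection conn) throws IOException {
  List<TableName> names = new ArrayList<>();
  MetaTableAccessor.Visitor collector = r -> {
    if (r == null || r.isEmpty()) {
      return true;                              // empty row, keep scanning
    }
    names.add(TableName.valueOf(r.getRow()));   // a table row's key is the table name
    return true;                                // true = continue the full scan
  };
  MetaTableAccessor.fullScanTables(conn, collector);
  return names;
}
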
[30/51] [partial] hbase-site git commit: Published site at .

2018-02-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1f2eeb22/devapidocs/org/apache/hadoop/hbase/replication/regionserver/Replication.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/Replication.html 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/Replication.html
index 127069d..e94581b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/Replication.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/Replication.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":9,"i3":9,"i4":10,"i5":10,"i6":10,"i7":10,"i8":9,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":9,"i15":10,"i16":10,"i17":10};
-var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames
@@ -109,15 +109,15 @@ var activeTableTab = "activeTableTab";
 
 
 All Implemented Interfaces:
-ReplicationService, ReplicationSinkService, ReplicationSourceService, WALActionsListener
+ReplicationService, ReplicationSinkService, ReplicationSourceService
 
 
 
 @InterfaceAudience.Private
-public class Replication
+public class Replication
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-implements ReplicationSourceService, ReplicationSinkService, WALActionsListener
-Gateway to Replication.  Used by HRegionServer.
+implements ReplicationSourceService, ReplicationSinkService
+Gateway to Replication. Used by HRegionServer.
 
 
 
@@ -137,8 +137,10 @@ implements Class and Description
 
 
-(package private) static class
-Replication.ReplicationStatisticsThread
+private static class
+Replication.ReplicationStatisticsTask
+Statistics task.
+
 
 
 
@@ -160,21 +162,21 @@ implements conf
 
 
+private boolean
+isReplicationForBulkLoadDataEnabled
+
+
 private static org.slf4j.Logger
 LOG
 
-
+
 private PeerProcedureHandler
 peerProcedureHandler
 
-
+
 private ReplicationQueueStorage
 queueStorage
 
-
-private boolean
-replicationForBulkLoadData
-
 
 private ReplicationLoad
 replicationLoad
@@ -228,14 +230,6 @@ implements Empty constructor
 
 
-
-Replication(Serverserver,
-   org.apache.hadoop.fs.FileSystemfs,
-   org.apache.hadoop.fs.PathlogDir,
-   org.apache.hadoop.fs.PatholdLogDir)
-Instantiate the replication management (if rep is 
enabled).
-
-
 
 
 
@@ -246,7 +240,7 @@ implements 
-All MethodsStatic MethodsInstance MethodsConcrete Methods
+All MethodsInstance MethodsConcrete Methods
 
 Modifier and Type
 Method and Description
@@ -262,77 +256,40 @@ implements buildReplicationLoad()
 
 
-static void
-decorateMasterConfiguration(org.apache.hadoop.conf.Configurationconf)
-This method modifies the master's configuration in order to 
inject replication-related features
-
-
-
-static void
-decorateRegionServerConfiguration(org.apache.hadoop.conf.Configurationconf)
-This method modifies the region server's configuration in 
order to inject replication-related
- features
-
-
-
 PeerProcedureHandler
 getPeerProcedureHandler()
 Returns a Handler to handle peer procedures.
 
 
-
+
 ReplicationSourceManager
 getReplicationManager()
 Get the replication sources manager
 
 
-
-WALActionsListener
-getWALActionsListener()
-Returns a WALObserver for the service.
-
-
-
+
 void
-initialize(Serverserver,
+initialize(Serverserver,
   org.apache.hadoop.fs.FileSystemfs,
   org.apache.hadoop.fs.PathlogDir,
   org.apache.hadoop.fs.PatholdLogDir,
-  WALFileLengthProviderwalFileLengthProvider)
+  WALProviderwalProvider)
 Initializes the replication service object.
 
 
-
-static boolean
-isReplicationForBulkLoadDataEnabled(org.apache.hadoop.conf.Configurationc)
-
-
+
 void
 join()
 Join with the replication threads
 
 
-
-void
-postLogRoll(org.apache.hadoop.fs.PatholdPath,
-   org.apache.hadoop.fs.PathnewPath)
-The WAL has been rolled.
-
-
-
-void
-preLogRoll(org.apache.hadoop.fs.PatholdPath,
-  org.apache.hadoop.fs.PathnewPath)
-The WAL is going to be rolled.
-
-
-
+
 ReplicationLoad
 refreshAndGetReplicationLoad()
 Refresh and Get ReplicationLoad
 
 
-
+
 void
 replicateLogEntries(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntryentries,
CellScannercells,
@@ -342,33 +299,19 @@ implements Carry on the list of log entries 

[30/51] [partial] hbase-site git commit: Published site at .

2018-01-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/cc6597ec/testdevapidocs/org/apache/hadoop/hbase/TestMetaTableLocator.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/TestMetaTableLocator.html 
b/testdevapidocs/org/apache/hadoop/hbase/TestMetaTableLocator.html
index baf98d0..c8c1270 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/TestMetaTableLocator.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/TestMetaTableLocator.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class TestMetaTableLocator
+public class TestMetaTableLocator
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 Test MetaTableLocator
 
@@ -156,18 +156,22 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 abortable
 
 
+static HBaseClassTestRule
+CLASS_RULE
+
+
 private static org.slf4j.Logger
 LOG
 
-
+
 private static 
org.apache.hadoop.hbase.ServerName
 SN
 
-
+
 private static HBaseTestingUtility
 UTIL
 
-
+
 private 
org.apache.hadoop.hbase.zookeeper.ZKWatcher
 watcher
 
@@ -296,13 +300,22 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 Field Detail
+
+
+
+
+
+CLASS_RULE
+public static finalHBaseClassTestRule CLASS_RULE
+
+
 
 
 
 
 
 LOG
-private static finalorg.slf4j.Logger LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -311,7 +324,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 UTIL
-private static finalHBaseTestingUtility UTIL
+private static finalHBaseTestingUtility UTIL
 
 
 
@@ -320,7 +333,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 SN
-private static finalorg.apache.hadoop.hbase.ServerName SN
+private static finalorg.apache.hadoop.hbase.ServerName SN
 
 
 
@@ -329,7 +342,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 watcher
-privateorg.apache.hadoop.hbase.zookeeper.ZKWatcher watcher
+privateorg.apache.hadoop.hbase.zookeeper.ZKWatcher watcher
 
 
 
@@ -338,7 +351,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 abortable
-privateorg.apache.hadoop.hbase.Abortable abortable
+privateorg.apache.hadoop.hbase.Abortable abortable
 
 
 
@@ -355,7 +368,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 TestMetaTableLocator
-publicTestMetaTableLocator()
+publicTestMetaTableLocator()
 
 
 
@@ -372,7 +385,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 beforeClass
-public staticvoidbeforeClass()
+public staticvoidbeforeClass()
 throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -386,7 +399,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 afterClass
-public staticvoidafterClass()
+public staticvoidafterClass()
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:
@@ -400,7 +413,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 before
-publicvoidbefore()
+publicvoidbefore()
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:
@@ -414,7 +427,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 after
-publicvoidafter()
+publicvoidafter()
 
 
 
@@ -423,7 +436,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 testMetaLookup
-publicvoidtestMetaLookup()
+publicvoidtestMetaLookup()
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException,
http://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true;
 title="class or interface in java.lang">InterruptedException,

org.apache.hbase.thirdparty.com.google.protobuf.ServiceException,
@@ -444,7 +457,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 testInterruptWaitOnMeta
-publicvoidtestInterruptWaitOnMeta()
+publicvoidtestInterruptWaitOnMeta()
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException,
 http://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true;
 title="class or interface in java.lang">InterruptedException,
 
org.apache.hbase.thirdparty.com.google.protobuf.ServiceException
@@ -463,7 +476,7 @@ extends 

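The TestMetaTableLocator diff above adds a static CLASS_RULE field of type HBaseClassTestRule. In the HBase test suite that field is conventionally a JUnit @ClassRule created via HBaseClassTestRule.forClass, which enforces the timeout for the test's size category; the initializer below is that assumed convention, since the diff itself shows only the field declaration.

import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.junit.ClassRule;

public class TestMetaTableLocator {
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestMetaTableLocator.class);

  // existing fields and test methods (testMetaLookup, testInterruptWaitOnMeta, ...) unchanged
}
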
[30/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
index eb9e252..667152a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
@@ -28,22 +28,22 @@
 020
 021import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
 022import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
-023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
-024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
-025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
-026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
-027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
-028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
-029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
-030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
-031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
-032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
-033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
-034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
-035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
-036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
-037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
-038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
+024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
+026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
+027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
+028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
+029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
+030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
+031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
+032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
+033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
+034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
+035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
+036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
+037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
+038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
 039
 040import java.io.IOException;
 041import java.net.URI;
@@ -70,194 +70,194 @@
 062import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
 063import 
org.apache.hadoop.hbase.client.Connection;
 064import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-065import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-066import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-067import 
org.apache.yetus.audience.InterfaceAudience;
-068
-069/**
-070 * General backup commands, options and 
usage messages
-071 */
-072
+065import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+066import 
org.apache.yetus.audience.InterfaceAudience;
+067
+068import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+069
+070/**
+071 * General backup commands, options and 
usage messages
+072 */
 073@InterfaceAudience.Private
 074public final class BackupCommands {
-075
-076  public final static String 
INCORRECT_USAGE = "Incorrect usage";
-077
-078  public final static String 
TOP_LEVEL_NOT_ALLOWED =
-079  "Top level (root) folder is not 
allowed to be a backup destination";
-080
-081  public static final String USAGE = 

[30/51] [partial] hbase-site git commit: Published site at .

2018-01-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/96e5e102/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
index ca8be5e..b8e6dfa 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
@@ -6398,514 +6398,514 @@
 6390  int initialBatchProgress = 
scannerContext.getBatchProgress();
 6391  long initialSizeProgress = 
scannerContext.getDataSizeProgress();
 6392  long initialHeapSizeProgress = 
scannerContext.getHeapSizeProgress();
-6393  long initialTimeProgress = 
scannerContext.getTimeProgress();
-6394
-6395  // The loop here is used only when 
at some point during the next we determine
-6396  // that due to effects of filters 
or otherwise, we have an empty row in the result.
-6397  // Then we loop and try again. 
Otherwise, we must get out on the first iteration via return,
-6398  // "true" if there's more data to 
read, "false" if there isn't (storeHeap is at a stop row,
-6399  // and joinedHeap has no more data 
to read for the last row (if set, joinedContinuationRow).
-6400  while (true) {
-6401// Starting to scan a new row. 
Reset the scanner progress according to whether or not
-6402// progress should be kept.
-6403if 
(scannerContext.getKeepProgress()) {
-6404  // Progress should be kept. 
Reset to initial values seen at start of method invocation.
-6405  
scannerContext.setProgress(initialBatchProgress, initialSizeProgress,
-6406  initialHeapSizeProgress, 
initialTimeProgress);
-6407} else {
-6408  
scannerContext.clearProgress();
-6409}
-6410if (rpcCall.isPresent()) {
-6411  // If a user specifies a 
too-restrictive or too-slow scanner, the
-6412  // client might time out and 
disconnect while the server side
-6413  // is still processing the 
request. We should abort aggressively
-6414  // in that case.
-6415  long afterTime = 
rpcCall.get().disconnectSince();
-6416  if (afterTime >= 0) {
-6417throw new 
CallerDisconnectedException(
-6418"Aborting on region " + 
getRegionInfo().getRegionNameAsString() + ", call " +
-6419this + " after " + 
afterTime + " ms, since " +
-6420"caller 
disconnected");
-6421  }
-6422}
-6423
-6424// Let's see what we have in the 
storeHeap.
-6425Cell current = 
this.storeHeap.peek();
-6426
-6427boolean shouldStop = 
shouldStop(current);
-6428// When hasFilterRow is true 
it means that all the cells for a particular row must be
-6429// read before a filtering 
decision can be made. This means that filters where hasFilterRow
-6430// is true run the risk of 
encountering out of memory errors in the case that they are applied to a
-6431// table that has very large 
rows.
-6432boolean hasFilterRow = 
this.filter != null && this.filter.hasFilterRow();
-6433
-6434// If filter#hasFilterRow is 
true, partial results are not allowed since allowing them
-6435// would prevent the filters 
from being evaluated. Thus, if it is true, change the
-6436// scope of any limits that 
could potentially create partial results to
-6437// LimitScope.BETWEEN_ROWS so 
that those limits are not reached mid-row
-6438if (hasFilterRow) {
-6439  if (LOG.isTraceEnabled()) {
-6440
LOG.trace("filter#hasFilterRow is true which prevents partial results from 
being "
-6441+ " formed. Changing 
scope of limits that may create partials");
-6442  }
-6443  
scannerContext.setSizeLimitScope(LimitScope.BETWEEN_ROWS);
-6444  
scannerContext.setTimeLimitScope(LimitScope.BETWEEN_ROWS);
-6445}
-6446
-6447// Check if we were getting data 
from the joinedHeap and hit the limit.
-6448// If not, then it's main path - 
getting results from storeHeap.
-6449if (joinedContinuationRow == 
null) {
-6450  // First, check if we are at a 
stop row. If so, there are no more results.
-6451  if (shouldStop) {
-6452if (hasFilterRow) {
-6453  
filter.filterRowCells(results);
-6454}
-6455return 
scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
-6456  }
-6457
-6458  // Check if rowkey filter 
wants to exclude this row. If so, loop to next.
-6459  // Technically, if we hit 
limits before on this row, we don't need this call.
-6460  if (filterRowKey(current)) {
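
The disconnect check above (rpcCall.get().disconnectSince() >= 0) is a reusable server-side pattern: stop a long-running scan as soon as the caller is known to be gone instead of finishing work nobody will read. A minimal self-contained sketch of that pattern follows; CallHandle is a hypothetical stand-in for the HBase RpcCall handle, not part of its API.

import java.util.Optional;

// Hypothetical handle for the in-flight RPC; disconnectSince() returns the number of
// milliseconds since the client disconnected, or -1 while it is still connected.
interface CallHandle {
  long disconnectSince();
}

final class ScanLoopSketch {
  static void scan(Optional<CallHandle> rpcCall, Iterable<byte[]> rows) {
    for (byte[] row : rows) {
      if (rpcCall.isPresent()) {
        long afterTime = rpcCall.get().disconnectSince();
        if (afterTime >= 0) {
          // The caller gave up; abort aggressively rather than keep scanning.
          throw new RuntimeException("caller disconnected " + afterTime + " ms ago");
        }
      }
      process(row);
    }
  }

  private static void process(byte[] row) {
    // placeholder for per-row work
  }
}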

[30/51] [partial] hbase-site git commit: Published site at .

2018-01-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8118541f/devapidocs/org/apache/hadoop/hbase/filter/FilterWrapper.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/FilterWrapper.html 
b/devapidocs/org/apache/hadoop/hbase/filter/FilterWrapper.html
index 6bc8595..d7d7ff5 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/FilterWrapper.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/FilterWrapper.html
@@ -612,7 +612,7 @@ public
 
 filterCell
-publicFilter.ReturnCodefilterCell(Cellc)
+publicFilter.ReturnCodefilterCell(Cellc)
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
class:Filter
 A way to filter based on the column family, column 
qualifier and/or the column value. Return
@@ -649,7 +649,7 @@ public
 
 transformCell
-publicCelltransformCell(Cellv)
+publicCelltransformCell(Cellv)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
class:Filter
 Give the filter a chance to transform the passed KeyValue. 
If the Cell is changed a new
@@ -680,7 +680,7 @@ public
 
 hasFilterRow
-publicbooleanhasFilterRow()
+publicbooleanhasFilterRow()
 Description copied from 
class:Filter
 Primarily used to check for conflicts with scans(such as 
scans that do not read a full row at a
  time).
@@ -698,7 +698,7 @@ public
 
 filterRowCells
-publicvoidfilterRowCells(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellkvs)
+publicvoidfilterRowCells(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellkvs)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
class:Filter
 Chance to alter the list of Cells to be submitted. 
Modifications to the list will carry on
@@ -721,7 +721,7 @@ public
 
 filterRowCellsWithRet
-publicFilterWrapper.FilterRowRetCodefilterRowCellsWithRet(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellkvs)
+publicFilterWrapper.FilterRowRetCodefilterRowCellsWithRet(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellkvs)
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:
@@ -735,7 +735,7 @@ public
 
 isFamilyEssential
-publicbooleanisFamilyEssential(byte[]name)
+publicbooleanisFamilyEssential(byte[]name)
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
class:Filter
 Check that given column family is essential for filter to 
check row. Most filters always return
@@ -759,7 +759,7 @@ public
 
 areSerializedFieldsEqual
-booleanareSerializedFieldsEqual(Filtero)
+booleanareSerializedFieldsEqual(Filtero)
 Description copied from 
class:Filter
 Concrete implementers can signal a failure condition in 
their code by throwing an
  http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8118541f/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 1887530..509b010 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -183,13 +183,13 @@
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
 org.apache.hadoop.hbase.filter.Filter.ReturnCode
-org.apache.hadoop.hbase.filter.FuzzyRowFilter.SatisfiesCode
+org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
 org.apache.hadoop.hbase.filter.FilterList.Operator
+org.apache.hadoop.hbase.filter.FuzzyRowFilter.SatisfiesCode

[30/51] [partial] hbase-site git commit: Published site at .

2018-01-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/14db89d7/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html
index b50a65f..7271567 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.html
@@ -1718,312 +1718,314 @@
 1710
 1711public WorkerThread(final 
ThreadGroup group) {
 1712  super(group, "ProcExecWrkr-" + 
workerId.incrementAndGet());
-1713}
-1714
-1715@Override
-1716public void sendStopSignal() {
-1717  scheduler.signalAll();
-1718}
-1719
-1720@Override
-1721public void run() {
-1722  long lastUpdate = 
EnvironmentEdgeManager.currentTime();
-1723  try {
-1724while (isRunning() && 
keepAlive(lastUpdate)) {
-1725  this.activeProcedure = 
scheduler.poll(keepAliveTime, TimeUnit.MILLISECONDS);
-1726  if (this.activeProcedure == 
null) continue;
-1727  int activeCount = 
activeExecutorCount.incrementAndGet();
-1728  int runningCount = 
store.setRunningProcedureCount(activeCount);
-1729  if (LOG.isTraceEnabled()) {
-1730LOG.trace("Execute pid=" + 
this.activeProcedure.getProcId() +
-1731" runningCount=" + 
runningCount + ", activeCount=" + activeCount);
-1732  }
-1733  
executionStartTime.set(EnvironmentEdgeManager.currentTime());
-1734  try {
-1735
executeProcedure(this.activeProcedure);
-1736  } catch (AssertionError e) {
-1737LOG.info("ASSERT pid=" + 
this.activeProcedure.getProcId(), e);
-1738throw e;
-1739  } finally {
-1740activeCount = 
activeExecutorCount.decrementAndGet();
-1741runningCount = 
store.setRunningProcedureCount(activeCount);
-1742if (LOG.isTraceEnabled()) 
{
-1743  LOG.trace("Halt pid=" + 
this.activeProcedure.getProcId() +
-1744  " runningCount=" + 
runningCount + ", activeCount=" + activeCount);
-1745}
-1746this.activeProcedure = 
null;
-1747lastUpdate = 
EnvironmentEdgeManager.currentTime();
-1748
executionStartTime.set(Long.MAX_VALUE);
-1749  }
-1750}
-1751  } catch (Throwable t) {
-1752LOG.warn("Worker terminating 
UNNATURALLY " + this.activeProcedure, t);
-1753  } finally {
-1754LOG.debug("Worker 
terminated.");
-1755  }
-1756  workerThreads.remove(this);
-1757}
-1758
-1759@Override
-1760public String toString() {
-1761  Procedure<?> p = 
this.activeProcedure;
-1762  return getName() + "(pid=" + (p == 
null? Procedure.NO_PROC_ID: p.getProcId() + ")");
-1763}
-1764
-1765/**
-1766 * @return the time since the 
current procedure is running
-1767 */
-1768public long getCurrentRunTime() {
-1769  return 
EnvironmentEdgeManager.currentTime() - executionStartTime.get();
-1770}
-1771
-1772private boolean keepAlive(final long 
lastUpdate) {
-1773  if (workerThreads.size() <= 
corePoolSize) return true;
-1774  return 
(EnvironmentEdgeManager.currentTime() - lastUpdate) < keepAliveTime;
-1775}
-1776  }
-1777
-1778  /**
-1779   * Runs task on a period such as check 
for stuck workers.
-1780   * @see InlineChore
-1781   */
-1782  private final class 
TimeoutExecutorThread extends StoppableThread {
-1783private final 
DelayQueue<DelayedWithTimeout> queue = new DelayQueue<>();
-1784
-1785public TimeoutExecutorThread(final 
ThreadGroup group) {
-1786  super(group, "ProcExecTimeout");
-1787}
-1788
-1789@Override
-1790public void sendStopSignal() {
-1791  
queue.add(DelayedUtil.DELAYED_POISON);
-1792}
-1793
-1794@Override
-1795public void run() {
-1796  final boolean traceEnabled = 
LOG.isTraceEnabled();
-1797  while (isRunning()) {
-1798final DelayedWithTimeout task = 
DelayedUtil.takeWithoutInterrupt(queue);
-1799if (task == null || task == 
DelayedUtil.DELAYED_POISON) {
-1800  // the executor may be 
shutting down,
-1801  // and the task is just the 
shutdown request
-1802  continue;
-1803}
-1804
-1805if (traceEnabled) {
-1806  LOG.trace("Executing " + 
task);
-1807}
-1808
-1809// execute the task
-1810if (task instanceof InlineChore) 
{
-1811  
execInlineChore((InlineChore)task);
-1812} else if (task instanceof 
DelayedProcedure) {
-1813  
execDelayedProcedure((DelayedProcedure)task);
-1814} else {
-1815  LOG.error("CODE-BUG unknown 
timeout task type " + task);
-1816}
-1817  }
-1818}
-1819
-1820public void 
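
The TimeoutExecutorThread above drains a DelayQueue and unblocks itself on shutdown by enqueuing a poison task (DELAYED_POISON). A self-contained sketch of that delay-queue-plus-poison-pill pattern using only java.util.concurrent types; the class and method names here are illustrative, not HBase API.

import java.util.concurrent.DelayQueue;
import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;

final class TimeoutLoop implements Runnable {
  static final class Task implements Delayed {
    final long fireAtNanos;
    final Runnable action;
    Task(long delayMs, Runnable action) {
      this.fireAtNanos = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(delayMs);
      this.action = action;
    }
    @Override public long getDelay(TimeUnit unit) {
      return unit.convert(fireAtNanos - System.nanoTime(), TimeUnit.NANOSECONDS);
    }
    @Override public int compareTo(Delayed o) {
      return Long.compare(getDelay(TimeUnit.NANOSECONDS), o.getDelay(TimeUnit.NANOSECONDS));
    }
  }

  private static final Task POISON = new Task(0, () -> { });
  private final DelayQueue<Task> queue = new DelayQueue<>();
  private volatile boolean running = true;

  void add(long delayMs, Runnable action) { queue.add(new Task(delayMs, action)); }
  void shutdown() { running = false; queue.add(POISON); }  // wakes the loop immediately

  @Override public void run() {
    while (running) {
      Task task;
      try {
        task = queue.take();             // blocks until some task is due
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        return;
      }
      if (task == POISON) continue;      // shutdown request; loop re-checks running
      task.action.run();
    }
  }
}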

[30/51] [partial] hbase-site git commit: Published site at .

2018-01-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0b638133/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.ZKTask.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.ZKTask.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.ZKTask.html
index 5b66298..ea864e9 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.ZKTask.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.ZKTask.html
@@ -39,319 +39,329 @@
 031import 
java.util.concurrent.atomic.AtomicBoolean;
 032
 033import 
org.apache.hadoop.conf.Configuration;
-034import 
org.apache.yetus.audience.InterfaceAudience;
-035import 
org.apache.zookeeper.KeeperException;
-036import 
org.apache.zookeeper.KeeperException.Code;
-037import org.apache.zookeeper.ZooKeeper;
-038import org.apache.zookeeper.data.Stat;
-039import org.slf4j.Logger;
-040import org.slf4j.LoggerFactory;
-041import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-042
-043/**
-044 * A very simple read only zookeeper 
implementation without watcher support.
-045 */
-046@InterfaceAudience.Private
-047public final class ReadOnlyZKClient 
implements Closeable {
-048
-049  private static final Logger LOG = 
LoggerFactory.getLogger(ReadOnlyZKClient.class);
-050
-051  public static final String 
RECOVERY_RETRY = "zookeeper.recovery.retry";
-052
-053  private static final int 
DEFAULT_RECOVERY_RETRY = 30;
-054
-055  public static final String 
RECOVERY_RETRY_INTERVAL_MILLIS =
-056  
"zookeeper.recovery.retry.intervalmill";
-057
-058  private static final int 
DEFAULT_RECOVERY_RETRY_INTERVAL_MILLIS = 1000;
-059
-060  public static final String 
KEEPALIVE_MILLIS = "zookeeper.keep-alive.time";
-061
-062  private static final int 
DEFAULT_KEEPALIVE_MILLIS = 60000;
-063
-064  private static final 
EnumSet<Code> FAIL_FAST_CODES = EnumSet.of(Code.NOAUTH, 
Code.AUTHFAILED);
-065
-066  private final String connectString;
-067
-068  private final int sessionTimeoutMs;
-069
-070  private final int maxRetries;
-071
-072  private final int retryIntervalMs;
-073
-074  private final int keepAliveTimeMs;
-075
-076  private static abstract class Task 
implements Delayed {
-077
-078protected long time = 
System.nanoTime();
-079
-080public boolean needZk() {
-081  return false;
-082}
-083
-084public void exec(ZooKeeper zk) {
-085}
-086
-087public void connectFailed(IOException 
e) {
-088}
-089
-090public void closed(IOException e) {
-091}
-092
-093@Override
-094public int compareTo(Delayed o) {
-095  Task that = (Task) o;
-096  int c = Long.compare(time, 
that.time);
-097  if (c != 0) {
-098return c;
-099  }
-100  return 
Integer.compare(System.identityHashCode(this), 
System.identityHashCode(that));
-101}
-102
-103@Override
-104public long getDelay(TimeUnit unit) 
{
-105  return unit.convert(time - 
System.nanoTime(), TimeUnit.NANOSECONDS);
-106}
-107  }
-108
-109  private static final Task CLOSE = new 
Task() {
-110  };
-111
-112  private final DelayQueue<Task> 
tasks = new DelayQueue<>();
-113
-114  private final AtomicBoolean closed = 
new AtomicBoolean(false);
-115
-116  private ZooKeeper zookeeper;
-117
-118  private String getId() {
-119return String.format("0x%08x", 
System.identityHashCode(this));
-120  }
-121
-122  public ReadOnlyZKClient(Configuration 
conf) {
-123this.connectString = 
ZKConfig.getZKQuorumServersString(conf);
-124this.sessionTimeoutMs = 
conf.getInt(ZK_SESSION_TIMEOUT, DEFAULT_ZK_SESSION_TIMEOUT);
-125this.maxRetries = 
conf.getInt(RECOVERY_RETRY, DEFAULT_RECOVERY_RETRY);
-126this.retryIntervalMs =
-127
conf.getInt(RECOVERY_RETRY_INTERVAL_MILLIS, 
DEFAULT_RECOVERY_RETRY_INTERVAL_MILLIS);
-128this.keepAliveTimeMs = 
conf.getInt(KEEPALIVE_MILLIS, DEFAULT_KEEPALIVE_MILLIS);
-129LOG.info("Start read only zookeeper 
connection " + getId() + " to " + connectString +
-130", session timeout " + 
sessionTimeoutMs + " ms, retries " + maxRetries +
-131", retry interval " + 
retryIntervalMs + " ms, keep alive " + keepAliveTimeMs + " ms");
-132Thread t = new Thread(this::run, 
"ReadOnlyZKClient");
-133t.setDaemon(true);
-134t.start();
-135  }
-136
-137  private abstract class ZKTask<T> 
extends Task {
-138
-139protected final String path;
-140
-141private final 
CompletableFuture<T> future;
-142
-143private final String operationType;
-144
-145private int retries;
-146
-147protected ZKTask(String path, 
CompletableFuture<T> future, String operationType) {
-148  this.path = path;
-149  this.future = future;
-150  this.operationType = 
operationType;
-151}
-152
-153protected final void 
onComplete(ZooKeeper zk, int rc, T ret, boolean errorIfNoNode) {
-154  
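
A hedged usage sketch of the class above: build it from a Configuration (the constructor and the zookeeper.recovery.retry key both appear in the excerpt) and issue an asynchronous read. The get(String) method returning CompletableFuture<byte[]> and the /hbase/master znode path are assumptions for illustration; check the version in use.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.zookeeper.ReadOnlyZKClient;

public class ReadOnlyZkExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    conf.setInt("zookeeper.recovery.retry", 3);       // RECOVERY_RETRY, shown above
    // ReadOnlyZKClient implements Closeable, so try-with-resources shuts it down.
    try (ReadOnlyZKClient zk = new ReadOnlyZKClient(conf)) {
      byte[] data = zk.get("/hbase/master").get();    // assumed async API; block for the result
      System.out.println("znode bytes: " + (data == null ? 0 : data.length));
    }
  }
}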

[30/51] [partial] hbase-site git commit: Published site at .

2018-01-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f183e80f/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
index b14bbed..93cf760 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
@@ -1490,9 +1490,9 @@
 1482// unless I pass along via the 
conf.
 1483Configuration confForWAL = new 
Configuration(c);
 1484confForWAL.set(HConstants.HBASE_DIR, 
rootdir.toString());
-1485WAL wal = (new 
WALFactory(confForWAL,
-1486
Collections.WALActionsListener singletonList(new MetricsWAL()), 
walFactoryID))
-1487
.getWAL(metaHRI.getEncodedNameAsBytes(), metaHRI.getTable().getNamespace());
+1485WAL wal =
+1486  new WALFactory(confForWAL, 
Collections.WALActionsListener singletonList(new MetricsWAL()),
+1487  
walFactoryID).getWAL(metaHRI);
 1488HRegion meta = 
HRegion.createHRegion(metaHRI, rootdir, c, metaDescriptor, wal);
 1489
MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, true);
 1490return meta;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f183e80f/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
index b14bbed..93cf760 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
@@ -1490,9 +1490,9 @@
 1482// unless I pass along via the 
conf.
 1483Configuration confForWAL = new 
Configuration(c);
 1484confForWAL.set(HConstants.HBASE_DIR, 
rootdir.toString());
-1485WAL wal = (new 
WALFactory(confForWAL,
-1486
Collections.<WALActionsListener> singletonList(new MetricsWAL()), 
walFactoryID))
-1487
.getWAL(metaHRI.getEncodedNameAsBytes(), metaHRI.getTable().getNamespace());
+1485WAL wal =
+1486  new WALFactory(confForWAL, 
Collections.<WALActionsListener> singletonList(new MetricsWAL()),
+1487  
walFactoryID).getWAL(metaHRI);
 1488HRegion meta = 
HRegion.createHRegion(metaHRI, rootdir, c, metaDescriptor, wal);
 1489
MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, true);
 1490return meta;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f183e80f/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
index b14bbed..93cf760 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
@@ -1490,9 +1490,9 @@
 1482// unless I pass along via the 
conf.
 1483Configuration confForWAL = new 
Configuration(c);
 1484confForWAL.set(HConstants.HBASE_DIR, 
rootdir.toString());
-1485WAL wal = (new 
WALFactory(confForWAL,
-1486
Collections.<WALActionsListener> singletonList(new MetricsWAL()), 
walFactoryID))
-1487
.getWAL(metaHRI.getEncodedNameAsBytes(), metaHRI.getTable().getNamespace());
+1485WAL wal =
+1486  new WALFactory(confForWAL, 
Collections.<WALActionsListener> singletonList(new MetricsWAL()),
+1487  
walFactoryID).getWAL(metaHRI);
 1488HRegion meta = 
HRegion.createHRegion(metaHRI, rootdir, c, metaDescriptor, wal);
 1489
MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, true);
 1490return meta;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f183e80f/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
index b14bbed..93cf760 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
@@ -1490,9 +1490,9 @@
 1482// unless I pass along via the 
conf.
 1483Configuration confForWAL = new 

[30/51] [partial] hbase-site git commit: Published site at .

2018-01-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/49431b18/devapidocs/org/apache/hadoop/hbase/client/Admin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Admin.html 
b/devapidocs/org/apache/hadoop/hbase/client/Admin.html
index 1db8dc5..eb12d14 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Admin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Admin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":50,"i4":6,"i5":6,"i6":18,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":50,"i13":50,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":38,"i26":38,"i27":38,"i28":38,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":50,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":38,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":38,"i55":6,"i56":6,"i57":38,"i58":38,"i59":6,"i60":38,"i61":6,"i62":6,"i63":6,"i64":6,"i65":38,"i66":38,"i67":50,"i68":6,"i69":6,"i70":6,"i71":6,"i72":38,"i73":38,"i74":6,"i75":50,"i76":6,"i77":6,"i78":6,"i79":38,"i80":38,"i81":18,"i82":18,"i83":6,"i84":50,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":18,"i95":18,"i96":50,"i97":18,"i98":6,"i99":38,"i100":6,"i101":6,"i102":6,"i103":38,"i104":18,"i105":6,"i106":6,"i107":6,"i108":18,"i109":6,"i110":6,"i111":38,"i112":38,"i113":38,"i114":38,"i115":6,"i116":6,"i
 
117":6,"i118":6,"i119":6,"i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":50,"i126":6,"i127":38,"i128":6,"i129":6,"i130":18,"i131":6,"i132":6,"i133":6,"i134":6,"i135":6,"i136":6,"i137":6,"i138":38,"i139":6,"i140":6,"i141":6,"i142":6,"i143":6,"i144":38,"i145":6,"i146":6,"i147":6,"i148":38,"i149":38,"i150":6,"i151":38,"i152":38,"i153":38,"i154":38,"i155":38,"i156":6,"i157":38,"i158":6,"i159":6,"i160":6,"i161":6,"i162":6,"i163":6,"i164":6,"i165":38,"i166":6,"i167":6,"i168":6,"i169":50,"i170":6,"i171":6,"i172":6,"i173":6,"i174":6,"i175":38,"i176":6,"i177":38,"i178":6,"i179":6,"i180":6,"i181":6,"i182":6,"i183":6,"i184":6,"i185":6,"i186":6,"i187":6,"i188":6,"i189":6,"i190":6,"i191":6,"i192":6,"i193":50,"i194":6,"i195":50,"i196":50,"i197":50,"i198":6,"i199":50,"i200":6,"i201":6,"i202":6,"i203":6,"i204":6,"i205":6,"i206":6,"i207":6,"i208":38,"i209":38,"i210":6,"i211":6,"i212":6,"i213":6,"i214":6,"i215":50,"i216":6,"i217":6,"i218":6,"i219":6,"i220":6,"i221":6};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":50,"i4":6,"i5":6,"i6":18,"i7":6,"i8":18,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":50,"i15":50,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":38,"i28":38,"i29":38,"i30":38,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":50,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":38,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":38,"i57":6,"i58":6,"i59":38,"i60":38,"i61":6,"i62":38,"i63":6,"i64":6,"i65":6,"i66":6,"i67":6,"i68":38,"i69":38,"i70":50,"i71":6,"i72":6,"i73":6,"i74":6,"i75":6,"i76":38,"i77":38,"i78":6,"i79":50,"i80":6,"i81":6,"i82":6,"i83":38,"i84":38,"i85":18,"i86":18,"i87":6,"i88":50,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":18,"i99":18,"i100":50,"i101":18,"i102":6,"i103":38,"i104":6,"i105":6,"i106":6,"i107":38,"i108":18,"i109":6,"i110":6,"i111":6,"i112":18,"i113":6,"i114":6,"i115":38,"i116":38,"i1
 
17":38,"i118":38,"i119":6,"i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6,"i127":6,"i128":6,"i129":50,"i130":6,"i131":38,"i132":6,"i133":6,"i134":18,"i135":6,"i136":6,"i137":6,"i138":6,"i139":6,"i140":6,"i141":6,"i142":38,"i143":6,"i144":6,"i145":6,"i146":6,"i147":6,"i148":38,"i149":6,"i150":6,"i151":6,"i152":38,"i153":38,"i154":6,"i155":38,"i156":38,"i157":38,"i158":38,"i159":38,"i160":6,"i161":38,"i162":6,"i163":6,"i164":6,"i165":6,"i166":6,"i167":6,"i168":6,"i169":38,"i170":6,"i171":6,"i172":6,"i173":50,"i174":6,"i175":6,"i176":6,"i177":6,"i178":6,"i179":38,"i180":6,"i181":38,"i182":6,"i183":6,"i184":6,"i185":6,"i186":6,"i187":6,"i188":6,"i189":6,"i190":6,"i191":6,"i192":6,"i193":6,"i194":6,"i195":6,"i196":6,"i197":6,"i198":50,"i199":6,"i200":50,"i201":50,"i202":50,"i203":6,"i204":50,"i205":6,"i206":6,"i207":6,"i208":6,"i209":6,"i210":6,"i211":6,"i212":6,"i213":38,"i214":38,"i215":6,"i216":6,"i217":6,"i218":6,"i219":6,"i220":50,"i221":6,"i222":6,"i223":6,"i224":6,"
 i225":6,"i226":6,"i227":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],16:["t5","Default 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -204,31 +204,46 @@ extends 
+default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true;
 title="class or interface in java.util.concurrent">Futurehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 

[30/51] [partial] hbase-site git commit: Published site at .

2018-01-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c7c40c62/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html 
b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
index 29c4f48..81617c2 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
@@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Private
  @InterfaceStability.Evolving
-protected static class HBaseAdmin.ProcedureFutureV
+protected static class HBaseAdmin.ProcedureFutureV
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true;
 title="class or interface in java.util.concurrent">FutureV
 Future that waits on a procedure result.
@@ -328,7 +328,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 exception
-privatehttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutionException.html?is-external=true;
 title="class or interface in java.util.concurrent">ExecutionException exception
+privatehttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutionException.html?is-external=true;
 title="class or interface in java.util.concurrent">ExecutionException exception
 
 
 
@@ -337,7 +337,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 procResultFound
-privateboolean procResultFound
+privateboolean procResultFound
 
 
 
@@ -346,7 +346,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 done
-privateboolean done
+privateboolean done
 
 
 
@@ -355,7 +355,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 cancelled
-privateboolean cancelled
+privateboolean cancelled
 
 
 
@@ -364,7 +364,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 result
-privateV result
+privateV result
 
 
 
@@ -373,7 +373,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 admin
-private finalHBaseAdmin admin
+private finalHBaseAdmin admin
 
 
 
@@ -382,7 +382,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 procId
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long procId
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long procId
 
 
 
@@ -399,7 +399,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 ProcedureFuture
-publicProcedureFuture(HBaseAdminadmin,
+publicProcedureFuture(HBaseAdminadmin,
http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">LongprocId)
 
 
@@ -417,7 +417,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 cancel
-publicbooleancancel(booleanmayInterruptIfRunning)
+publicbooleancancel(booleanmayInterruptIfRunning)
 
 Specified by:
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true#cancel-boolean-;
 title="class or interface in java.util.concurrent">cancelin 
interfacehttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true;
 title="class or interface in java.util.concurrent">FutureV
@@ -430,7 +430,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 isCancelled
-publicbooleanisCancelled()
+publicbooleanisCancelled()
 
 Specified by:
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true#isCancelled--;
 title="class or interface in 
java.util.concurrent">isCancelledin interfacehttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true;
 title="class or interface in java.util.concurrent">FutureV
@@ -443,7 +443,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 abortProcedureResult
-protectedorg.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponseabortProcedureResult(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequestrequest)
+protectedorg.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponseabortProcedureResult(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequestrequest)

   throws 
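
The ProcedureFuture above backs the java.util.concurrent.Future instances returned by HBaseAdmin's asynchronous operations, so client code simply waits on the returned Future. A hedged sketch; the table name is hypothetical and disableTableAsync stands in for any of the Admin methods that return such a Future.

import java.util.concurrent.Future;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class DisableTableAsyncExample {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Admin admin = conn.getAdmin()) {
      Future<Void> f = admin.disableTableAsync(TableName.valueOf("t1"));  // returns immediately
      f.get();  // blocks until the master-side procedure completes, or throws on failure
    }
  }
}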

[30/51] [partial] hbase-site git commit: Published site at .

2018-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bb398572/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
index 5b3b750..a1f3f7e 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.DeleteNamespaceProcedureBiConsumer.html
@@ -97,3307 +97,3304 @@
 089import 
org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
 090import 
org.apache.hbase.thirdparty.io.netty.util.Timeout;
 091import 
org.apache.hbase.thirdparty.io.netty.util.TimerTask;
-092import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-093import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-094import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-095import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest;
-096import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesResponse;
-097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersRequest;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersResponse;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersRequest;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersResponse;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-133import 

[30/51] [partial] hbase-site git commit: Published site at .

2018-01-01 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html
index 04bc645..10ca1e3 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.html
@@ -558,6 +558,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-Copyright  20072017 https://www.apache.org/;>The Apache Software Foundation. All rights 
reserved.
+Copyright  20072018 https://www.apache.org/;>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.html
index 4cdad1c..58e5ca7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder.html
@@ -468,6 +468,6 @@ extends Copyright  20072017 https://www.apache.org/;>The Apache Software Foundation. All rights 
reserved.
+Copyright  20072018 https://www.apache.org/;>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BatchCallerBuilder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BatchCallerBuilder.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BatchCallerBuilder.html
index 688fbe6..dd668da 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BatchCallerBuilder.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BatchCallerBuilder.html
@@ -468,6 +468,6 @@ extends Copyright  20072017 https://www.apache.org/;>The Apache Software Foundation. All rights 
reserved.
+Copyright  20072018 https://www.apache.org/;>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BuilderBase.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BuilderBase.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BuilderBase.html
index 9699110..ae8e3f7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BuilderBase.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.BuilderBase.html
@@ -305,6 +305,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-Copyright  20072017 https://www.apache.org/;>The Apache Software Foundation. All rights 
reserved.
+Copyright  20072018 https://www.apache.org/;>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder.html
index ce0f985..43cda40 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder.html
@@ -445,6 +445,6 @@ extends Copyright  20072017 https://www.apache.org/;>The Apache Software Foundation. All rights 
reserved.
+Copyright  20072018 https://www.apache.org/;>The Apache Software Foundation. All rights 
reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.html
 

[30/51] [partial] hbase-site git commit: Published site at .

2017-12-30 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/83bf6175/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.html
index d405629..3ec93bb 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionImplementation.ServerErrorTracker.html
@@ -371,1638 +371,1646 @@
 363if (params.getWriteBufferSize() == 
BufferedMutatorParams.UNSET) {
 364  
params.writeBufferSize(connectionConfig.getWriteBufferSize());
 365}
-366if (params.getMaxKeyValueSize() == 
BufferedMutatorParams.UNSET) {
-367  
params.maxKeyValueSize(connectionConfig.getMaxKeyValueSize());
-368}
-369// Look to see if an alternate 
BufferedMutation implementation is wanted.
-370// Look in params and in config. If 
null, use default.
-371String implementationClassName = 
params.getImplementationClassName();
-372if (implementationClassName == null) 
{
-373  implementationClassName = 
this.alternateBufferedMutatorClassName;
-374}
-375if (implementationClassName == null) 
{
-376  return new 
BufferedMutatorImpl(this, rpcCallerFactory, rpcControllerFactory, params);
-377}
-378try {
-379  return 
(BufferedMutator)ReflectionUtils.newInstance(Class.forName(implementationClassName),
-380  this, rpcCallerFactory, 
rpcControllerFactory, params);
-381} catch (ClassNotFoundException e) 
{
-382  throw new RuntimeException(e);
-383}
-384  }
-385
-386  @Override
-387  public BufferedMutator 
getBufferedMutator(TableName tableName) {
-388return getBufferedMutator(new 
BufferedMutatorParams(tableName));
-389  }
-390
-391  @Override
-392  public RegionLocator 
getRegionLocator(TableName tableName) throws IOException {
-393return new HRegionLocator(tableName, 
this);
-394  }
-395
-396  @Override
-397  public Admin getAdmin() throws 
IOException {
-398return new HBaseAdmin(this);
-399  }
-400
-401  @Override
-402  public MetricsConnection 
getConnectionMetrics() {
-403return this.metrics;
-404  }
-405
-406  private ExecutorService getBatchPool() 
{
-407if (batchPool == null) {
-408  synchronized (this) {
-409if (batchPool == null) {
-410  int threads = 
conf.getInt("hbase.hconnection.threads.max", 256);
-411  this.batchPool = 
getThreadPool(threads, threads, "-shared", null);
-412  this.cleanupPool = true;
-413}
-414  }
-415}
-416return this.batchPool;
-417  }
-418
-419  private ExecutorService 
getThreadPool(int maxThreads, int coreThreads, String nameHint,
-420  BlockingQueue<Runnable> 
passedWorkQueue) {
-421// shared HTable thread executor not 
yet initialized
-422if (maxThreads == 0) {
-423  maxThreads = 
Runtime.getRuntime().availableProcessors() * 8;
-424}
-425if (coreThreads == 0) {
-426  coreThreads = 
Runtime.getRuntime().availableProcessors() * 8;
-427}
-428long keepAliveTime = 
conf.getLong("hbase.hconnection.threads.keepalivetime", 60);
-429BlockingQueue<Runnable> 
workQueue = passedWorkQueue;
-430if (workQueue == null) {
-431  workQueue =
-432new 
LinkedBlockingQueue<>(maxThreads *
-433
conf.getInt(HConstants.HBASE_CLIENT_MAX_TOTAL_TASKS,
-434
HConstants.DEFAULT_HBASE_CLIENT_MAX_TOTAL_TASKS));
-435  coreThreads = maxThreads;
-436}
-437ThreadPoolExecutor tpe = new 
ThreadPoolExecutor(
-438coreThreads,
-439maxThreads,
-440keepAliveTime,
-441TimeUnit.SECONDS,
-442workQueue,
-443
Threads.newDaemonThreadFactory(toString() + nameHint));
-444tpe.allowCoreThreadTimeOut(true);
-445return tpe;
-446  }
-447
-448  private ExecutorService 
getMetaLookupPool() {
-449if (this.metaLookupPool == null) {
-450  synchronized (this) {
-451if (this.metaLookupPool == null) 
{
-452  //Some of the threads would be 
used for meta replicas
-453  //To start with, 
threads.max.core threads can hit the meta (including replicas).
-454  //After that, requests will get 
queued up in the passed queue, and only after
-455  //the queue is full, a new 
thread will be started
-456  int threads = 
conf.getInt("hbase.hconnection.meta.lookup.threads.max", 128);
-457  this.metaLookupPool = 
getThreadPool(
-458 threads,
-459 threads,
-460 "-metaLookup-shared-", new 
LinkedBlockingQueue<>());
-461}
-462  }
-463}
-464return this.metaLookupPool;
-465  }
-466
-467  protected ExecutorService 
getCurrentMetaLookupPool() {
-468
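
getThreadPool() above builds the shared pool with core == max threads, a bounded work queue sized from the per-connection task limit, and core threads allowed to time out so an idle connection releases them. A self-contained sketch of that construction; the method name and the inline ThreadFactory are illustrative (HBase itself uses Threads.newDaemonThreadFactory).

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public final class PoolSketch {
  public static ThreadPoolExecutor newSharedPool(int maxThreads, int maxTotalTasks,
      long keepAliveSeconds, String nameHint) {
    LinkedBlockingQueue<Runnable> workQueue =
        new LinkedBlockingQueue<>(maxThreads * maxTotalTasks);   // bounded, as above
    ThreadPoolExecutor tpe = new ThreadPoolExecutor(
        maxThreads,          // coreThreads == maxThreads once a bounded queue is used
        maxThreads,
        keepAliveSeconds,
        TimeUnit.SECONDS,
        workQueue,
        r -> {
          Thread t = new Thread(r, "shared" + nameHint);
          t.setDaemon(true); // stands in for Threads.newDaemonThreadFactory(...)
          return t;
        });
    tpe.allowCoreThreadTimeOut(true);  // idle core threads are reclaimed
    return tpe;
  }
}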

[30/51] [partial] hbase-site git commit: Published site at .

2017-12-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/63d6f712/devapidocs/org/apache/hadoop/hbase/client/Increment.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Increment.html 
b/devapidocs/org/apache/hadoop/hbase/client/Increment.html
index 4642921..ab0cf22 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Increment.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Increment.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":42,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10};
+var methods = 
{"i0":10,"i1":10,"i2":42,"i3":10,"i4":10,"i5":10,"i6":10,"i7":42,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":42,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -130,8 +130,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
 public class Increment
-extends Mutation
-implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableRow
+extends Mutation
 Used to perform Increment operations on a single row.
  
  This operation ensures atomicity to readers. Increments are done
@@ -182,6 +181,13 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 Fields inherited from classorg.apache.hadoop.hbase.client.OperationWithAttributes
 ID_ATRIBUTE
 
+
+
+
+
+Fields inherited from interfaceorg.apache.hadoop.hbase.client.Row
+COMPARATOR
+
 
 
 
@@ -250,87 +256,93 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 
-int
-compareTo(Rowi)
-
-
 boolean
-equals(http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
+equals(http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
+Deprecated.
+As of release 2.0.0, this 
will be removed in HBase 3.0.0.
+ Use Row.COMPARATOR
 instead
+
+
 
-
+
 protected long
 extraHeapSize()
 Subclasses should override this method to add the heap size 
of their own fields.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
 getFamilyMapOfLongs()
 Before 0.95, when you called Increment#getFamilyMap(), you 
got back
  a map of families to a list of Longs.
 
 
-
+
 TimeRange
 getTimeRange()
 Gets the TimeRange used for this increment.
 
 
-
+
 boolean
 hasFamilies()
 Method for checking if any families have been inserted into 
this Increment
 
 
-
+
 int
-hashCode()
+hashCode()
+Deprecated.
+As of release 2.0.0, this 
will be removed in HBase 3.0.0.
+ No replacement.
+
+
 
-
+
 boolean
 isReturnResults()
 
-
+
 int
 numFamilies()
 Method for retrieving the number of families to increment 
from
 
 
-
+
 Increment
 setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,Permissionperms)
 
-
+
 Increment
 setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringuser,
   Permissionperms)
 
-
+
 Increment
 setAttribute(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
 byte[]value)
 Sets an attribute.
 
 
-
+
 Increment
 setCellVisibility(CellVisibilityexpression)
 Sets the visibility expression associated with cells in 
this Mutation.
 
 
-
+
 Increment
 setClusterIds(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/util/UUID.html?is-external=true;
 title="class or interface in java.util">UUIDclusterIds)
 Marks that the clusters with the given clusterIds have 
consumed the mutation
 
 
-
+
 Increment
 setDurability(Durabilityd)
 Set the durability for this mutation
 
 
-
+
 Increment
 setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in 

[30/51] [partial] hbase-site git commit: Published site at .

2017-12-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d449e87f/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowCell.html 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowCell.html
index 81ee67c..d16e2cb 100644
--- a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowCell.html
+++ b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowCell.html
@@ -49,8 +49,8 @@ var activeTableTab = "activeTableTab";
 
 
 
-PrevClass
-NextClass
+PrevClass
+NextClass
 
 
 Frames
@@ -126,7 +126,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class PrivateCellUtil.LastOnRowCell
+private static class PrivateCellUtil.LastOnRowCell
 extends PrivateCellUtil.EmptyCell
 
 
@@ -145,7 +145,7 @@ extends Cell
-Cell.DataType
+Cell.Type
 
 
 
@@ -244,9 +244,9 @@ extends getTimestamp()
 
 
-Cell.DataType
+Cell.Type
 getType()
-Returns the type of cell in a human readable format using 
Cell.DataType
+Returns the type of cell in a human readable format using 
Cell.Type
 
 
 
@@ -306,7 +306,7 @@ extends 
 
 FIXED_OVERHEAD
-private static finalint FIXED_OVERHEAD
+private static finalint FIXED_OVERHEAD
 
 
 
@@ -315,7 +315,7 @@ extends 
 
 rowArray
-private finalbyte[] rowArray
+private finalbyte[] rowArray
 
 
 
@@ -324,7 +324,7 @@ extends 
 
 roffset
-private finalint roffset
+private finalint roffset
 
 
 
@@ -333,7 +333,7 @@ extends 
 
 rlength
-private finalshort rlength
+private finalshort rlength
 
 
 
@@ -350,7 +350,7 @@ extends 
 
 LastOnRowCell
-publicLastOnRowCell(byte[]row,
+publicLastOnRowCell(byte[]row,
  introffset,
  shortrlength)
 
@@ -369,7 +369,7 @@ extends 
 
 heapSize
-publiclongheapSize()
+publiclongheapSize()
 
 Returns:
 Approximate 'exclusive deep size' of implementing object.  Includes
@@ -383,7 +383,7 @@ extends 
 
 getRowArray
-publicbyte[]getRowArray()
+publicbyte[]getRowArray()
 Description copied from 
interface:Cell
 Contiguous raw bytes that may start at any index in the 
containing array. Max length is
  Short.MAX_VALUE which is 32,767 bytes.
@@ -403,7 +403,7 @@ extends 
 
 getRowOffset
-publicintgetRowOffset()
+publicintgetRowOffset()
 
 Specified by:
 getRowOffsetin
 interfaceCell
@@ -420,7 +420,7 @@ extends 
 
 getRowLength
-publicshortgetRowLength()
+publicshortgetRowLength()
 
 Specified by:
 getRowLengthin
 interfaceCell
@@ -437,7 +437,7 @@ extends 
 
 getTimestamp
-publiclonggetTimestamp()
+publiclonggetTimestamp()
 
 Returns:
 Long value representing time at which this cell was "Put" into the row.  
Typically
@@ -451,7 +451,7 @@ extends 
 
 getTypeByte
-publicbytegetTypeByte()
+publicbytegetTypeByte()
 
 Returns:
 The byte representation of the KeyValue.TYPE of this cell: one of Put, 
Delete, etc
@@ -464,9 +464,9 @@ extends 
 
 getType
-publicCell.DataTypegetType()
+publicCell.TypegetType()
 Description copied from 
interface:ExtendedCell
-Returns the type of cell in a human readable format using 
Cell.DataType
+Returns the type of cell in a human readable format using 
Cell.Type
  
  Note : This does not expose the internal types of Cells like KeyValue.Type.Maximum
 and
  KeyValue.Type.Minimum
@@ -504,8 +504,8 @@ extends 
 
-PrevClass
-NextClass
+PrevClass
+NextClass
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d449e87f/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowColByteBufferCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowColByteBufferCell.html
 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowColByteBufferCell.html
deleted file mode 100644
index 8a454ee..000
--- 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowColByteBufferCell.html
+++ /dev/null
@@ -1,609 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-
-
-
-PrivateCellUtil.LastOnRowColByteBufferCell (Apache HBase 3.0.0-SNAPSHOT 
API)
-
-
-
-
-
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-
-
-JavaScript is disabled on your browser.
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-PrevClass
-NextClass
-
-
-Frames
-NoFrames
-
-
-AllClasses
-
-
-
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":9,"i12":10,"i13":42,"i14":42,"i15":42,"i16":42,"i17":42,"i18":42,"i19":42,"i20":42,"i21":10,"i22":10};
+var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames
@@ -74,7 +74,7 @@ var activeTableTab = "activeTableTab";
 
 
 Summary:
-Nested|
+Nested|
 Field|
 Constr|
 Method
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class ReplicationPeerConfig
+public class ReplicationPeerConfig
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 A configuration for the replication peer cluster.
 
@@ -119,6 +119,25 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
+
+
+
+
+
+Nested Class Summary
+
+Nested Classes
+
+Modifier and Type
+Class and Description
+
+
+(package private) static class
+ReplicationPeerConfig.ReplicationPeerConfigBuilderImpl
+
+
+
+
 
 
 
@@ -183,10 +202,21 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 Constructors
 
-Constructor and Description
+Modifier
+Constructor and Description
 
 
-ReplicationPeerConfig()
+
+ReplicationPeerConfig()
+Deprecated.
+As of release 2.0.0, this will be removed in 3.0.0. Use
+ ReplicationPeerConfigBuilder to create a new ReplicationPeerConfig (see the
+ builder sketch just below).
+
+
+
+
+private 
+ReplicationPeerConfig(ReplicationPeerConfig.ReplicationPeerConfigBuilderImplbuilder)
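
A minimal sketch of the builder referenced in the deprecation note above, assuming the newBuilder()/setClusterKey()/setBandwidth() methods listed in this summary plus a terminal build() call; the cluster key value is a placeholder, not taken from the diff:

import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfigBuilder;

public class PeerConfigSketch {
  // Builds a peer config without the deprecated no-arg constructor and setters.
  static ReplicationPeerConfig slaveClusterPeer() {
    ReplicationPeerConfigBuilder builder = ReplicationPeerConfig.newBuilder();
    return builder
        .setClusterKey("zk1,zk2,zk3:2181:/hbase") // quorum:clientPort:znodeParent (placeholder)
        .setBandwidth(0)                          // 0 means no bandwidth limit
        .build();                                 // assumed terminal call on the builder
  }
}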
 
 
 
@@ -198,7 +228,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 Method Summary
 
-All MethodsInstance MethodsConcrete Methods
+All MethodsStatic MethodsInstance MethodsConcrete MethodsDeprecated Methods
 
 Modifier and Type
 Method and Description
@@ -246,50 +276,97 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-boolean
-replicateAllUserTables()
+static ReplicationPeerConfigBuilder
+newBuilder()
 
 
-ReplicationPeerConfig
-setBandwidth(longbandwidth)
+static ReplicationPeerConfigBuilder
+newBuilder(ReplicationPeerConfigpeerConfig)
 
 
-ReplicationPeerConfig
-setClusterKey(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringclusterKey)
-Set the clusterKey which is the concatenation of the slave 
cluster's:
-  
hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
-
+boolean
+replicateAllUserTables()
 
 
 ReplicationPeerConfig
-setExcludeNamespaces(http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">Stringnamespaces)
+setBandwidth(longbandwidth)
+Deprecated.
+As of release 2.0.0, this will be removed in 3.0.0. Use
+ ReplicationPeerConfigBuilder.setBandwidth(long) instead.
+
+
 
 
 ReplicationPeerConfig
-setExcludeTableCFsMap(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapTableName,? extends http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">Collectionhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringtableCFsMap)
+setClusterKey(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringclusterKey)
+Deprecated.
+As of release 2.0.0, this will be removed in 3.0.0. Use
+ ReplicationPeerConfigBuilder.setClusterKey(String) instead.
+
+
 
 
 

[30/51] [partial] hbase-site git commit: Published site at .

2017-12-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4b2cc17/apidocs/src-html/org/apache/hadoop/hbase/RegionLoad.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/RegionLoad.html 
b/apidocs/src-html/org/apache/hadoop/hbase/RegionLoad.html
index 9fecb61..613c4ac 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/RegionLoad.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/RegionLoad.html
@@ -29,238 +29,391 @@
 021package org.apache.hadoop.hbase;
 022
 023import java.util.List;
-024
-025import 
org.apache.yetus.audience.InterfaceAudience;
-026import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;
-027import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
-028import 
org.apache.hadoop.hbase.util.Bytes;
-029import 
org.apache.hadoop.hbase.util.Strings;
-030
-031/**
-032  * Encapsulates per-region load 
metrics.
-033  */
-034@InterfaceAudience.Public
-035public class RegionLoad {
-036
-037  protected 
ClusterStatusProtos.RegionLoad regionLoadPB;
-038
-039  @InterfaceAudience.Private
-040  public 
RegionLoad(ClusterStatusProtos.RegionLoad regionLoadPB) {
-041this.regionLoadPB = regionLoadPB;
-042  }
-043
-044  /**
-045   * @return the region name
-046   */
-047  public byte[] getName() {
-048return 
regionLoadPB.getRegionSpecifier().getValue().toByteArray();
+024import java.util.Map;
+025import java.util.stream.Collectors;
+026import 
org.apache.hadoop.hbase.util.Strings;
+027import 
org.apache.yetus.audience.InterfaceAudience;
+028
+029import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
+030import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;
+031
+032/**
+033 * Encapsulates per-region load 
metrics.
+034 * @deprecated As of release 2.0.0, this 
will be removed in HBase 3.0.0
+035 * Use {@link RegionMetrics} 
instead.
+036 */
+037@InterfaceAudience.Public
+038@Deprecated
+039public class RegionLoad implements 
RegionMetrics {
+040  // DONT use this pb object since the 
byte array backed may be modified in rpc layer
+041  // we keep this pb object for BC.
+042  protected 
ClusterStatusProtos.RegionLoad regionLoadPB;
+043  private final RegionMetrics metrics;
+044
+045  
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD")
+046  public 
RegionLoad(ClusterStatusProtos.RegionLoad regionLoadPB) {
+047this.regionLoadPB = regionLoadPB;
+048this.metrics = 
RegionMetricsBuilder.toRegionMetrics(regionLoadPB);
 049  }
 050
-051  /**
-052   * @return the region name as a 
string
-053   */
-054  public String getNameAsString() {
-055return 
Bytes.toStringBinary(getName());
-056  }
-057
-058  /**
-059   * @return the number of stores
+051  RegionLoad(RegionMetrics metrics) {
+052this.metrics = metrics;
+053this.regionLoadPB = 
RegionMetricsBuilder.toRegionLoad(metrics);
+054  }
+055
+056  /**
+057   * @return the region name
+058   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0
+059   * Use {@link 
#getRegionName} instead.
 060   */
-061  public int getStores() {
-062return regionLoadPB.getStores();
-063  }
-064
-065  /**
-066   * @return the number of storefiles
-067   */
-068  public int getStorefiles() {
-069return 
regionLoadPB.getStorefiles();
-070  }
-071
-072  /**
-073   * @return the total size of the 
storefiles, in MB
-074   */
-075  public int getStorefileSizeMB() {
-076return 
regionLoadPB.getStorefileSizeMB();
-077  }
-078
-079  /**
-080   * @return the memstore size, in MB
-081   */
-082  public int getMemStoreSizeMB() {
-083return 
regionLoadPB.getMemStoreSizeMB();
+061  @Deprecated
+062  public byte[] getName() {
+063return metrics.getRegionName();
+064  }
+065
+066  @Override
+067  public byte[] getRegionName() {
+068return metrics.getRegionName();
+069  }
+070
+071  @Override
+072  public int getStoreCount() {
+073return metrics.getStoreCount();
+074  }
+075
+076  @Override
+077  public int getStoreFileCount() {
+078return metrics.getStoreFileCount();
+079  }
+080
+081  @Override
+082  public Size getStoreFileSize() {
+083return metrics.getStoreFileSize();
 084  }
 085
-086  /**
-087   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0
-088   * (HBASE-3935, https://issues.apache.org/jira/browse/HBASE-3935).
-089   * Use {@link 
#getStorefileIndexSizeKB()} instead.
-090   */
-091  @Deprecated
-092  public int getStorefileIndexSizeMB() 
{
-093// Return value divided by 1024
-094return (int) (regionLoadPB.getStorefileIndexSizeKB() >> 10);
-095  }
-096
-097  public long getStorefileIndexSizeKB() 
{
-098return 
regionLoadPB.getStorefileIndexSizeKB();
+086  @Override
+087  public Size getMemStoreSize() {
+088return metrics.getMemStoreSize();
+089  }
+090
+091  @Override
+092  public 
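
The RegionLoad listing is truncated by the archive here, but the pattern is already visible: the deprecated class now delegates to a RegionMetrics instance. A hedged sketch of reading the same numbers through the RegionMetrics getters shown in the diff; the helper class and the Size.Unit conversion are illustrative assumptions:

import org.apache.hadoop.hbase.RegionMetrics;
import org.apache.hadoop.hbase.Size;
import org.apache.hadoop.hbase.util.Bytes;

public class RegionMetricsSketch {
  // Uses the RegionMetrics getters from the diff in place of the deprecated
  // RegionLoad.getStores()/getStorefileSizeMB() accessors.
  static String summarize(RegionMetrics metrics) {
    double storeFileMb = metrics.getStoreFileSize().get(Size.Unit.MEGABYTE);
    return Bytes.toStringBinary(metrics.getRegionName())
        + " stores=" + metrics.getStoreCount()
        + " storeFiles=" + metrics.getStoreFileCount()
        + " storeFileSize=" + storeFileMb + " MB";
  }
}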

[30/51] [partial] hbase-site git commit: Published site at .

2017-12-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteHTable.html 
b/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
index 3445d05..b85b00e 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
@@ -31,996 +31,997 @@
 023import com.google.protobuf.Message;
 024import com.google.protobuf.Service;
 025import 
com.google.protobuf.ServiceException;
-026import org.apache.commons.logging.Log;
-027import 
org.apache.commons.logging.LogFactory;
-028import 
org.apache.hadoop.conf.Configuration;
-029import org.apache.hadoop.hbase.Cell;
-030import 
org.apache.hadoop.hbase.CellUtil;
-031import 
org.apache.hadoop.hbase.CompareOperator;
-032import 
org.apache.hadoop.hbase.HBaseConfiguration;
-033import 
org.apache.hadoop.hbase.HConstants;
-034import 
org.apache.hadoop.hbase.HTableDescriptor;
-035import 
org.apache.hadoop.hbase.KeyValue;
-036import 
org.apache.hadoop.hbase.TableName;
-037import 
org.apache.yetus.audience.InterfaceAudience;
-038import 
org.apache.hadoop.hbase.client.Append;
-039import 
org.apache.hadoop.hbase.client.Delete;
-040import 
org.apache.hadoop.hbase.client.Durability;
-041import 
org.apache.hadoop.hbase.client.Get;
-042import 
org.apache.hadoop.hbase.client.Increment;
-043import 
org.apache.hadoop.hbase.client.Put;
-044import 
org.apache.hadoop.hbase.client.Result;
-045import 
org.apache.hadoop.hbase.client.ResultScanner;
-046import 
org.apache.hadoop.hbase.client.Row;
-047import 
org.apache.hadoop.hbase.client.RowMutations;
-048import 
org.apache.hadoop.hbase.client.Scan;
-049import 
org.apache.hadoop.hbase.client.Table;
-050import 
org.apache.hadoop.hbase.client.TableDescriptor;
-051import 
org.apache.hadoop.hbase.client.coprocessor.Batch;
-052import 
org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
-053import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
-054import 
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-055import 
org.apache.hadoop.hbase.io.TimeRange;
-056import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-057import 
org.apache.hadoop.hbase.rest.Constants;
-058import 
org.apache.hadoop.hbase.rest.model.CellModel;
-059import 
org.apache.hadoop.hbase.rest.model.CellSetModel;
-060import 
org.apache.hadoop.hbase.rest.model.RowModel;
-061import 
org.apache.hadoop.hbase.rest.model.ScannerModel;
-062import 
org.apache.hadoop.hbase.rest.model.TableSchemaModel;
-063import 
org.apache.hadoop.hbase.util.Bytes;
-064import 
org.apache.hadoop.util.StringUtils;
-065
-066import java.io.IOException;
-067import java.io.InterruptedIOException;
-068import 
java.io.UnsupportedEncodingException;
-069import java.net.URLEncoder;
-070import java.util.ArrayList;
-071import java.util.Collection;
-072import java.util.Iterator;
-073import java.util.List;
-074import java.util.Map;
-075import java.util.Set;
-076import java.util.TreeMap;
-077import java.util.concurrent.TimeUnit;
-078
-079import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-080
-081/**
-082 * HTable interface to remote tables 
accessed via REST gateway
-083 */
-084@InterfaceAudience.Public
-085public class RemoteHTable implements 
Table {
-086
-087  private static final Log LOG = 
LogFactory.getLog(RemoteHTable.class);
-088
-089  final Client client;
-090  final Configuration conf;
-091  final byte[] name;
-092  final int maxRetries;
-093  final long sleepTime;
-094
-095  @SuppressWarnings("rawtypes")
-096  protected String buildRowSpec(final 
byte[] row, final Map familyMap,
-097  final long startTime, final long 
endTime, final int maxVersions) {
-098StringBuffer sb = new 
StringBuffer();
-099sb.append('/');
-100sb.append(Bytes.toString(name));
-101sb.append('/');
-102sb.append(toURLEncodedBytes(row));
-103Set families = 
familyMap.entrySet();
-104if (families != null) {
-105  Iterator i = 
familyMap.entrySet().iterator();
-106  sb.append('/');
-107  while (i.hasNext()) {
-108Map.Entry e = 
(Map.Entry)i.next();
-109Collection quals = 
(Collection)e.getValue();
-110if (quals == null || 
quals.isEmpty()) {
-111  // this is an unqualified 
family. append the family name and NO ':'
-112  
sb.append(toURLEncodedBytes((byte[])e.getKey()));
-113} else {
-114  Iterator ii = 
quals.iterator();
-115  while (ii.hasNext()) {
-116
sb.append(toURLEncodedBytes((byte[])e.getKey()));
-117sb.append(':');
-118Object o = ii.next();
-119// Puts use byte[] but 
Deletes use KeyValue
-120if (o instanceof byte[]) {
-121  
sb.append(toURLEncodedBytes((byte[])o));
-122} else if 

[30/51] [partial] hbase-site git commit: Published site at .

2017-12-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4c0cfa5/apidocs/src-html/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.html
index 16fd892..505e3ae 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.html
@@ -903,462 +903,463 @@
 895 * will mask a later Put with lower 
ts. Set this to true to enable new semantics of versions.
 896 * We will also consider mvcc in 
versions. See HBASE-15968 for details.
 897 */
-898public boolean isNewVersionBehavior() 
{
-899  return 
getStringOrDefault(NEW_VERSION_BEHAVIOR_BYTES,
-900  Boolean::parseBoolean, 
DEFAULT_NEW_VERSION_BEHAVIOR);
-901}
-902
-903public 
ModifyableColumnFamilyDescriptor setNewVersionBehavior(boolean 
newVersionBehavior) {
-904  return 
setValue(NEW_VERSION_BEHAVIOR_BYTES, Boolean.toString(newVersionBehavior));
-905}
-906
-907@Override
-908public int getTimeToLive() {
-909  return 
getStringOrDefault(TTL_BYTES, Integer::parseInt, DEFAULT_TTL);
-910}
-911
-912/**
-913 * @param timeToLive Time-to-live of 
cell contents, in seconds.
-914 * @return this (for chained 
invocation)
-915 */
-916public 
ModifyableColumnFamilyDescriptor setTimeToLive(int timeToLive) {
-917  return setValue(TTL_BYTES, 
Integer.toString(timeToLive));
-918}
-919
-920/**
-921 * @param timeToLive Time-to-live of 
cell contents, in seconds.
-922 * @return this (for chained 
invocation)
-923 * @throws 
org.apache.hadoop.hbase.exceptions.HBaseException
-924 */
-925public 
ModifyableColumnFamilyDescriptor setTimeToLive(String timeToLive) throws 
HBaseException {
-926  return 
setTimeToLive(Integer.parseInt(PrettyPrinter.valueOf(timeToLive, 
Unit.TIME_INTERVAL)));
-927}
-928
-929@Override
-930public int getMinVersions() {
-931  return 
getStringOrDefault(MIN_VERSIONS_BYTES, Integer::valueOf, 
DEFAULT_MIN_VERSIONS);
-932}
-933
-934/**
-935 * @param minVersions The minimum 
number of versions to keep. (used when
-936 * timeToLive is set)
-937 * @return this (for chained 
invocation)
-938 */
-939public 
ModifyableColumnFamilyDescriptor setMinVersions(int minVersions) {
-940  return setValue(MIN_VERSIONS_BYTES, 
Integer.toString(minVersions));
-941}
-942
-943@Override
-944public boolean isBlockCacheEnabled() 
{
-945  return 
getStringOrDefault(BLOCKCACHE_BYTES, Boolean::valueOf, DEFAULT_BLOCKCACHE);
-946}
-947
-948/**
-949 * @param blockCacheEnabled True if 
hfile DATA type blocks should be cached
-950 * (We always cache INDEX and BLOOM 
blocks; you cannot turn this off).
-951 * @return this (for chained 
invocation)
-952 */
-953public 
ModifyableColumnFamilyDescriptor setBlockCacheEnabled(boolean 
blockCacheEnabled) {
-954  return setValue(BLOCKCACHE_BYTES, 
Boolean.toString(blockCacheEnabled));
-955}
-956
-957@Override
-958public BloomType getBloomFilterType() 
{
-959  return 
getStringOrDefault(BLOOMFILTER_BYTES, BloomType::valueOf, 
DEFAULT_BLOOMFILTER);
-960}
-961
-962public 
ModifyableColumnFamilyDescriptor setBloomFilterType(final BloomType bt) {
-963  return setValue(BLOOMFILTER_BYTES, 
bt.name());
-964}
-965
-966@Override
-967public int getScope() {
-968  return 
getStringOrDefault(REPLICATION_SCOPE_BYTES, Integer::valueOf, 
DEFAULT_REPLICATION_SCOPE);
-969}
-970
-971/**
-972 * @param scope the scope tag
-973 * @return this (for chained 
invocation)
-974 */
-975public 
ModifyableColumnFamilyDescriptor setScope(int scope) {
-976  return 
setValue(REPLICATION_SCOPE_BYTES, Integer.toString(scope));
-977}
-978
-979@Override
-980public boolean isCacheDataOnWrite() 
{
-981  return 
getStringOrDefault(CACHE_DATA_ON_WRITE_BYTES, Boolean::valueOf, 
DEFAULT_CACHE_DATA_ON_WRITE);
-982}
-983
-984/**
-985 * @param value true if we should 
cache data blocks on write
-986 * @return this (for chained 
invocation)
-987 */
-988public 
ModifyableColumnFamilyDescriptor setCacheDataOnWrite(boolean value) {
-989  return 
setValue(CACHE_DATA_ON_WRITE_BYTES, Boolean.toString(value));
-990}
-991
-992@Override
-993public boolean 
isCacheIndexesOnWrite() {
-994  return 
getStringOrDefault(CACHE_INDEX_ON_WRITE_BYTES, Boolean::valueOf, 
DEFAULT_CACHE_INDEX_ON_WRITE);
-995}
-996
-997/**
-998 * @param value true if we should 
cache index blocks on write
-999 * @return this (for chained 
invocation)
-1000 */
-1001public 
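
The ColumnFamilyDescriptorBuilder listing above (also truncated) shows the internal ModifyableColumnFamilyDescriptor setters for TTL, minimum versions, block cache and bloom filter. A hedged usage sketch of the public builder that fronts them; the family name and values are placeholders, not taken from the listing:

import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.util.Bytes;

public class FamilySketch {
  // Mirrors the setters in the listing: TTL in seconds, minVersions kept past the TTL,
  // DATA blocks cached, ROW bloom filter.
  static ColumnFamilyDescriptor infoFamily() {
    return ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("info"))
        .setTimeToLive(86400)
        .setMinVersions(1)
        .setBlockCacheEnabled(true)
        .setBloomFilterType(BloomType.ROW)
        .build();
  }
}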

[30/51] [partial] hbase-site git commit: Published site at .

2017-12-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/071f974b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RpcSchedulerFactory.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RpcSchedulerFactory.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RpcSchedulerFactory.html
index 91c17f5..d0f065f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RpcSchedulerFactory.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RpcSchedulerFactory.html
@@ -83,6 +83,10 @@
 
 
 
+org.apache.hadoop.hbase.master
+
+
+
 org.apache.hadoop.hbase.regionserver
 
 
@@ -91,6 +95,28 @@
 
 
 
+
+
+
+Uses of RpcSchedulerFactory in org.apache.hadoop.hbase.master
+
+Methods in org.apache.hadoop.hbase.master
 with parameters of type RpcSchedulerFactory
+
+Modifier and Type
+Method and Description
+
+
+
+protected RpcServerInterface
+MasterRpcServices.createRpcServer(Serverserver,
+   org.apache.hadoop.conf.Configurationconf,
+   RpcSchedulerFactoryrpcSchedulerFactory,
+   http://docs.oracle.com/javase/8/docs/api/java/net/InetSocketAddress.html?is-external=true;
 title="class or interface in java.net">InetSocketAddressbindAddress,
+   http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+
+
+
+
 
 
 
@@ -116,6 +142,23 @@
 
 
 
+
+Methods in org.apache.hadoop.hbase.regionserver
 with parameters of type RpcSchedulerFactory
+
+Modifier and Type
+Method and Description
+
+
+
+protected RpcServerInterface
+RSRpcServices.createRpcServer(Serverserver,
+   org.apache.hadoop.conf.Configurationconf,
+   RpcSchedulerFactoryrpcSchedulerFactory,
+   http://docs.oracle.com/javase/8/docs/api/java/net/InetSocketAddress.html?is-external=true;
 title="class or interface in java.net">InetSocketAddressbindAddress,
+   http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+
+
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/071f974b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 22bdb27..7981964 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -703,19 +703,19 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
+org.apache.hadoop.hbase.regionserver.ScanType
 org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactoryImpl.FactoryStorage
 org.apache.hadoop.hbase.regionserver.CompactingMemStore.IndexType
-org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
+org.apache.hadoop.hbase.regionserver.TimeRangeTracker.Type
 org.apache.hadoop.hbase.regionserver.FlushType
+org.apache.hadoop.hbase.regionserver.Region.Operation
 org.apache.hadoop.hbase.regionserver.HRegion.FlushResult.Result
+org.apache.hadoop.hbase.regionserver.DefaultHeapMemoryTuner.StepDirection
 org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope
-org.apache.hadoop.hbase.regionserver.MemStoreCompactionStrategy.Action
+org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
 org.apache.hadoop.hbase.regionserver.BloomType
-org.apache.hadoop.hbase.regionserver.DefaultHeapMemoryTuner.StepDirection
-org.apache.hadoop.hbase.regionserver.Region.Operation
-org.apache.hadoop.hbase.regionserver.TimeRangeTracker.Type
+org.apache.hadoop.hbase.regionserver.MemStoreCompactionStrategy.Action
 org.apache.hadoop.hbase.regionserver.ScannerContext.NextState
-org.apache.hadoop.hbase.regionserver.ScanType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/071f974b/devapidocs/org/apache/hadoop/hbase/regionserver/package-use.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-use.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/package-use.html
index 7351d33..088fc77 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-use.html
@@ -674,6 +674,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 

[30/51] [partial] hbase-site git commit: Published site at .

2017-12-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dc4e5c85/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
index 7c59e27..c904c56 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
@@ -119,4048 +119,4054 @@
 111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
 112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest;
 113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;
-140import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-141import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-142import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-143import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-144import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-145import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;
-146import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse;
-147import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-148import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-149import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-150import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-151import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-152import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-153import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-154import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-155import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksRequest;
-156import 

[30/51] [partial] hbase-site git commit: Published site at .

2017-12-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4abd958d/devapidocs/constant-values.html
--
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index f87f99d..96f5746 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -1304,1230 +1304,1244 @@
 16100
 
 
+
+
+publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+DEFAULT_STATUS_MULTICAST_PUBLISHER_BIND_ADDRESS
+"0.0.0.0"
+
+
 
 
 publicstaticfinalint
 DEFAULT_THREAD_WAKE_FREQUENCY
 1
 
-
+
 
 
 publicstaticfinalboolean
 DEFAULT_USE_META_REPLICAS
 false
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_VERSION_FILE_WRITE_ATTEMPTS
 3
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 DEFAULT_WAL_STORAGE_POLICY
 "HOT"
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_ZK_SESSION_TIMEOUT
 9
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 DEFAULT_ZOOKEEPER_ZNODE_PARENT
 "/hbase"
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_ZOOKEPER_CLIENT_PORT
 2181
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_ZOOKEPER_MAX_CLIENT_CNXNS
 300
 
-
+
 
 
 publicstaticfinalint
 DELIMITER
 44
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 ENABLE_CLIENT_BACKPRESSURE
 "hbase.client.backpressure.enabled"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 ENABLE_DATA_FILE_UMASK
 "hbase.data.umask.enable"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 ENABLE_WAL_COMPRESSION
 "hbase.regionserver.wal.enablecompression"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 ENABLE_WAL_ENCRYPTION
 "hbase.regionserver.wal.encryption"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 FILE_SYSTEM_VERSION
 "8"
 
-
+
 
 
 publicstaticfinalint
 FOREVER
 2147483647
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 HBASE_BALANCER_MAX_BALANCING
 "hbase.balancer.max.balancing"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 HBASE_BALANCER_PERIOD
 "hbase.balancer.period"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 HBASE_CANARY_READ_RAW_SCAN_KEY
 "hbase.canary.read.raw.enabled"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_DATA_TTL_KEY
 "hbase.canary.write.data.ttl"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_PERSERVER_REGIONS_LOWERLIMIT_KEY
 "hbase.canary.write.perserver.regions.lowerLimit"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_PERSERVER_REGIONS_UPPERLIMIT_KEY
 "hbase.canary.write.perserver.regions.upperLimit"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_TABLE_CHECK_PERIOD_KEY
 "hbase.canary.write.table.check.period"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_VALUE_SIZE_KEY
 "hbase.canary.write.value.size"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 HBASE_CHECKSUM_VERIFICATION
 "hbase.regionserver.checksum.verify"
 
-
+
 
 
 publicstaticfinalboolean
 HBASE_CLIENT_ENABLE_FAST_FAIL_MODE_DEFAULT
 false
 
-
+
 
 
 publicstaticfinallong
 HBASE_CLIENT_FAST_FAIL_CLEANUP_DURATION_MS_DEFAULT
 60L
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 

[30/51] [partial] hbase-site git commit: Published site at .

2017-12-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e23b49ba/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 4049048..9018f2b 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -176,6 +176,8 @@
 
 abort
 - Variable in class org.apache.hadoop.hbase.replication.ReplicationQueuesArguments
 
+abort(String,
 Throwable) - Method in class org.apache.hadoop.hbase.SharedConnection
+
 abort(String,
 Throwable) - Method in class org.apache.hadoop.hbase.zookeeper.ZKWatcher
 
 ABORT_ON_ERROR_KEY
 - Static variable in class org.apache.hadoop.hbase.coprocessor.CoprocessorHost
@@ -1335,7 +1337,7 @@
 
 Add a client port to the list.
 
-addClusterId(UUID)
 - Method in class org.apache.hadoop.hbase.wal.WALKey
+addClusterId(UUID)
 - Method in class org.apache.hadoop.hbase.wal.WALKeyImpl
 
 Marks that the cluster with the given clusterId has 
consumed the change
 
@@ -2214,6 +2216,18 @@
 
 addRequiredOptWithArg(String,
 String, String) - Method in class org.apache.hadoop.hbase.util.AbstractHBaseTool
 
+addResource(String)
 - Method in class org.apache.hadoop.hbase.coprocessor.ReadOnlyConfiguration
+
+addResource(URL)
 - Method in class org.apache.hadoop.hbase.coprocessor.ReadOnlyConfiguration
+
+addResource(Path)
 - Method in class org.apache.hadoop.hbase.coprocessor.ReadOnlyConfiguration
+
+addResource(InputStream)
 - Method in class org.apache.hadoop.hbase.coprocessor.ReadOnlyConfiguration
+
+addResource(InputStream,
 String) - Method in class org.apache.hadoop.hbase.coprocessor.ReadOnlyConfiguration
+
+addResource(Configuration)
 - Method in class org.apache.hadoop.hbase.coprocessor.ReadOnlyConfiguration
+
 address 
- Variable in class org.apache.hadoop.hbase.ipc.ConnectionId
 
 address
 - Variable in class org.apache.hadoop.hbase.ipc.SimpleRpcServer
@@ -3023,16 +3037,16 @@
 
 append(W,
 FSWALEntry) - Method in class 
org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL
 
-append(RegionInfo,
 WALKey, WALEdit, boolean) - Method in class 
org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL
+append(RegionInfo,
 WALKeyImpl, WALEdit, boolean) - Method in class 
org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL
 
 NOTE: This append, at a time that is usually after this 
call returns, starts an mvcc
  transaction by calling 'begin' wherein which we assign this update a 
sequenceid.
 
-append(RegionInfo,
 WALKey, WALEdit, boolean) - Method in class 
org.apache.hadoop.hbase.regionserver.wal.AsyncFSWAL
+append(RegionInfo,
 WALKeyImpl, WALEdit, boolean) - Method in class 
org.apache.hadoop.hbase.regionserver.wal.AsyncFSWAL
 
 append(WAL.Entry)
 - Method in class org.apache.hadoop.hbase.regionserver.wal.AsyncProtobufLogWriter
 
-append(RegionInfo,
 WALKey, WALEdit, boolean) - Method in class 
org.apache.hadoop.hbase.regionserver.wal.FSHLog
+append(RegionInfo,
 WALKeyImpl, WALEdit, boolean) - Method in class 
org.apache.hadoop.hbase.regionserver.wal.FSHLog
 
 append(FSWALEntry)
 - Method in class org.apache.hadoop.hbase.regionserver.wal.FSHLog.RingBufferEventHandler
 
@@ -3057,9 +3071,9 @@
 
 append(TNode,
 TNode) - Static method in class org.apache.hadoop.hbase.util.AvlUtil.AvlIterableList
 
-append(RegionInfo,
 WALKey, WALEdit, boolean) - Method in class 
org.apache.hadoop.hbase.wal.DisabledWALProvider.DisabledWAL
+append(RegionInfo,
 WALKeyImpl, WALEdit, boolean) - Method in class 
org.apache.hadoop.hbase.wal.DisabledWALProvider.DisabledWAL
 
-append(RegionInfo,
 WALKey, WALEdit, boolean) - Method in interface 
org.apache.hadoop.hbase.wal.WAL
+append(RegionInfo,
 WALKeyImpl, WALEdit, boolean) - Method in interface 
org.apache.hadoop.hbase.wal.WAL
 
 Append a set of edits to the WAL.
 
@@ -6590,12 +6604,6 @@
 
 A comma-delimited array of values for use as bucket 
sizes.
 
-BUCKET_CACHE_COMBINED_KEY
 - Static variable in class org.apache.hadoop.hbase.io.hfile.CacheConfig
-
-If the bucket cache is used in league with the lru on-heap 
block cache (meta blocks such
- as indices and blooms are kept in the lru blockcache and the data blocks in 
the
- bucket cache).
-
 BUCKET_CACHE_IOENGINE_KEY
 - Static variable in class org.apache.hadoop.hbase.HConstants
 
 Current ioengine options in include: heap, offheap and 
file:PATH (where PATH is the path
@@ -7752,16 +7760,10 @@
 
 Configuration key to cache data blocks in compressed and/or 
encrypted format.
 
-CACHE_DATA_IN_L1
 - Static variable in class org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder
-
-Key for cache data into L1 if cache is set up with more 
than one tier.
-
 CACHE_DATA_IN_L1
 - Static variable in class org.apache.hadoop.hbase.HColumnDescriptor
 
 Deprecated.
 
-CACHE_DATA_IN_L1_BYTES
 - Static variable in class org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder
-
 CACHE_DATA_ON_READ_KEY
 - Static variable in class org.apache.hadoop.hbase.io.hfile.CacheConfig
 
 Configuration key to cache data blocks 

[30/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html 
b/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
index 19cade2..c6f511e 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
@@ -3286,7 +3286,8 @@ implements Description copied from 
interface:RegionObserver
 Called before the client performs a Get
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
 
 Specified by:
 preGetOpin
 interfaceRegionObserver
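
The RegionObserver javadoc quoted throughout this diff describes the bypass contract: a pre-hook may call bypass to skip the default action and any coprocessors chained after it. A hedged sketch of a coprocessor using that contract; the observer class is hypothetical, and only the preGetOp signature and ObserverContext.bypass() call follow the 2.x API:

import java.io.IOException;
import java.util.List;
import java.util.Optional;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;

public class BypassingGetObserver implements RegionCoprocessor, RegionObserver {
  @Override
  public Optional<RegionObserver> getRegionObserver() {
    return Optional.of(this);
  }

  @Override
  public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> c, Get get,
      List<Cell> result) throws IOException {
    // Leaving 'result' as-is and calling bypass() makes this hook's outcome final:
    // the default Get processing and any later chained coprocessors are skipped.
    c.bypass();
  }
}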
@@ -3314,7 +3315,8 @@ implements Description copied from 
interface:RegionObserver
 Called before the client tests for existence using a Get.
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
 
 Specified by:
 preExistsin
 interfaceRegionObserver
@@ -3343,7 +3345,8 @@ implements Description copied from 
interface:RegionObserver
 Called before the client stores a value.
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
  
  Note: Do not retain references to any Cells in 'put' beyond the life of this 
invocation.
  If need a Cell reference for later use, copy the cell and use that.
@@ -3400,7 +3403,8 @@ implements Description copied from 
interface:RegionObserver
 Called before the client deletes a value.
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
  
  Note: Do not retain references to any Cells in 'delete' beyond the life of 
this invocation.
  If need a Cell reference for later use, copy the cell and use that.
@@ -3493,7 +3497,8 @@ implements Description copied from 
interface:RegionObserver
 Called before checkAndPut.
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
  
  Note: Do not retain references to any Cells in 'put' beyond the life of this 
invocation.
  If need a Cell reference for later use, copy the cell and use that.
@@ -3538,7 +3543,8 @@ implements Description copied from 
interface:RegionObserver
 Called before checkAndDelete.
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
  
  Note: Do not retain references to any Cells in 'delete' beyond the life of 
this invocation.
  If need a Cell reference for later use, copy the cell and use that.
@@ -3623,7 +3630,8 @@ implements Description copied from 
interface:RegionObserver
 Called before Append.
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
  
  Note: Do not retain references to any Cells in 'append' beyond the life of 
this invocation.
  If need a Cell reference for later use, copy the cell and use that.
@@ -3690,7 +3699,8 @@ implements Description copied from 
interface:RegionObserver
 Called before Increment.
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
  
  Note: Do not retain references to any Cells in 'increment' beyond the life of 
this invocation.
  If need a Cell reference for later use, copy the cell and use that.
@@ -3752,7 +3763,8 @@ implements Description copied from 
interface:RegionObserver
 Called before the client asks for the next row on a scanner.
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
  
  Note: Do not retain references to any Cells returned by scanner, beyond the 
life of this
  invocation. If need a Cell reference for later use, copy the cell and use 
that.
@@ -3906,7 +3919,8 @@ implements Description copied from 
interface:RegionObserver
 Called before the client closes a scanner.
  
- Call 

[30/51] [partial] hbase-site git commit: Published site at .

2017-12-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c54c242b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
index 3edfbef..9707b2c 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
@@ -2459,5936 +2459,5935 @@
 2451  }
 2452
 2453  for (HStore s : storesToFlush) {
-2454MemStoreSize flushableSize = 
s.getFlushableSize();
-2455
totalSizeOfFlushableStores.incMemStoreSize(flushableSize);
-2456
storeFlushCtxs.put(s.getColumnFamilyDescriptor().getName(),
-2457  
s.createFlushContext(flushOpSeqId, tracker));
-2458// for writing stores to WAL
-2459
committedFiles.put(s.getColumnFamilyDescriptor().getName(), null);
-2460
storeFlushableSize.put(s.getColumnFamilyDescriptor().getName(), 
flushableSize);
-2461  }
-2462
-2463  // write the snapshot start to 
WAL
-2464  if (wal != null && !writestate.readOnly) {
-2465FlushDescriptor desc = 
ProtobufUtil.toFlushDescriptor(FlushAction.START_FLUSH,
-2466getRegionInfo(), 
flushOpSeqId, committedFiles);
-2467// No sync. Sync is below where 
no updates lock and we do FlushAction.COMMIT_FLUSH
-2468WALUtil.writeFlushMarker(wal, 
this.getReplicationScope(), getRegionInfo(), desc, false,
-2469mvcc);
-2470  }
-2471
-2472  // Prepare flush (take a 
snapshot)
-2473  for (StoreFlushContext flush : 
storeFlushCtxs.values()) {
-2474flush.prepare();
-2475  }
-2476} catch (IOException ex) {
-2477  doAbortFlushToWAL(wal, 
flushOpSeqId, committedFiles);
-2478  throw ex;
-2479} finally {
-2480  
this.updatesLock.writeLock().unlock();
-2481}
-2482String s = "Finished memstore 
snapshotting " + this + ", syncing WAL and waiting on mvcc, " +
-2483"flushsize=" + 
totalSizeOfFlushableStores;
-2484status.setStatus(s);
-2485doSyncOfUnflushedWALChanges(wal, 
getRegionInfo());
-2486return new 
PrepareFlushResult(storeFlushCtxs, committedFiles, storeFlushableSize, 
startTime,
-2487flushOpSeqId, flushedSeqId, 
totalSizeOfFlushableStores);
-2488  }
-2489
-2490  /**
-2491   * Utility method broken out of 
internalPrepareFlushCache so that method is smaller.
-2492   */
-2493  private void 
logFatLineOnFlush(CollectionHStore storesToFlush, long sequenceId) {
-2494if (!LOG.isInfoEnabled()) {
-2495  return;
-2496}
-2497// Log a fat line detailing what is 
being flushed.
-2498StringBuilder perCfExtras = null;
-2499if (!isAllFamilies(storesToFlush)) 
{
-2500  perCfExtras = new 
StringBuilder();
-2501  for (HStore store: storesToFlush) 
{
-2502perCfExtras.append("; 
").append(store.getColumnFamilyName());
-2503perCfExtras.append("=")
-2504
.append(StringUtils.byteDesc(store.getFlushableSize().getDataSize()));
-2505  }
-2506}
-2507LOG.info("Flushing " + + 
storesToFlush.size() + "/" + stores.size() +
-2508" column families, memstore=" + 
StringUtils.byteDesc(this.memstoreDataSize.get()) +
-2509((perCfExtras != null && perCfExtras.length() > 0)? perCfExtras.toString(): "") +
-2510((wal != null) ? "" : "; WAL is 
null, using passed sequenceid=" + sequenceId));
-2511  }
-2512
-2513  private void doAbortFlushToWAL(final 
WAL wal, final long flushOpSeqId,
-2514  final Mapbyte[], 
ListPath committedFiles) {
-2515if (wal == null) return;
-2516try {
-2517  FlushDescriptor desc = 
ProtobufUtil.toFlushDescriptor(FlushAction.ABORT_FLUSH,
-2518  getRegionInfo(), flushOpSeqId, 
committedFiles);
-2519  WALUtil.writeFlushMarker(wal, 
this.getReplicationScope(), getRegionInfo(), desc, false,
-2520  mvcc);
-2521} catch (Throwable t) {
-2522  LOG.warn("Received unexpected 
exception trying to write ABORT_FLUSH marker to WAL:" +
-2523  
StringUtils.stringifyException(t));
-2524  // ignore this since we will be 
aborting the RS with DSE.
-2525}
-2526// we have called 
wal.startCacheFlush(), now we have to abort it
-2527
wal.abortCacheFlush(this.getRegionInfo().getEncodedNameAsBytes());
-2528  }
-2529
-2530  /**
-2531   * Sync unflushed WAL changes. See 
HBASE-8208 for details
-2532   */
-2533  private static void 
doSyncOfUnflushedWALChanges(final WAL wal, final RegionInfo hri)
-2534  throws IOException {
-2535if (wal == null) {
-2536  return;
-2537}
-2538try {
-2539  wal.sync(); // ensure that flush 
marker is sync'ed
-2540} catch (IOException ioe) {
-2541  
wal.abortCacheFlush(hri.getEncodedNameAsBytes());
-2542 

[30/51] [partial] hbase-site git commit: Published site at .

2017-11-30 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/713d773f/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
index 71724c6..0cd9f33 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStates.RegionStateNode.html
@@ -150,31 +150,27 @@
 
 
 protected RegionStates.RegionStateNode
-RegionStates.createRegionNode(RegionInforegionInfo)
+RegionStates.createRegionStateNode(RegionInforegionInfo)
 
 
 protected RegionStates.RegionStateNode
-RegionStates.getOrCreateRegionNode(RegionInforegionInfo)
+RegionStates.getOrCreateRegionStateNode(RegionInforegionInfo)
 
 
 RegionStates.RegionStateNode
-RegionStates.RegionFailedOpen.getRegionNode()
+RegionTransitionProcedure.getRegionState(MasterProcedureEnvenv)
 
 
-protected RegionStates.RegionStateNode
-RegionStates.getRegionNode(RegionInforegionInfo)
+RegionStates.RegionStateNode
+RegionStates.RegionFailedOpen.getRegionStateNode()
 
 
-(package private) RegionStates.RegionStateNode
-RegionStates.getRegionNodeFromEncodedName(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringencodedRegionName)
+protected RegionStates.RegionStateNode
+RegionStates.getRegionStateNode(RegionInforegionInfo)
 
 
 (package private) RegionStates.RegionStateNode
-RegionStates.getRegionNodeFromName(byte[]regionName)
-
-
-RegionStates.RegionStateNode
-RegionTransitionProcedure.getRegionState(MasterProcedureEnvenv)
+RegionStates.getRegionStateNodeFromName(byte[]regionName)
 
 
 
@@ -186,21 +182,21 @@
 
 
 
-(package private) http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionRegionStates.RegionStateNode
-RegionStates.getRegionNodes()
-
-
 http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">SetRegionStates.RegionStateNode
 RegionStates.ServerStateNode.getRegions()
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionStates.RegionStateNode
 AssignmentManager.getRegionsInTransition()
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionStates.RegionStateNode
 RegionStates.getRegionsInTransition()
 
+
+(package private) http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionRegionStates.RegionStateNode
+RegionStates.getRegionStateNodes()
+
 
 (package private) http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in java.util">ArrayListRegionStates.RegionStateNode
 RegionStates.getTableRegionStateNodes(TableNametableName)
@@ -234,8 +230,7 @@
 
 
 RegionStates.ServerStateNode
-RegionStates.addRegionToServer(ServerNameserverName,
- RegionStates.RegionStateNoderegionNode)
+RegionStates.addRegionToServer(RegionStates.RegionStateNoderegionNode)
 
 
 RegionStates.RegionFailedOpen
@@ -250,158 +245,158 @@
 RegionStates.RegionStateNode.compareTo(RegionStates.RegionStateNodeother)
 
 
-private RegionState
-RegionStates.createRegionState(RegionStates.RegionStateNodenode)
-
-
 protected void
 UnassignProcedure.finishTransition(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode)
 
-
+
 protected abstract void
 RegionTransitionProcedure.finishTransition(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode)
 
-
+
 protected void
 AssignProcedure.finishTransition(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode)
 
-
+
 private void
 AssignProcedure.handleFailure(MasterProcedureEnvenv,
  RegionStates.RegionStateNoderegionNode)
 Called when dispatch or subsequent OPEN request fail.
 
 
-
+
 (package private) boolean
 RegionStates.include(RegionStates.RegionStateNodenode,
booleanoffline)
 Utility.
 
 
-
+
 private boolean
 AssignProcedure.incrementAndCheckMaxAttempts(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode)
 
-
+
 protected boolean
 RegionTransitionProcedure.isServerOnline(MasterProcedureEnvenv,
   RegionStates.RegionStateNoderegionNode)
 
-
+
 void
 AssignmentManager.markRegionAsClosed(RegionStates.RegionStateNoderegionNode)
 
-
+
 void
 AssignmentManager.markRegionAsClosing(RegionStates.RegionStateNoderegionNode)
 
-
+
 void
 

[30/51] [partial] hbase-site git commit: Published site at .

2017-11-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/fd365a2b/devapidocs/org/apache/hadoop/hbase/client/HTable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/HTable.html 
b/devapidocs/org/apache/hadoop/hbase/client/HTable.html
index 11c857e..380c1d7 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/HTable.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/HTable.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":9,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":9,"i33":10,"i34":10,"i35":10,"i36":9,"i37":10,"i38":42,"i39":10,"i40":10,"i41":42,"i42":10,"i43":10,"i44":42,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":42,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":42,"i61":42,"i62":42,"i63":42,"i64":10,"i65":10,"i66":9};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":42,"i8":10,"i9":42,"i10":10,"i11":10,"i12":42,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":9,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":9,"i33":10,"i34":10,"i35":10,"i36":9,"i37":10,"i38":42,"i39":10,"i40":10,"i41":42,"i42":10,"i43":10,"i44":42,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":42,"i51":42,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":42,"i61":42,"i62":42,"i63":42,"i64":10,"i65":10,"i66":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -348,8 +348,7 @@ implements CompareFilter.CompareOpcompareOp,
   byte[]value,
   Deletedelete)
-Atomically checks if a row/family/qualifier value matches 
the expected
- value.
+Deprecated.
 
 
 
@@ -372,7 +371,7 @@ implements CompareFilter.CompareOpcompareOp,
   byte[]value,
   RowMutationsrm)
-Atomically checks if a row/family/qualifier value matches 
the expected value.
+Deprecated.
 
 
 
@@ -405,8 +404,7 @@ implements CompareFilter.CompareOpcompareOp,
byte[]value,
Putput)
-Atomically checks if a row/family/qualifier value matches 
the expected
- value.
+Deprecated.
 
 
 
@@ -667,7 +665,7 @@ implements 
 HTableDescriptor
 getTableDescriptor()
-Gets the table descriptor for 
this table.
+Deprecated.
 
 
 
@@ -1088,8 +1086,10 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/c
 
 
 getTableDescriptor
-publicHTableDescriptorgetTableDescriptor()
-throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
+http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
+publicHTableDescriptorgetTableDescriptor()
+throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
+Deprecated.
 Gets the table descriptor for 
this table.
 
 Specified by:
@@ -1105,7 +1105,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/c
 
 
 getDescriptor
-publicTableDescriptorgetDescriptor()
+publicTableDescriptorgetDescriptor()
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
interface:Table
 Gets the table 
descriptor for this table.
@@ -1123,7 +1123,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/c
 
 
 getKeysAndRegionsInRange
-private Pair<List<byte[]>,List<HRegionLocation>> getKeysAndRegionsInRange(byte[] startKey,
+private Pair<List<byte[]>,List<HRegionLocation>> getKeysAndRegionsInRange(byte[] startKey,
                                                                           byte[] endKey,
                                                                           boolean includeEndKey)
                                                                    throws 

[30/51] [partial] hbase-site git commit: Published site at .

2017-11-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b9722a17/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
 
b/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
index c5ba779..3ca0cce 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
@@ -551,7 +551,7 @@ implements MasterObserver
-postAbortProcedure,
 postAddReplicationPeer,
 postAddRSGroup,
 postAssign,
 postBalance, postBalanceRSGroup,
 postBalanceSwitch,
 postClearDeadServers,
 postCloneSnapshot,
 postCompletedCreateTableAction,
 postCompletedDeleteTableAction,
 postCompletedDisableTableAction,
 postCompletedEnableTableAction,
 postCompletedMergeRegionsAction,
 postCompletedModifyTableAction,
 postCompletedSplitRegionAction,
 postCompletedTruncateTableAction, postCreateNamespace,
 postCreateTable,
 postDecommissionRegionServers,
 postDeleteNamespace,
 postDeleteSnapshot,
 postDeleteTable,
 postDisableReplicationPeer,
 postDisableTable,
 postEnableReplicationPeer, postEnableTable,
 postGetClusterStatus,
 postGetLocks,
 postGetNamespaceDescriptor,
 postGetProcedures, postGetReplicationPeerConfig,
 postGetTableDescriptors,
 postGetTableNames,
 postListDecommissionedRegionServers,
 postListNamespaceDescriptors,
 postListReplicationPeers,
 postListSnapshot,
 postLockHeartbeat,
 postMergeRegions,
 postMergeRegionsCommitAction,
 postModifyNamespace,
 postModifyTable,
 postMove, postMoveServers,
 postMoveServersAndTables,
 postMoveTables,
 postRecommissionRegionServer,
 postRegionOffline,
 postRemoveReplicationPeer,
 postRemoveRSGroup,
 postRequestLock,
 postRestoreSnapshot,
 postRollBackMergeRegionsAction,
 postRollBackSplitRegionAction,
 postSetNamespaceQuota,
 postSetTableQuota, postSetUserQuota,
 postSetUserQuota,
 postSetUserQuota,
 postSnapshot,
 postTableFlush,
 postTruncateTable,
 postUnassign,
 
 postUpdateReplicationPeerConfig, preAbortProcedure,
 preAddReplicationPeer,
 preAddRSGroup,
 preAssign,
 preBalance, preBalanceRSGroup,
 preBalanceSwitch,
 preClearDeadServers,
 preCloneSnapshot,
 preCreateNamespace,
 preCreateTable,
 preCreateTableAction,
 preDecommissionRegionServers,
 preDeleteNamespace, preDeleteSnapshot,
 preDeleteTable,
 preDeleteTableAction,
 preDisableReplicationPeer,
 preDisableTableAction,
 preEnableReplicationPeer,
 preEnableTable,
 preEnableTableAction,
 preGetClusterStatus,
 preGetLocks,
 preGetNamespaceDescriptor,
 preGetProcedures,
 preGetReplicationPeerConfig,
 preGetTableDescriptors,
 preGetTableNames,
 preListDecommissionedRegionServers,
 preListNamespaceDescriptors,
 preListReplicationPeers,
 preListSnapshot, preLockHeartbeat,
 preMasterInitialization,
 preMergeRegions,
 preMergeRegionsAction,
 preMergeRegionsCommitAction,
 preModifyNamespace,
 preModifyTableAction,
 preMove,
 preMoveServers,
 preMoveServersAndTables,
 preMoveTables,
 preRecommissionRegionServer,
 preRegionOffline, preRemoveReplicationPeer,
 preRemoveRSGroup,
 preRequestLock,
 preRestoreSnapshot, preSetNamespaceQuota,
 preSetSplitOrMergeEnabled,
 preSetTableQuota,
 preSetUserQuota, preSetUserQuota,
 preSetUserQuota,
 preShutdown,
 preSnapshot,
 preSplitRegion,
 preSplitRegionAction,
 preSplitRegionAfterMETAAction,
 preSplitRegionBeforeMETAAction,
 preStopMaster, preTableFlush,
 preTruncateTable,
 preTruncateTableAction,
 preUnassign,
 preUpdateReplicationPeerConfig
+postAbortProcedure,
 postAddReplicationPeer,
 postAddRSGroup,
 postAssign,
 postBalance, postBalanceRSGroup,
 postBalanceSwitch,
 postClearDeadServers,
 postCloneSnapshot,
 postCompletedCreateTableAction,
 postCompletedDeleteTableAction,
 postCompletedDisableTableAction,
 postCompletedEnableTableAction,
 postCompletedMergeRegionsAction,
 postCompletedModifyTableAction,
 postCompletedSplitRegionAction,
 postCompletedTruncateTableAction, 

[30/51] [partial] hbase-site git commit: Published site at .

2017-11-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b1eb7453/devapidocs/org/apache/hadoop/hbase/ProcedureState.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ProcedureState.html 
b/devapidocs/org/apache/hadoop/hbase/ProcedureState.html
index c087a98..8fae66b 100644
--- a/devapidocs/org/apache/hadoop/hbase/ProcedureState.html
+++ b/devapidocs/org/apache/hadoop/hbase/ProcedureState.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames
@@ -359,7 +359,7 @@ not permitted.)
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b1eb7453/devapidocs/org/apache/hadoop/hbase/RawCell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/RawCell.html 
b/devapidocs/org/apache/hadoop/hbase/RawCell.html
new file mode 100644
index 000..b7bbbc8
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/RawCell.html
@@ -0,0 +1,355 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+RawCell (Apache HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
+var methods = {"i0":17,"i1":18,"i2":18,"i3":18};
+var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],16:["t5","Default Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+PrevClass
+NextClass
+
+
+Frames
+NoFrames
+
+
+AllClasses
+
+
+
+
+
+
+
+Summary:
+Nested|
+Field|
+Constr|
+Method
+
+
+Detail:
+Field|
+Constr|
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase
+Interface RawCell
+
+
+
+
+
+
+All Superinterfaces:
+Cell
+
+
+All Known Subinterfaces:
+ExtendedCell
+
+
+All Known Implementing Classes:
+BufferedDataBlockEncoder.OffheapDecodedCell,
 BufferedDataBlockEncoder.OnheapDecodedCell,
 ByteBufferChunkCell, 
ByteBufferKeyValue, IndividualBytesFieldCell, KeyValue, KeyValue.KeyOnlyKeyValue, MapReduceCell, NoTagByteBufferChunkCell, NoTagsByteBufferKeyValue, NoTagsKeyValue, PrivateCellUtil.TagRewriteByteBufferCell, PrivateCellUtil.TagRewriteCell, PrivateCellUtil.ValueAndTagRewriteByteBufferCell, 
PrivateCellUtil.ValueAndTagRewriteCell, SizeCachedKeyValue, SizeCachedNoTagsKeyValue
+
+
+
+@InterfaceAudience.LimitedPrivate(value="Coprocesssor")
+public interface RawCell
+extends Cell
+An extended version of cell that gives more power to 
CPs
+
+
+
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+static int
+MAX_TAGS_LENGTH
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All MethodsStatic MethodsInstance MethodsDefault Methods
+
+Modifier and Type
+Method and Description
+
+
+static void
+checkForTagsLength(int tagsLength)
+Check the length of tags.
+
+
+
+default byte[]
+cloneTags()
+Allows cloning the tags in the cell to a new byte[]
+
+
+
+default Optional<Tag>
+getTag(byte type)
+Returns the specific tag of the given type
+
+
+
+default List<Tag>
+getTags()
+Creates a list of tags in the current cell
+
+
+
+
+
+
+
+Methods inherited from interfaceorg.apache.hadoop.hbase.Cell
+getFamilyArray,
 getFamilyLength,
 getFamilyOffset,
 getQualifierArray,
 getQualifierLength,
 getQualifierOffset,
 getRowArray,
 getRowLength,
 getRowOffset,
 getSequenceId,
 getTagsArray, getTagsLength,
 getTagsOffset,
 getTimestamp,
 getTypeByte,
 getValueArray,
 getValueLength,
 getValueOffset
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Field Detail
+
+
+
+
+
+MAX_TAGS_LENGTH
+static final int MAX_TAGS_LENGTH
+
+See Also:
+Constant
 Field Values
+
+
+
+
+
+
+
+
+
+
+Method Detail
+
+
+
+
+
+cloneTags
+default byte[] cloneTags()
+Allows cloning the tags in the cell to a new byte[]
+
+Returns:
+the byte[] having the tags
+
+
+
+
+
+
+
+
+getTags
+default List<Tag> getTags()
+Creates a list of tags in the current cell
+
+Returns:
+a list of tags
+
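
Taken together, cloneTags(), getTags() and getTag(byte) give coprocessors read access to a cell's tags. A minimal sketch of how a coprocessor-side helper might use this interface (the inspect helper and the DEMO_TAG_TYPE constant are illustrative names, not part of the API):

    import java.util.List;
    import java.util.Optional;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.RawCell;
    import org.apache.hadoop.hbase.Tag;

    public final class TagInspectionSketch {

      // Hypothetical custom tag type, used here purely for illustration.
      private static final byte DEMO_TAG_TYPE = (byte) 70;

      /** Reports the tags carried by a cell handed to a coprocessor hook. */
      static void inspect(Cell cell) {
        if (!(cell instanceof RawCell)) {
          return; // tags are only exposed through the RawCell view
        }
        RawCell raw = (RawCell) cell;

        // Materialize every tag of the cell as Tag objects.
        List<Tag> tags = raw.getTags();
        System.out.println("tag count: " + tags.size());

        // Look up one specific tag type, if present.
        Optional<Tag> demoTag = raw.getTag(DEMO_TAG_TYPE);
        demoTag.ifPresent(t -> System.out.println("found tag of type " + t.getType()));

        // Copy the serialized tag bytes out of the cell.
        byte[] tagBytes = raw.cloneTags();
        System.out.println("serialized tags length: " + tagBytes.length);
      }

      private TagInspectionSketch() {}
    }
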
+
+
+
+
+
+
+
+getTag
+defaulthttp://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in 
