[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-08-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/346adc37/devapidocs/org/apache/hadoop/hbase/client/HTable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/HTable.html b/devapidocs/org/apache/hadoop/hbase/client/HTable.html
index 1ac915d..c7dff41 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/HTable.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/HTable.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":9,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":9,"i29":10,"i30":10,"i31":10,"i32":9,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":42,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":42,"i54":42,"i55":42,"i56":42,"i57":42,"i58":10,"i59":10,"i60":9};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":9,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":9,"i27":10,"i28":10,"i29":10,"i30":9,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":42,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":42,"i51":42,"i52":42,"i53":42,"i54":10,"i55":10,"i56":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -198,14 +198,6 @@ implements 
 
 
-(package private) BufferedMutatorImpl
-mutator
-
-
-private Object
-mutatorLock
-
-
 private int
 operationTimeout
 
@@ -242,10 +234,6 @@ implements tableName
 
 
-private long
-writeBufferSize
-
-
 private int
 writeRpcTimeout
 
@@ -266,13 +254,6 @@ implements 
 
 
 protected 
-HTable(ClusterConnection conn,
-  BufferedMutatorImpl mutator)
-For internal testing.
-
-
-
-protected 
 HTable(ClusterConnection connection,
   TableBuilderBase builder,
   RpcRetryingCallerFactory rpcCallerFactory,
@@ -483,54 +464,46 @@ implements 
 
 
-(package private) void
-flushCommits()
-
-
 Result
 get(Get get)
 Extracts certain cells from a given row.
 
 
 private Result
 get(Get get,
    boolean checkExistenceOnly)
 
 Result[]
 get(List<Get> gets)
 Extracts certain cells from the given rows, in batch.
 
 
-(package private) BufferedMutator
-getBufferedMutator()
-
-
 org.apache.hadoop.conf.Configuration
 getConfiguration()
 Returns the Configuration object used by this instance.
 
 
 protected Connection
 getConnection()
 INTERNAL Used by unit tests and tools to do low-level manipulations.
 
 
 static ThreadPoolExecutor
 getDefaultExecutor(org.apache.hadoop.conf.Configuration conf)
 
 TableDescriptor
 getDescriptor()
 Gets the table descriptor for this table.
 
 
 private Pair<List<byte[]>,List<HRegionLocation>>
 getKeysAndRegionsInRange(byte[] startKey,
                          byte[] endKey,
@@ -539,7 +512,7 @@ implements 
 
 
 private Pair<List<byte[]>,List<HRegionLocation>>
 getKeysAndRegionsInRange(byte[] startKey,
                          byte[] endKey,
@@ -549,93 +522,87 @@ implements 
 
 
 static int
 getMaxKeyValueSize(org.apache.hadoop.conf.Configuration conf)
 
 TableName
 getName()
 Gets the fully qualified table name instance of this table.
 
 int
 getOperationTimeout()
 Get timeout (millisecond) of each operation in this Table instance.
 
 (package private) ExecutorService
 getPool()
 The pool is used for multi requests for this HTable.
 
 int
 getReadRpcTimeout()
 Get timeout (millisecond) of each rpc read request in this Table instance.
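
The removals in this diff (mutator, mutatorLock, writeBufferSize, flushCommits, getBufferedMutator) track the change that moved client-side write buffering out of HTable. A minimal migration sketch, assuming the HBase 2.x client API (table, family, and qualifier names are made up):

    import java.io.IOException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.BufferedMutator;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    class BufferedWriteExample {
      // Buffered writes now go through an explicit BufferedMutator obtained
      // from the Connection, instead of HTable's internal buffer.
      static void write(Connection conn) throws IOException {
        try (BufferedMutator mutator = conn.getBufferedMutator(TableName.valueOf("t1"))) {
          Put put = new Put(Bytes.toBytes("row1"));
          put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
          mutator.mutate(put);  // buffered; flushed on close() or explicit flush()
        }
      }
    }
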

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-08-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2aec596e/apidocs/src-html/org/apache/hadoop/hbase/filter/DependentColumnFilter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/DependentColumnFilter.html b/apidocs/src-html/org/apache/hadoop/hbase/filter/DependentColumnFilter.html
index 8b561ba..da02cae 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/DependentColumnFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/DependentColumnFilter.html
@@ -29,273 +29,265 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-import org.apache.hadoop.hbase.util.Bytes;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
-
-/**
- * A filter for adding inter-column timestamp matching.
- * Only cells with a correspondingly timestamped entry in
- * the target column will be retained.
- * Not compatible with Scan.setBatch as operations need
- * full rows for correct filtering.
- */
-@InterfaceAudience.Public
-public class DependentColumnFilter extends CompareFilter {
-
-  protected byte[] columnFamily;
-  protected byte[] columnQualifier;
-  protected boolean dropDependentColumn;
-
-  protected Set<Long> stampSet = new HashSet<>();
-
-  /**
-   * Build a dependent column filter with value checking.
-   * Dependent column values will be compared using the supplied
-   * compareOp and comparator; for usage of which refer to {@link CompareFilter}.
-   *
-   * @param family dependent column family
-   * @param qualifier dependent column qualifier
-   * @param dropDependentColumn whether the column should be discarded after
-   * @param valueCompareOp comparison op
-   * @param valueComparator comparator
-   */
-  public DependentColumnFilter(final byte[] family, final byte[] qualifier,
-      final boolean dropDependentColumn, final CompareOp valueCompareOp,
-      final ByteArrayComparable valueComparator) {
-    // set up the comparator
-    super(valueCompareOp, valueComparator);
-    this.columnFamily = family;
-    this.columnQualifier = qualifier;
-    this.dropDependentColumn = dropDependentColumn;
-  }
-
-  /**
-   * Constructor for DependentColumn filter.
-   * Cells where a Cell from the target column
-   * with the same timestamp does not exist will be dropped.
-   *
-   * @param family name of target column family
-   * @param qualifier name of column qualifier
-   */
-  public DependentColumnFilter(final byte[] family, final byte[] qualifier) {
-    this(family, qualifier, false);
-  }
-
-  /**
-   * Constructor for DependentColumn filter.
-   * Cells where a Cell from the target column
-   * with the same timestamp does not exist will be dropped.
-   *
-   * @param family name of dependent column family
-   * @param qualifier name of dependent qualifier
-   * @param dropDependentColumn whether the dependent column's Cells should be discarded
-   */
-  public DependentColumnFilter(final byte[] family, final byte[] qualifier,
-      final boolean dropDependentColumn) {
-    this(family, qualifier, dropDependentColumn, CompareOp.NO_OP, null);
-  }
-
-  /**
-   * @return the column family
-   */
-  public byte[] getFamily() {
-    return this.columnFamily;
-  }
-
-  /**
-   * @return the column qualifier
-   */
-  public byte[] getQualifier() {
-    return this.columnQualifier;
-  }
-
-  /**
-   * @return true if we should drop the dependent column, false otherwise
-   */
-  public boolean dropDependentColumn() {
-    return this.dropDependentColumn;
-  }
-
-  public boolean getDropDependentColumn() {
-    return this.dropDependentColumn;
-  }
-
-  @Override
-  public boolean filterAllRemaining() {
-    return false;
-  }
-
-  @Override
-  public ReturnCode filterKeyValue(Cell c) {
-    // Check if the column and qualifier match
-    if (!CellUtil.matchingColumn(c, this.columnFamily, this.columnQualifier)) {
-      // include non-matches for the
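
For orientation, a hedged usage sketch of the filter whose old source is shown above (1.x/2.0-era CompareOp API; family, qualifier, and value are made up):

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.BinaryComparator;
    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
    import org.apache.hadoop.hbase.filter.DependentColumnFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    class DependentColumnScan {
      static Scan build() {
        // Keep only cells whose timestamp also appears in cf:dep, check the
        // dependent column's value, and drop the dependent column itself.
        Scan scan = new Scan();
        scan.setFilter(new DependentColumnFilter(Bytes.toBytes("cf"), Bytes.toBytes("dep"),
            true, CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("v"))));
        return scan;  // per the javadoc above, not compatible with Scan.setBatch
      }
    }
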

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-31 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1837997e/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
index 35d5549..7f42873 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateTableProcedureBiConsumer.html
@@ -115,2816 +115,2814 @@
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-import

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a2b2dd19/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
index 5c95397..860416b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
@@ -293,7944 +293,7962 @@
   final AtomicLong compactionsFailed = new AtomicLong(0L);
   final AtomicLong compactionNumFilesCompacted = new AtomicLong(0L);
   final AtomicLong compactionNumBytesCompacted = new AtomicLong(0L);
-
-  private final WAL wal;
-  private final HRegionFileSystem fs;
-  protected final Configuration conf;
-  private final Configuration baseConf;
-  private final int rowLockWaitDuration;
-  static final int DEFAULT_ROWLOCK_WAIT_DURATION = 3;
-
-  // The internal wait duration to acquire a lock before read/update
-  // from the region. It is not per row. The purpose of this wait time
-  // is to avoid waiting a long time while the region is busy, so that
-  // we can release the IPC handler soon enough to improve the
-  // availability of the region server. It can be adjusted by
-  // tuning configuration "hbase.busy.wait.duration".
-  final long busyWaitDuration;
-  static final long DEFAULT_BUSY_WAIT_DURATION = HConstants.DEFAULT_HBASE_RPC_TIMEOUT;
-
-  // If updating multiple rows in one call, wait longer,
-  // i.e. waiting for busyWaitDuration * # of rows. However,
-  // we can limit the max multiplier.
-  final int maxBusyWaitMultiplier;
-
-  // Max busy wait duration. There is no point to wait longer than the RPC
-  // purge timeout, when an RPC call will be terminated by the RPC engine.
-  final long maxBusyWaitDuration;
-
-  // Max cell size. If nonzero, the maximum allowed size for any given cell
-  // in bytes
-  final long maxCellSize;
-
-  // negative number indicates infinite timeout
-  static final long DEFAULT_ROW_PROCESSOR_TIMEOUT = 60 * 1000L;
-  final ExecutorService rowProcessorExecutor = Executors.newCachedThreadPool();
-
-  private final ConcurrentHashMap<RegionScanner, Long> scannerReadPoints;
+  final AtomicLong compactionsQueued = new AtomicLong(0L);
+  final AtomicLong flushesQueued = new AtomicLong(0L);
+
+  private final WAL wal;
+  private final HRegionFileSystem fs;
+  protected final Configuration conf;
+  private final Configuration baseConf;
+  private final int rowLockWaitDuration;
+  static final int DEFAULT_ROWLOCK_WAIT_DURATION = 3;
+
+  // The internal wait duration to acquire a lock before read/update
+  // from the region. It is not per row. The purpose of this wait time
+  // is to avoid waiting a long time while the region is busy, so that
+  // we can release the IPC handler soon enough to improve the
+  // availability of the region server. It can be adjusted by
+  // tuning configuration "hbase.busy.wait.duration".
+  final long busyWaitDuration;
+  static final long DEFAULT_BUSY_WAIT_DURATION = HConstants.DEFAULT_HBASE_RPC_TIMEOUT;
+
+  // If updating multiple rows in one call, wait longer,
+  // i.e. waiting for busyWaitDuration * # of rows. However,
+  // we can limit the max multiplier.
+  final int maxBusyWaitMultiplier;
+
+  // Max busy wait duration. There is no point to wait longer than the RPC
+  // purge timeout, when an RPC call will be terminated by the RPC engine.
+  final long maxBusyWaitDuration;
+
+  // Max cell size. If nonzero, the maximum allowed size for any given cell
+  // in bytes
+  final long maxCellSize;
+
+  // negative number indicates infinite timeout
+  static final long DEFAULT_ROW_PROCESSOR_TIMEOUT = 60 * 1000L;
+  final ExecutorService rowProcessorExecutor = Executors.newCachedThreadPool();
 
-  /**
-   * The sequence ID that was enLongAddered when this region was opened.
-   */
-  private long openSeqNum = HConstants.NO_SEQNUM;
-
-  /**
-   * The default setting for whether to enable on-demand CF loading for
-   * scan requests to this region. Requests can override it.
-   */
-  private boolean isLoadingCfsOnDemandDefault = false;
-
-  private final AtomicInteger majorInProgress = new AtomicInteger(0);
-  private final AtomicInteger minorInProgress = new AtomicInteger(0);
-
-  //
-  // Context: During replay we want to ensure that we do not lose any data. So, we
-  // have to be conservative in how we replay wals. For each store, we calculate
-  // the maxSeqId up to which
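
The busy-wait comments above describe a per-row wait that is capped twice. A small sketch of that arithmetic, paraphrased from the comments rather than copied from HRegion (names and the exact combining rule are assumptions):

    class BusyWaitMath {
      // Wait busyWaitDuration per row, but cap the row multiplier at
      // maxBusyWaitMultiplier and the total at maxBusyWaitDuration.
      static long busyWaitTime(long busyWaitDuration, int maxBusyWaitMultiplier,
                               long maxBusyWaitDuration, int rows) {
        long capped = busyWaitDuration * Math.min(rows, maxBusyWaitMultiplier);
        return Math.min(maxBusyWaitDuration, capped);
      }
    }
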

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21766f4a/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html b/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html
index 0179a3c..e8ff73a 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class AssignmentManager
+public class AssignmentManager
 extends Object
 implements ServerListener
 The AssignmentManager is the coordinator for region assign/unassign operations.
@@ -950,7 +950,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
@@ -959,7 +959,7 @@ implements 
 
 BOOTSTRAP_THREAD_POOL_SIZE_CONF_KEY
-public static final String BOOTSTRAP_THREAD_POOL_SIZE_CONF_KEY
+public static final String BOOTSTRAP_THREAD_POOL_SIZE_CONF_KEY
 
 See Also:
 Constant Field Values
@@ -972,7 +972,7 @@ implements 
 
 ASSIGN_DISPATCH_WAIT_MSEC_CONF_KEY
-public static final String ASSIGN_DISPATCH_WAIT_MSEC_CONF_KEY
+public static final String ASSIGN_DISPATCH_WAIT_MSEC_CONF_KEY
 
 See Also:
 Constant Field Values
@@ -985,7 +985,7 @@ implements 
 
 DEFAULT_ASSIGN_DISPATCH_WAIT_MSEC
-private static final int DEFAULT_ASSIGN_DISPATCH_WAIT_MSEC
+private static final int DEFAULT_ASSIGN_DISPATCH_WAIT_MSEC
 
 See Also:
 Constant Field Values
@@ -998,7 +998,7 @@ implements 
 
 ASSIGN_DISPATCH_WAITQ_MAX_CONF_KEY
-public static final String ASSIGN_DISPATCH_WAITQ_MAX_CONF_KEY
+public static final String ASSIGN_DISPATCH_WAITQ_MAX_CONF_KEY
 
 See Also:
 Constant Field Values
@@ -1011,7 +1011,7 @@ implements 
 
 DEFAULT_ASSIGN_DISPATCH_WAITQ_MAX
-private static final int DEFAULT_ASSIGN_DISPATCH_WAITQ_MAX
+private static final int DEFAULT_ASSIGN_DISPATCH_WAITQ_MAX
 
 See Also:
 Constant Field Values
@@ -1024,7 +1024,7 @@ implements 
 
 RIT_CHORE_INTERVAL_MSEC_CONF_KEY
-public static final String RIT_CHORE_INTERVAL_MSEC_CONF_KEY
+public static final String RIT_CHORE_INTERVAL_MSEC_CONF_KEY
 
 See Also:
 Constant Field Values
@@ -1037,7 +1037,7 @@ implements 
 
 DEFAULT_RIT_CHORE_INTERVAL_MSEC
-private static final int DEFAULT_RIT_CHORE_INTERVAL_MSEC
+private static final int DEFAULT_RIT_CHORE_INTERVAL_MSEC
 
 See Also:
 Constant Field Values
@@ -1050,7 +1050,7 @@ implements 
 
 ASSIGN_MAX_ATTEMPTS
-public static final String ASSIGN_MAX_ATTEMPTS
+public static final String ASSIGN_MAX_ATTEMPTS
 
 See Also:
 Constant Field Values
@@ -1063,7 +1063,7 @@ implements 
 
 DEFAULT_ASSIGN_MAX_ATTEMPTS
-private static final int DEFAULT_ASSIGN_MAX_ATTEMPTS
+private static final int DEFAULT_ASSIGN_MAX_ATTEMPTS
 
 See Also:
 Constant Field Values
@@ -1076,7 +1076,7 @@ implements 
 
 METRICS_RIT_STUCK_WARNING_THRESHOLD
-public static final String METRICS_RIT_STUCK_WARNING_THRESHOLD
+public static final String METRICS_RIT_STUCK_WARNING_THRESHOLD
 Region in Transition metrics threshold time
 
 See Also:
@@ -1090,7 +1090,7 @@ implements 
 
 DEFAULT_RIT_STUCK_WARNING_THRESHOLD
-private static final int DEFAULT_RIT_STUCK_WARNING_THRESHOLD
+private static
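
The paired *_CONF_KEY / DEFAULT_* constants above follow the usual HBase pattern of a public configuration key with a private default. A sketch of how such a key is typically consumed (the key's string value sits behind the elided "Constant Field Values" link; the fallback of 10 is an assumption, since the real DEFAULT_ASSIGN_MAX_ATTEMPTS is private):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.master.assignment.AssignmentManager;

    class AssignConfExample {
      static int maxAttempts(Configuration conf) {
        // ASSIGN_MAX_ATTEMPTS is public static final String per the listing above
        return conf.getInt(AssignmentManager.ASSIGN_MAX_ATTEMPTS, 10);
      }
    }
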

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2d5075d7/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
index 849797f..334f498 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class ProcedureExecutor.CompletedProcedureCleaner<TEnvironment>
+private static class ProcedureExecutor.CompletedProcedureCleaner<TEnvironment>
 extends ProcedureInMemoryChore<TEnvironment>
 Internal cleaner that removes the completed procedure results after a TTL.
  NOTE: This is a special case handled in timeoutLoop().
@@ -186,7 +186,7 @@ extends CLEANER_INTERVAL_CONF_KEY
 
-private Map<Long, ProcedureInfo>
+private Map<Long, ProcedureExecutor.CompletedProcedureRetainer>
 completed
 
@@ -253,7 +253,7 @@ extends 
 CompletedProcedureCleaner(org.apache.hadoop.conf.Configuration conf,
  ProcedureStore store,
- Map<Long, ProcedureInfo> completedMap,
+ Map<Long, ProcedureExecutor.CompletedProcedureRetainer> completedMap,
  Map<NonceKey, Long> nonceKeysToProcIdsMap)
 
@@ -288,7 +288,7 @@ extends Procedure
-acquireLock, addStackIndex, afterReplay, beforeReplay, compareTo, completionCleanup, doAcquireLock, doExecute, doReleaseLock, doRollback, elapsedTime, getChildrenLatch, getException, getLastUpdate, getNonceKey, getOwner, getParentProcId, getProcedureMetrics, getProcId, getProcIdHashCode, getResult, getRootProcedureId, getRootProcId, getStackIndexes, getState, getSubmittedTime, getTimeout, getTimeoutTimestamp, hasChildren, hasException, hasLock, hasOwner, hasParent, hasTimeout, haveSameParent, holdLock, incChildrenLatch, isFailed, isFinished, isInitializing, isRunnable, isSuccess, isWaiting, isYieldAfterExecutionStep, releaseLock, removeStackIndex, setAbortFailure, setChildrenLatch, setFailure, setFailure, setLastUpdate, setNonceKey, setOwner, setOwner, setParentProcId, setProcId, setResult, setRootProcId, setStackIndexes, setState, setSubmittedTime, setTimeout, setTimeoutFailure, shouldWaitClientAck, toString, toStringClass, toStringClassDetails, toStringDetails, toStringSimpleSB, toStringState, tryRunnable, updateMetricsOnFinish, updateMetricsOnSubmit, updateTimestamp, wasExecuted
+acquireLock, addStackIndex, afterReplay, beforeReplay, compareTo, completionCleanup, doAcquireLock, doExecute, doReleaseLock, doRollback, elapsedTime, getChildrenLatch, getException, getLastUpdate, getNonceKey, getOwner, getParentProcId, getProcedureMetrics, getProcId, getProcIdHashCode, getProcName, getResult, getRootProcedureId, getRootProcId, getStackIndexes, getState, getSubmittedTime, getTimeout, getTimeoutTimestamp, hasChildren, hasException, hasLock, hasOwner, hasParent, hasTimeout, haveSameParent, holdLock, incChildrenLatch, isFailed, isFinished, isInitializing, isRunnable, isSuccess, isWaiting, isYieldAfterExecutionStep, releaseLock, removeStackIndex, setAbortFailure, setChildrenLatch, setFailure, setFailure, setLastUpdate, setNonceKey, setOwner, setOwner, setParentProcId, setProcId, setResult,
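
The class comment above ("removes the completed procedure results after a TTL") describes a simple age-based eviction over the completed map. A generic sketch of that idea, not the ProcedureExecutor code itself:

    import java.util.Iterator;
    import java.util.Map;

    class TtlCleaner {
      // Drop entries whose completion timestamp is older than ttlMs.
      static <K> void clean(Map<K, Long> completedAtMs, long ttlMs, long nowMs) {
        Iterator<Map.Entry<K, Long>> it = completedAtMs.entrySet().iterator();
        while (it.hasNext()) {
          if (nowMs - it.next().getValue() >= ttlMs) {
            it.remove();  // evict: past its TTL
          }
        }
      }
    }
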
 

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0383a9c2/devapidocs/org/apache/hadoop/hbase/regionserver/CompactSplit.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/CompactSplit.html b/devapidocs/org/apache/hadoop/hbase/regionserver/CompactSplit.html
index d65eedb..65f9c66 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/CompactSplit.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/CompactSplit.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -406,40 +406,32 @@ implements User user)
 
 
-void
-requestRegionsMerge(Region a,
-   Region b,
-   boolean forcible,
-   long masterSystemTime,
-   User user)
-
-
 boolean
 requestSplit(Region r)
 
 void
 requestSplit(Region r,
             byte[] midKey)
 
 void
 requestSplit(Region r,
             byte[] midKey,
             User user)
 
 void
 requestSystemCompaction(Region r,
                         Store s,
                         String why)
 
 void
 requestSystemCompaction(Region r,
                         String why)
 
 private CompactionContext
 selectCompaction(Region r,
                  Store s,
@@ -447,19 +439,19 @@ implements CompactionRequest request,
                  User user)
 
 private boolean
 shouldSplitRegion()
 
 (package private) void
 shutdownLongCompactions()
 
 String
 toString()
 
 private void
 waitFor(ThreadPoolExecutor t,
         String name)
@@ -751,26 +743,13 @@ implements String dumpQueue()
 
 
-requestRegionsMerge
-public void requestRegionsMerge(Region a,
-                                Region b,
-                                boolean forcible,
-                                long masterSystemTime,
-                                User user)
-
-
 
 
 
 
 
 requestSplit
-public boolean requestSplit(Region r)
+public boolean requestSplit(Region r)
 
@@ -779,7 +758,7 @@ implements 
 
 requestSplit
-public void requestSplit(Region r,
+public void requestSplit(Region r,
                          byte[] midKey)
 
@@ -789,7 +768,7 @@ implements 
 
 requestSplit
-public void requestSplit(Region r,
+public void requestSplit(Region r,
                          byte[] midKey,
                          User user)
 
@@ -800,7 +779,7 @@ implements 
 
 requestCompaction
-public List<CompactionRequest> requestCompaction(Region r,
+public List<CompactionRequest> requestCompaction(Region r,
                                                  String why)
                                           throws IOException
 
@@ -823,7 +802,7 @@ implements 
 
 requestCompaction
-public List<CompactionRequest> requestCompaction(Region r,
+public List<CompactionRequest> requestCompaction(Region r,
                                                  String
[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f391bcef/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html b/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
index c628528..2c8bb5a 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
@@ -79,501 +79,504 @@
   // familyMap
   ClassSize.REFERENCE +
   // familyMap
-  ClassSize.TREEMAP);
-
-  /**
-   * The attribute for storing the list of clusters that have consumed the change.
-   */
-  private static final String CONSUMED_CLUSTER_IDS = "_cs.id";
-
-  /**
-   * The attribute for storing TTL for the result of the mutation.
-   */
-  private static final String OP_ATTRIBUTE_TTL = "_ttl";
-
-  private static final String RETURN_RESULTS = "_rr_";
-
-  protected byte[] row = null;
-  protected long ts = HConstants.LATEST_TIMESTAMP;
-  protected Durability durability = Durability.USE_DEFAULT;
-
-  // A Map sorted by column family.
-  protected NavigableMap<byte[], List<Cell>> familyMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
+  ClassSize.TREEMAP +
+  // priority
+  ClassSize.INTEGER
+  );
+
+  /**
+   * The attribute for storing the list of clusters that have consumed the change.
+   */
+  private static final String CONSUMED_CLUSTER_IDS = "_cs.id";
+
+  /**
+   * The attribute for storing TTL for the result of the mutation.
+   */
+  private static final String OP_ATTRIBUTE_TTL = "_ttl";
+
+  private static final String RETURN_RESULTS = "_rr_";
+
+  protected byte[] row = null;
+  protected long ts = HConstants.LATEST_TIMESTAMP;
+  protected Durability durability = Durability.USE_DEFAULT;
 
-  @Override
-  public CellScanner cellScanner() {
-    return CellUtil.createCellScanner(getFamilyCellMap());
-  }
-
-  /**
-   * Creates an empty list if one doesn't exist for the given column family
-   * or else it returns the associated list of Cell objects.
-   *
-   * @param family column family
-   * @return a list of Cell objects, returns an empty list if one doesn't exist.
-   */
-  List<Cell> getCellList(byte[] family) {
-    List<Cell> list = this.familyMap.get(family);
-    if (list == null) {
-      list = new ArrayList<>();
-    }
-    return list;
-  }
-
-  /*
-   * Create a KeyValue with this objects row key and the Put identifier.
-   *
-   * @return a KeyValue with this objects row key and the Put identifier.
-   */
-  KeyValue createPutKeyValue(byte[] family, byte[] qualifier, long ts, byte[] value) {
-    return new KeyValue(this.row, family, qualifier, ts, KeyValue.Type.Put, value);
-  }
-
-  /**
-   * Create a KeyValue with this objects row key and the Put identifier.
-   * @param family
-   * @param qualifier
-   * @param ts
-   * @param value
-   * @param tags - Specify the Tags as an Array
-   * @return a KeyValue with this objects row key and the Put identifier.
-   */
-  KeyValue createPutKeyValue(byte[] family, byte[] qualifier, long ts, byte[] value, Tag[] tags) {
-    KeyValue kvWithTag = new KeyValue(this.row, family, qualifier, ts, value, tags);
-    return kvWithTag;
-  }
-
-  /*
-   * Create a KeyValue with this objects row key and the Put identifier.
-   *
-   * @return a KeyValue with this objects row key and the Put identifier.
-   */
-  KeyValue createPutKeyValue(byte[] family, ByteBuffer qualifier, long ts, ByteBuffer value,
-      Tag[] tags) {
-    return new KeyValue(this.row, 0, this.row == null ? 0 : this.row.length,
-        family, 0, family == null ? 0 : family.length,
-        qualifier, ts, KeyValue.Type.Put, value, tags != null ? Arrays.asList(tags) : null);
-  }
-
-  /**
-   * Compile the column family (i.e. schema) information
-   * into a Map. Useful for parsing and aggregation by debugging,
-   * logging, and administration tools.
-   * @return Map
-   */
-  @Override
-  public Map<String, Object> getFingerprint() {
-    Map<String, Object> map = new HashMap<>();
-    List<String> families = new ArrayList<>(this.familyMap.entrySet().size());
-    // ideally, we would also include table information, but that information
-    // is not stored in each Operation instance.
-    map.put("families", families);
-    for (Map.Entry<byte[], List<Cell>> entry : this.familyMap.entrySet()) {
-      families.add(Bytes.toStringBinary(entry.getKey()));
-    }
-    return map;
-  }
-
-  /**
-   * Compile the details beyond the scope of getFingerprint (row, columns,
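
The familyMap declaration above is the core of every Mutation: cells are grouped per column family in a NavigableMap sorted by family bytes. A short illustration using the public client API (row, family, and qualifier names are made up):

    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    class FamilyMapExample {
      static int families() {
        Put put = new Put(Bytes.toBytes("row1"));
        put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("a"), Bytes.toBytes("1"));
        put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("b"), Bytes.toBytes("2"));
        // Both cells land in the same List<Cell> under the "cf" key of the
        // NavigableMap<byte[], List<Cell>> shown in the source above.
        return put.getFamilyCellMap().size();  // 1
      }
    }
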

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.html b/devapidocs/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.html
index 16c16fc..949cd54 100644
--- a/devapidocs/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.html
@@ -187,7 +187,7 @@ extends 
 
 static StartcodeAgnosticServerName
-valueOf(com.google.common.net.HostAndPort hostnameAndPort,
+valueOf(org.apache.hadoop.hbase.shaded.com.google.common.net.HostAndPort hostnameAndPort,
 long startcode)
 
@@ -266,13 +266,13 @@ extends 
+
 
 
 
 
 valueOf
-public staticStartcodeAgnosticServerNamevalueOf(com.google.common.net.HostAndPorthostnameAndPort,
+public staticStartcodeAgnosticServerNamevalueOf(org.apache.hadoop.hbase.shaded.com.google.common.net.HostAndPorthostnameAndPort,
   longstartcode)
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/favored/class-use/StartcodeAgnosticServerName.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/favored/class-use/StartcodeAgnosticServerName.html
 
b/devapidocs/org/apache/hadoop/hbase/favored/class-use/StartcodeAgnosticServerName.html
index fd0601d..e18633b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/favored/class-use/StartcodeAgnosticServerName.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/favored/class-use/StartcodeAgnosticServerName.html
@@ -104,7 +104,7 @@
 
 
 static StartcodeAgnosticServerName
-StartcodeAgnosticServerName.valueOf(com.google.common.net.HostAndPorthostnameAndPort,
+StartcodeAgnosticServerName.valueOf(org.apache.hadoop.hbase.shaded.com.google.common.net.HostAndPorthostnameAndPort,
longstartcode)
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html b/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
index 4ef40ce..705e2b3 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
@@ -319,7 +319,7 @@ the order they are declared.
 
 
 values
-public static CompareFilter.CompareOp[] values()
+public static CompareFilter.CompareOp[] values()
 Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -339,7 +339,7 @@ for (CompareFilter.CompareOp c : CompareFilter.CompareOp.values())
 
 
 valueOf
-public static CompareFilter.CompareOp valueOf(String name)
+public static CompareFilter.CompareOp valueOf(String name)
 Returns the enum constant of this type with the specified name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 20ffc60..a6e9a0d 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -175,14 +175,14 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
 
+org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
+org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
+org.apache.hadoop.hbase.filter.FuzzyRowFilter.Order
 org.apache.hadoop.hbase.filter.FilterList.Operator
-org.apache.hadoop.hbase.filter.FuzzyRowFilter.SatisfiesCode
 org.apache.hadoop.hbase.filter.Filter.ReturnCode
-org.apache.hadoop.hbase.filter.FuzzyRowFilter.Order
 

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9eba7fcf/devapidocs/src-html/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.ZkSplitTaskDetails.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.ZkSplitTaskDetails.html b/devapidocs/src-html/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.ZkSplitTaskDetails.html
index 5eefe01..59063f5 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.ZkSplitTaskDetails.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.ZkSplitTaskDetails.html
@@ -58,606 +58,607 @@
 import org.apache.hadoop.hbase.util.CancelableProgressable;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
-import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
-import org.apache.hadoop.hbase.zookeeper.ZKUtil;
-import org.apache.hadoop.hbase.zookeeper.ZooKeeperListener;
-import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.zookeeper.AsyncCallback;
-import org.apache.zookeeper.KeeperException;
-import org.apache.zookeeper.data.Stat;
-
-/**
- * ZooKeeper based implementation of {@link SplitLogWorkerCoordination}.
- * It listens for changes in ZooKeeper.
- */
-@InterfaceAudience.Private
-public class ZkSplitLogWorkerCoordination extends ZooKeeperListener implements
-    SplitLogWorkerCoordination {
-
-  private static final Log LOG = LogFactory.getLog(ZkSplitLogWorkerCoordination.class);
-
-  private static final int checkInterval = 5000; // 5 seconds
-  private static final int FAILED_TO_OWN_TASK = -1;
-
-  private SplitLogWorker worker;
-
-  private TaskExecutor splitTaskExecutor;
-
-  private final Object taskReadyLock = new Object();
-  private AtomicInteger taskReadySeq = new AtomicInteger(0);
-  private volatile String currentTask = null;
-  private int currentVersion;
-  private volatile boolean shouldStop = false;
-  private final Object grabTaskLock = new Object();
-  private boolean workerInGrabTask = false;
-  private int reportPeriod;
-  private RegionServerServices server = null;
-  protected final AtomicInteger tasksInProgress = new AtomicInteger(0);
-  private int maxConcurrentTasks = 0;
-
-  private final ZkCoordinatedStateManager manager;
-
-  public ZkSplitLogWorkerCoordination(ZkCoordinatedStateManager zkCoordinatedStateManager,
-      ZooKeeperWatcher watcher) {
-    super(watcher);
-    manager = zkCoordinatedStateManager;
-  }
-
-  /**
-   * Override handler from {@link ZooKeeperListener}
-   */
-  @Override
-  public void nodeChildrenChanged(String path) {
-    if (path.equals(watcher.znodePaths.splitLogZNode)) {
-      if (LOG.isTraceEnabled()) LOG.trace("tasks arrived or departed on " + path);
-      synchronized (taskReadyLock) {
-        this.taskReadySeq.incrementAndGet();
-        taskReadyLock.notify();
-      }
-    }
-  }
-
-  /**
-   * Override handler from {@link ZooKeeperListener}
-   */
-  @Override
-  public void nodeDataChanged(String path) {
-    // there will be a self generated dataChanged event every time attemptToOwnTask()
-    // heartbeats the task znode by upping its version
-    synchronized (grabTaskLock) {
-      if (workerInGrabTask) {
-        // currentTask can change
-        String taskpath = currentTask;
-        if (taskpath != null && taskpath.equals(path)) {
-          getDataSetWatchAsync();
-        }
-      }
-    }
-  }
-
-  /**
-   * Override setter from {@link SplitLogWorkerCoordination}
-   */
-  @Override
-  public void init(RegionServerServices server, Configuration conf,
-      TaskExecutor splitExecutor, SplitLogWorker worker) {
-    this.server = server;
-    this.worker = worker;
-    this.splitTaskExecutor = splitExecutor;
-    maxConcurrentTasks = conf.getInt("hbase.regionserver.wal.max.splitters", DEFAULT_MAX_SPLITTERS);
-    reportPeriod =
-        conf.getInt("hbase.splitlog.report.period",
-          conf.getInt(HConstants.HBASE_SPLITLOG_MANAGER_TIMEOUT,
-            ZKSplitLogManagerCoordination.DEFAULT_TIMEOUT) / 3);
-  }
-
-  /* Support functions for ZooKeeper async callback */
-
-  void getDataSetWatchFailure(String path) {
-    synchronized (grabTaskLock) {
-      if (workerInGrabTask) {
-        // currentTask can change but that's ok
-        String taskpath = currentTask;
-        if (taskpath != null && taskpath.equals(path)) {
-          LOG.info("retrying
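
The nodeChildrenChanged handler above is a classic sequence-and-notify hand-off: the ZooKeeper watcher thread bumps taskReadySeq and notifies, while a worker blocked on taskReadyLock wakes and rescans. A generic sketch of that pattern (not the HBase class itself):

    import java.util.concurrent.atomic.AtomicInteger;

    class TaskReadySignal {
      private final Object lock = new Object();
      private final AtomicInteger seq = new AtomicInteger(0);

      void onTasksChanged() {          // watcher side, cf. nodeChildrenChanged
        synchronized (lock) {
          seq.incrementAndGet();
          lock.notify();
        }
      }

      int awaitChange(int lastSeen) throws InterruptedException {
        synchronized (lock) {          // worker side: block until seq moves
          while (seq.get() == lastSeen) {
            lock.wait();
          }
          return seq.get();
        }
      }
    }
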

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/17128d27/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
index 43e4ace..b6eab85 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class RawAsyncHBaseAdmin
+public class RawAsyncHBaseAdmin
 extends Object
 implements AsyncAdmin
 The implementation of AsyncAdmin.
@@ -201,14 +201,18 @@ implements RawAsyncHBaseAdmin.ProcedureBiConsumer
 
 
+private class
+RawAsyncHBaseAdmin.SplitTableRegionProcedureBiConsumer
+
+
 private static interface
 RawAsyncHBaseAdmin.TableOperator
 
 private class
 RawAsyncHBaseAdmin.TableProcedureBiConsumer
 
 private class
 RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer
 
@@ -1072,8 +1076,7 @@ implements 
 
 private CompletableFuture<Void>
-split(ServerName sn,
- HRegionInfo hri,
+split(HRegionInfo hri,
  Optional<byte[]> splitPoint)
 
@@ -1193,7 +1196,7 @@ implements 
 
 FLUSH_TABLE_PROCEDURE_SIGNATURE
-public static final String FLUSH_TABLE_PROCEDURE_SIGNATURE
+public static final String FLUSH_TABLE_PROCEDURE_SIGNATURE
 
 See Also:
 Constant Field Values
@@ -1206,7 +1209,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
@@ -1215,7 +1218,7 @@ implements 
 
 connection
-private final AsyncConnectionImpl connection
+private final AsyncConnectionImpl connection
 
@@ -1224,7 +1227,7 @@ implements 
 
 metaTable
-private final RawAsyncTable metaTable
+private final RawAsyncTable metaTable
 
@@ -1233,7 +1236,7 @@ implements 
 
 rpcTimeoutNs
-private final long rpcTimeoutNs
+private final long rpcTimeoutNs
 
@@ -1242,7 +1245,7 @@ implements 
 
 operationTimeoutNs
-private final long operationTimeoutNs
+private final long operationTimeoutNs
 
@@ -1251,7 +1254,7 @@ implements 
 
 pauseNs
-private final long pauseNs
+private final long pauseNs
 
@@ -1260,7 +1263,7 @@ implements 
 
 maxAttempts
-private final int maxAttempts
+private final int maxAttempts
 
@@ -1269,7 +1272,7 @@ implements 
 
 startLogErrorsCnt
-private final int startLogErrorsCnt
+private final int startLogErrorsCnt
 
@@ -1278,7 +1281,7 @@ implements 
 
 ng
-private final NonceGenerator ng
+private final NonceGenerator ng
 
@@ -1295,7 +1298,7 @@ implements 
 
 RawAsyncHBaseAdmin
-RawAsyncHBaseAdmin(AsyncConnectionImpl connection,
+RawAsyncHBaseAdmin(AsyncConnectionImpl connection,
     AsyncAdminBuilderBase<?> builder)
 
@@ -1313,7 +1316,7 @@ implements 
 
 newMasterCaller
-private <T> AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder<T> newMasterCaller()
+private <T> AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder<T> newMasterCaller()
 
@@ -1322,7 +1325,7 @@ implements 
 
 newAdminCaller
-private <T> AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder<T> newAdminCaller()
+private <T> AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder<T> newAdminCaller()
 
@@ -1333,7 +1336,7 @@ implements 
 
 call
-private <PREQ,PRESP,RESP> CompletableFuture<RESP> call(HBaseRpcController controller,
+private <PREQ,PRESP,RESP> CompletableFuture<RESP> call(HBaseRpcController controller,
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.Interface stub,
    PREQ preq,
    RawAsyncHBaseAdmin.MasterRpcCall<PRESP,PREQ> rpcCall,
@@ -1348,7 +1351,7 @@ implements 
 
 adminCall
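
RawAsyncHBaseAdmin backs the public AsyncAdmin interface, so every operation returns a CompletableFuture. A hedged caller-side sketch (table name made up; assumes the 2.x AsyncAdmin API):

    import java.util.concurrent.CompletableFuture;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.AsyncAdmin;

    class AsyncSplitExample {
      static CompletableFuture<Void> splitTable(AsyncAdmin admin) {
        // Returns immediately; the future completes once the split request
        // has been accepted by the master.
        return admin.split(TableName.valueOf("t1"));
      }
    }
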

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2777c693/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
index 0a32350..cf44d69 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
@@ -75,735 +75,796 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.CryptoProtocolVersion;
 import org.apache.hadoop.crypto.Encryptor;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileSystemLinkResolver;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.UnresolvedLinkException;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.client.ConnectionUtils;
-import org.apache.hadoop.hbase.util.CancelableProgressable;
-import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.hadoop.hdfs.DFSClient;
-import org.apache.hadoop.hdfs.DFSOutputStream;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.protocol.ClientProtocol;
-import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
-import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
-import org.apache.hadoop.hdfs.protocol.LocatedBlock;
-import org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage;
-import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil;
-import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
-import org.apache.hadoop.hdfs.protocol.datatransfer.Op;
-import org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;
-import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto;
-import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto;
-import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto;
-import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;
-import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto;
-import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
-import org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;
-import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
-import org.apache.hadoop.io.EnumSetWritable;
-import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.util.DataChecksum;
-
-/**
- * Helper class for implementing {@link FanOutOneBlockAsyncDFSOutput}.
- */
-@InterfaceAudience.Private
-public final class FanOutOneBlockAsyncDFSOutputHelper {
-
-  private static final Log LOG = LogFactory.getLog(FanOutOneBlockAsyncDFSOutputHelper.class);
-
-  private FanOutOneBlockAsyncDFSOutputHelper() {
-  }
-
-  // use pooled allocator for performance.
-  private static final ByteBufAllocator ALLOC = PooledByteBufAllocator.DEFAULT;
-
-  // copied from DFSPacket since it is package private.
-  public static final long HEART_BEAT_SEQNO = -1L;
-
-  // Timeouts for communicating with DataNode for streaming writes/reads
-  public static final int READ_TIMEOUT = 60 * 1000;
-  public static final int READ_TIMEOUT_EXTENSION = 5 * 1000;
-  public static final int WRITE_TIMEOUT = 8 * 60 * 1000;
-
-  // helper class for getting Status from PipelineAckProto. In hadoop 2.6 or before, there is a
-  // getStatus method, and for hadoop 2.7 or after, the status is retrieved from flag. The flag may
-  // come from the proto directly, or be combined from the reply field of the proto and an ECN
-  // object. See createPipelineAckStatusGetter for more details.
-  private interface PipelineAckStatusGetter {
-    Status get(PipelineAckProto
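
The comment above describes a version shim: probe once which accessor the running Hadoop exposes, then bind the matching getter. A hedged sketch of that idea using reflection; the probed method names and the flag decoding are illustrative assumptions, not Hadoop's actual API:

import java.lang.reflect.Method;

final class StatusGetterShim {
  interface StatusGetter { int get(Object ackProto) throws Exception; }

  static StatusGetter create(Class<?> ackProtoClass) {
    try {
      // Older style (assumed): a direct getStatus(int index) accessor.
      Method m = ackProtoClass.getMethod("getStatus", int.class);
      return ack -> (Integer) m.invoke(ack, 0);
    } catch (NoSuchMethodException e) {
      try {
        // Newer style (assumed): status is packed into a flag field.
        Method m = ackProtoClass.getMethod("getFlag", int.class);
        // Treating the low bits as the status is purely illustrative.
        return ack -> (Integer) m.invoke(ack, 0) & 0xFF;
      } catch (NoSuchMethodException e2) {
        throw new IllegalStateException("unsupported ack proto", e2);
      }
    }
  }
}
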

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperationWithResult.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperationWithResult.html b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperationWithResult.html
index 70b52cd..f5c93d7 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperationWithResult.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.EndpointOperationWithResult.html
@@ -128,7 +128,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private abstract static class RegionCoprocessorHost.EndpointOperationWithResult<T>
+private abstract static class RegionCoprocessorHost.EndpointOperationWithResult<T>
 extends RegionCoprocessorHost.EndpointOperation


@@ -243,7 +243,7 @@ extends 

 result
-private T result
+private T result


@@ -260,7 +260,7 @@ extends 

 EndpointOperationWithResult
-private EndpointOperationWithResult()
+private EndpointOperationWithResult()


@@ -279,7 +279,7 @@ extends 

 setResult
-public void setResult(T result)
+public void setResult(T result)


@@ -288,7 +288,7 @@ extends 

 getResult
-public T getResult()
+public T getResult()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
index ca9d09d..d3c6557 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private abstract static class RegionCoprocessorHost.RegionOperation
+private abstract static class RegionCoprocessorHost.RegionOperation
 extends RegionCoprocessorHost.CoprocessorOperation


@@ -223,7 +223,7 @@ extends 

 RegionOperation
-public RegionOperation()
+public RegionOperation()


@@ -232,7 +232,7 @@ extends 

 RegionOperation
-public RegionOperation(User user)
+public RegionOperation(User user)


@@ -249,7 +249,7 @@ extends 

 call
-public abstract void call(RegionObserver observer,
+public abstract void call(RegionObserver observer,
    ObserverContext<RegionCoprocessorEnvironment> ctx)
 throws IOException

@@ -264,7 +264,7 @@ extends 

 hasCall
-public boolean hasCall(Coprocessor observer)
+public boolean hasCall(Coprocessor observer)

 Specified by:
 hasCall in class RegionCoprocessorHost.CoprocessorOperation

@@ -277,7 +277,7 @@ extends 

 call
-public void call(Coprocessor observer,
+public void call(Coprocessor observer,
    ObserverContext<RegionCoprocessorEnvironment> ctx)
 throws IOException
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperationWithResult.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperationWithResult.html b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperationWithResult.html
index 54c84c2..eb93fd7 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperationWithResult.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperationWithResult.html
@@ -128,7 +128,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private abstract static class RegionCoprocessorHost.RegionOperationWithResult<T>
+private abstract static class RegionCoprocessorHost.RegionOperationWithResult<T>
 extends RegionCoprocessorHost.RegionOperation


@@ -244,7 +244,7 @@ extends 

 result
-private T result
+private T result


@@ -261,7 +261,7 @@ extends 

 RegionOperationWithResult
-public RegionOperationWithResult()
+public RegionOperationWithResult()


@@ -270,7 +270,7 @@ extends 

 RegionOperationWithResult
-public RegionOperationWithResult(User user)
+public RegionOperationWithResult(User user)


@@ -289,7 +289,7 @@ extends 

 setResult
-public void setResult(T result)
+public void setResult(T result)


@@ -298,7 +298,7 @@ extends 

 getResult
-public T getResult()
+public T getResult()
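
The pattern in these nested classes is small: an abstract per-observer operation, plus a with-result variant that stashes a T for the caller. A compact sketch of the same shape (names illustrative, not the RegionCoprocessorHost internals):

abstract class Operation<O> {
  /** Invoked once for each registered observer. */
  abstract void call(O observer) throws Exception;
}

abstract class OperationWithResult<O, T> extends Operation<O> {
  private T result; // captured by the call(...) body, read by the host afterwards
  void setResult(T result) { this.result = result; }
  T getResult() { return result; }
}

The host runs an anonymous subclass over every loaded coprocessor and reads getResult() once the loop finishes.
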
 
 
 


[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/overview-tree.html
--
diff --git a/devapidocs/overview-tree.html b/devapidocs/overview-tree.html
index dd3dc2c..3e73e95 100644
--- a/devapidocs/overview-tree.html
+++ b/devapidocs/overview-tree.html
@@ -2397,6 +2397,8 @@
 org.apache.hadoop.hbase.namespace.NamespaceStateManager
 org.apache.hadoop.hbase.namespace.NamespaceTableAndRegionInfo
 org.apache.hadoop.hbase.replication.NamespaceTableCfWALEntryFilter (implements org.apache.hadoop.hbase.replication.WALCellFilter, org.apache.hadoop.hbase.replication.WALEntryFilter)
+org.apache.hadoop.hbase.wal.NettyAsyncFSWALConfigHelper
+org.apache.hadoop.hbase.util.NettyEventLoopGroupConfig
 org.apache.hadoop.hbase.ipc.NettyRpcClientConfigHelper
 org.apache.hadoop.hbase.http.NoCacheFilter (implements javax.servlet.Filter)
 org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController (implements org.apache.hadoop.hbase.regionserver.throttle.ThroughputController)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index 64c0395..2f0802d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -16,11 +16,11 @@
 @InterfaceAudience.Private
 public class Version {
   public static final String version = "3.0.0-SNAPSHOT";
-  public static final String revision = "c48bb67123e7bd622c567393097d81665dc5fff8";
+  public static final String revision = "351703455a091171a1abc90f250f52f0a7a0aaab";
   public static final String user = "jenkins";
-  public static final String date = "Sun Jul  9 14:39:17 UTC 2017";
+  public static final String date = "Mon Jul 10 14:39:30 UTC 2017";
   public static final String url = "git://asf920.gq1.ygridcore.net/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";
-  public static final String srcChecksum = "61ae5acbf52c24d2025a705b662b6bb1";
+  public static final String srcChecksum = "da1c60d65bc12f522b3d4324366e19df";
 }
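
The generated Version class simply bakes build metadata into string constants. A sketch of how such a class is typically consumed (assumes the hbase jar on the classpath; field names are the ones shown in the diff, output format is illustrative):

public class ShowVersion {
  public static void main(String[] args) {
    System.out.println("HBase " + org.apache.hadoop.hbase.Version.version
        + ", revision " + org.apache.hadoop.hbase.Version.revision
        + ", built " + org.apache.hadoop.hbase.Version.date);
  }
}
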
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
index cdfccb8..e303773 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -832,154 +832,182 @@
   }

   /**
-   * Get a list of {@link RegionLoad} of all regions hosted on a region server for a table.
-   * @param serverName
-   * @param tableName
-   * @return a list of {@link RegionLoad} wrapped by {@link CompletableFuture}
-   */
-  CompletableFuture<List<RegionLoad>> getRegionLoads(ServerName serverName,
-      Optional<TableName> tableName);
-
-  /**
-   * Check whether master is in maintenance mode
-   * @return true if master is in maintenance mode, false otherwise. The return value will be
-   * wrapped by a {@link CompletableFuture}
+   * Shuts down the HBase cluster.
+   */
+  CompletableFuture<Void> shutdown();
+
+  /**
+   * Shuts down the current HBase master only.
+   */
+  CompletableFuture<Void> stopMaster();
+
+  /**
+   * Stop the designated regionserver.
+   * @param serverName
    */
-  CompletableFuture<Boolean> isMasterInMaintenanceMode();
+  CompletableFuture<Void> stopRegionServer(ServerName serverName);

   /**
-   * Get the current compaction state of a table. It could be in a major compaction, a minor
-   * compaction, both, or none.
-   * @param tableName table to examine
-   * @return the current compaction state wrapped by a {@link CompletableFuture}
-   */
-  CompletableFuture<CompactionState> getCompactionState(TableName tableName);
-
-  /**
-   * Get the current compaction state of region. It could be in a major compaction, a minor
-   * compaction, both, or none.
-   * @param regionName region to examine
-   * @return the current compaction state wrapped by a {@link CompletableFuture}
-   */
-  CompletableFuture<CompactionState> getCompactionStateForRegion(byte[] regionName);
-
-  /**
-   * Get the timestamp of the last major compaction for the passed table.
-   * <p>
-   * The timestamp of the oldest HFile resulting from a major compaction of that table, or not
-   * present if no such HFile could be found.
-
[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2d27954a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteColumnFamilyFuture.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteColumnFamilyFuture.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteColumnFamilyFuture.html
index f5bc73a..feb42ea 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteColumnFamilyFuture.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteColumnFamilyFuture.html
@@ -4044,345 +4044,330 @@
 
   @Override
   public void drainRegionServers(List<ServerName> servers) throws IOException {
-    final List<HBaseProtos.ServerName> pbServers = new ArrayList<>(servers.size());
-    for (ServerName server : servers) {
-      // Parse to ServerName to do simple validation.
-      ServerName.parseServerName(server.toString());
-      pbServers.add(ProtobufUtil.toServerName(server));
-    }
-
-    executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
-      @Override
-      public Void rpcCall() throws ServiceException {
-        DrainRegionServersRequest req =
-            DrainRegionServersRequest.newBuilder().addAllServerName(pbServers).build();
-        master.drainRegionServers(getRpcController(), req);
-        return null;
-      }
-    });
-  }
-
-  @Override
-  public List<ServerName> listDrainingRegionServers() throws IOException {
-    return executeCallable(new MasterCallable<List<ServerName>>(getConnection(),
-        getRpcControllerFactory()) {
-      @Override
-      public List<ServerName> rpcCall() throws ServiceException {
-        ListDrainingRegionServersRequest req = ListDrainingRegionServersRequest.newBuilder().build();
-        List<ServerName> servers = new ArrayList<>();
-        for (HBaseProtos.ServerName server : master.listDrainingRegionServers(null, req)
-            .getServerNameList()) {
-          servers.add(ProtobufUtil.toServerName(server));
-        }
-        return servers;
-      }
-    });
-  }
-
-  @Override
-  public void removeDrainFromRegionServers(List<ServerName> servers) throws IOException {
-    final List<HBaseProtos.ServerName> pbServers = new ArrayList<>(servers.size());
-    for (ServerName server : servers) {
-      pbServers.add(ProtobufUtil.toServerName(server));
-    }
-
-    executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
-      @Override
-      public Void rpcCall() throws ServiceException {
-        RemoveDrainFromRegionServersRequest req = RemoveDrainFromRegionServersRequest.newBuilder()
-            .addAllServerName(pbServers).build();
-        master.removeDrainFromRegionServers(getRpcController(), req);
-        return null;
+    executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
+      @Override
+      public Void rpcCall() throws ServiceException {
+        master.drainRegionServers(getRpcController(),
+            RequestConverter.buildDrainRegionServersRequest(servers));
+        return null;
+      }
+    });
+  }
+
+  @Override
+  public List<ServerName> listDrainingRegionServers() throws IOException {
+    return executeCallable(new MasterCallable<List<ServerName>>(getConnection(),
+        getRpcControllerFactory()) {
+      @Override
+      public List<ServerName> rpcCall() throws ServiceException {
+        ListDrainingRegionServersRequest req = ListDrainingRegionServersRequest.newBuilder().build();
+        List<ServerName> servers = new ArrayList<>();
+        for (HBaseProtos.ServerName server : master.listDrainingRegionServers(null, req)
+            .getServerNameList()) {
+          servers.add(ProtobufUtil.toServerName(server));
+        }
+        return servers;
+      }
+    });
+  }
+
+  @Override
+  public void removeDrainFromRegionServers(List<ServerName> servers) throws IOException {
+    executeCallable(new MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
+      @Override
+      public Void rpcCall() throws ServiceException {
+        master.removeDrainFromRegionServers(getRpcController(),
+            RequestConverter.buildRemoveDrainFromRegionServersRequest(servers));
+        return null;
+      }
+    });
+  }
+
+  @Override
+  public List<TableCFs> listReplicatedTableCFs() throws IOException {
+    List<TableCFs> replicatedTableCFs = new ArrayList<>();
+    HTableDescriptor[] tables = listTables();
+    for (HTableDescriptor table : tables) {
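
Both sides of this hunk lean on the same executeCallable/MasterCallable shape: wrap a single RPC in a callable and push it through a retry loop. A simplified stand-in sketch of that shape (not the HBase classes; real code backs off between attempts and only retries retriable errors):

import java.util.concurrent.Callable;

final class RetryingExecutor {
  // Runs one RPC-style callable through a bounded retry loop.
  static <V> V executeCallable(Callable<V> call, int maxAttempts) throws Exception {
    Exception last = null;
    for (int attempt = 1; attempt <= maxAttempts; attempt++) {
      try {
        return call.call();
      } catch (Exception e) {
        last = e; // real retry logic sleeps with backoff here
      }
    }
    if (last == null) {
      throw new IllegalArgumentException("maxAttempts must be >= 1");
    }
    throw last;
  }
}
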

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
index aed7651..8681390 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
@@ -508,971 +508,978 @@
 
     @Override
     public Bytes getValue(Bytes key) {
-      return values.get(key);
-    }
-
-    @Override
-    public byte[] getValue(byte[] key) {
-      Bytes value = values.get(new Bytes(key));
-      return value == null ? null : value.get();
-    }
-
-    private <T> T getOrDefault(Bytes key, Function<String, T> function, T defaultValue) {
-      Bytes value = values.get(key);
-      if (value == null) {
-        return defaultValue;
-      } else {
-        return function.apply(Bytes.toString(value.get(), value.getOffset(), value.getLength()));
-      }
-    }
-
-    /**
-     * Getter for fetching an unmodifiable {@link #values} map.
-     *
-     * @return unmodifiable map {@link #values}.
-     * @see #values
-     */
-    @Override
-    public Map<Bytes, Bytes> getValues() {
-      // shallow pointer copy
-      return Collections.unmodifiableMap(values);
-    }
-
-    /**
-     * Setter for storing metadata as a (key, value) pair in {@link #values} map
-     *
-     * @param key The key.
-     * @param value The value. If null, removes the setting.
-     * @return the modifyable TD
-     * @see #values
-     */
-    public ModifyableTableDescriptor setValue(byte[] key, byte[] value) {
-      return setValue(toBytesOrNull(key, v -> v),
-          toBytesOrNull(value, v -> v));
-    }
-
-    /*
-     * @param key The key.
-     * @param value The value. If null, removes the setting.
-     */
-    private ModifyableTableDescriptor setValue(final Bytes key,
-        final String value) {
-      return setValue(key, toBytesOrNull(value, Bytes::toBytes));
-    }
-
-    /*
-     * Setter for storing metadata as a (key, value) pair in {@link #values} map
-     *
-     * @param key The key.
-     * @param value The value. If null, removes the setting.
-     */
-    public ModifyableTableDescriptor setValue(final Bytes key, final Bytes value) {
-      if (value == null) {
-        values.remove(key);
-      } else {
-        values.put(key, value);
-      }
-      return this;
-    }
-
-    private static <T> Bytes toBytesOrNull(T t, Function<T, byte[]> f) {
-      if (t == null) {
-        return null;
-      } else {
-        return new Bytes(f.apply(t));
-      }
-    }
-
-    /**
-     * Remove metadata represented by the key from the {@link #values} map
-     *
-     * @param key Key whose key and value we're to remove from TableDescriptor
-     * parameters.
-     * @return the modifyable TD
-     */
-    public ModifyableTableDescriptor removeValue(Bytes key) {
-      return setValue(key, (Bytes) null);
-    }
-
-    /**
-     * Remove metadata represented by the key from the {@link #values} map
-     *
-     * @param key Key whose key and value we're to remove from TableDescriptor
-     * parameters.
-     * @return the modifyable TD
-     */
-    public ModifyableTableDescriptor removeValue(final byte[] key) {
-      return removeValue(new Bytes(key));
-    }
-
-    /**
-     * Check if the readOnly flag of the table is set. If the readOnly flag is
-     * set then the contents of the table can only be read from but not
-     * modified.
-     *
-     * @return true if all columns in the table should be read only
-     */
-    @Override
-    public boolean isReadOnly() {
-      return getOrDefault(READONLY_KEY, Boolean::valueOf, DEFAULT_READONLY);
-    }
-
-    /**
-     * Setting the table as read only sets all the columns in the table as read
-     * only. By default all tables are modifiable, but if the readOnly flag is
-     * set to true then the contents of the table can only be read but not
-     * modified.
-     *
-     * @param readOnly True if all of the columns in the table should be read
-     * only.
-     * @return the modifyable TD
-     */
-    public ModifyableTableDescriptor setReadOnly(final boolean readOnly) {
-      return setValue(READONLY_KEY, Boolean.toString(readOnly));
-    }
-
-    /**
-     * Check if the compaction enable flag of the table is true. If flag
[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b3b50f22/devapidocs/src-html/org/apache/hadoop/hbase/io/FileLink.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/FileLink.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/FileLink.html
index 6ee4990..e01933b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/FileLink.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/FileLink.html
@@ -46,475 +46,479 @@
 import org.apache.hadoop.fs.PositionedReadable;
 import org.apache.hadoop.fs.Seekable;
 import org.apache.hadoop.hbase.util.FSUtils;
-
-/**
- * The FileLink is a sort of hardlink that allows access to a file given a set of locations.
- *
- * <p><b>The Problem:</b>
- * <ul>
- *  <li>
- *    HDFS doesn't have support for hardlinks, which makes it impossible to reference
- *    the same data blocks using different names.
- *  </li>
- *  <li>
- *    HBase stores files in one location (e.g. table/region/family/) and when a file is no
- *    longer needed (e.g. compaction, region deletion, ...) moves it to an archive directory.
- *  </li>
- * </ul>
- * If we want to create a reference to a file, we need to remember that it can be in its
- * original location or in the archive folder.
- * The FileLink class tries to abstract this concept and, given a set of locations,
- * it is able to switch between them, making this operation transparent to the user.
- * {@link HFileLink} is a more concrete implementation of the {@code FileLink}.
- *
- * <p><b>Back-references:</b>
- * To help the {@link org.apache.hadoop.hbase.master.cleaner.CleanerChore} keep track of
- * the links to a particular file, during {@code FileLink} creation a new file is placed
- * inside a back-reference directory. There's one back-reference directory for each file that
- * has links, and in the directory there's one file per link.
- *
- * <p>HFileLink Example
- * <ul>
- *  <li>
- *      /hbase/table/region-x/cf/file-k
- *      (Original File)
- *  </li>
- *  <li>
- *      /hbase/table-cloned/region-y/cf/file-k.region-x.table
- *      (HFileLink to the original file)
- *  </li>
- *  <li>
- *      /hbase/table-2nd-cloned/region-z/cf/file-k.region-x.table
- *      (HFileLink to the original file)
- *  </li>
- *  <li>
- *      /hbase/.archive/table/region-x/.links-file-k/region-y.table-cloned
- *      (Back-reference to the link in table-cloned)
- *  </li>
- *  <li>
- *      /hbase/.archive/table/region-x/.links-file-k/region-z.table-2nd-cloned
- *      (Back-reference to the link in table-2nd-cloned)
- *  </li>
- * </ul>
- */
-@InterfaceAudience.Private
-public class FileLink {
-  private static final Log LOG = LogFactory.getLog(FileLink.class);
-
-  /** Define the Back-reference directory name prefix: .links-&lt;hfile&gt;/ */
-  public static final String BACK_REFERENCES_DIRECTORY_PREFIX = ".links-";
-
-  /**
-   * FileLink InputStream that handles the switch between the original path
-   * and the alternative locations, when the file is moved.
-   */
-  private static class FileLinkInputStream extends InputStream
-      implements Seekable, PositionedReadable, CanSetDropBehind, CanSetReadahead {
-    private FSDataInputStream in = null;
-    private Path currentPath = null;
-    private long pos = 0;
-
-    private final FileLink fileLink;
-    private final int bufferSize;
-    private final FileSystem fs;
-
-    public FileLinkInputStream(final FileSystem fs, final FileLink fileLink)
-        throws IOException {
-      this(fs, fileLink, FSUtils.getDefaultBufferSize(fs));
-    }
-
-    public FileLinkInputStream(final FileSystem fs, final FileLink fileLink, int bufferSize)
-        throws IOException {
-      this.bufferSize = bufferSize;
-      this.fileLink = fileLink;
-      this.fs = fs;
-
-      this.in = tryOpen();
-    }
-
-    @Override
-    public int read() throws IOException {
-      int res;
-      try {
-        res = in.read();
-      } catch (FileNotFoundException e) {
-        res = tryOpen().read();
-      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-        res = tryOpen().read();
-      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-        res = tryOpen().read();
-      }
-      if (res > 0) pos += 1;
-      return res;
-    }
-
-    @Override
-    public int read(byte[] b) throws IOException {
-      return read(b, 0, b.length);
-    }
-
-    @Override
-    public int read(byte[] b, int off, int len) throws IOException {
-      int n;
-      try {
-        n = in.read(b, off, len);
-      } catch
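
The read() above encodes the FileLink contract: if the file vanished from the current path, reopen at the next known location and retry. A hedged sketch of that fallback loop with simplified stand-ins (the real stream also preserves position and handles HDFS 1.x quirks):

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;

final class LinkedRead {
  interface Opener { InputStream open(String location) throws IOException; }

  static int readWithFallback(List<String> locations, Opener opener) throws IOException {
    IOException last = null;
    for (String loc : locations) {
      try (InputStream in = opener.open(loc)) {
        return in.read(); // real code keeps the stream open and tracks position
      } catch (FileNotFoundException e) {
        last = e; // file moved (e.g. archived): try the next location
      }
    }
    throw last != null ? last : new FileNotFoundException("no locations");
  }
}
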

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca9f6925/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 4bfdd88..a2a3834 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -1967,81 +1967,81 @@ service.
 SnapshotDescription.table
 
 
-protected TableName
-AsyncHBaseAdmin.TableProcedureBiConsumer.tableName
-
-
 private TableName
 HRegionLocator.tableName
 
-
+
 private TableName
 ScannerCallableWithReplicas.tableName
 
-
+
 private TableName
 ClientScanner.tableName
 
-
+
 private TableName
 AsyncClientScanner.tableName
 
-
+
 private TableName
 AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName
 
-
+
 private TableName
 AsyncRpcRetryingCallerFactory.BatchCallerBuilder.tableName
 
-
+
 private TableName
 RawAsyncTableImpl.tableName
 
-
+
 private TableName
 RegionCoprocessorRpcChannelImpl.tableName
 
-
+
 private TableName
 AsyncTableRegionLocatorImpl.tableName
 
-
+
 protected TableName
 RegionAdminServiceCallable.tableName
 
-
+
 private TableName
 HTable.tableName
 
-
+
 private TableName
 BufferedMutatorImpl.tableName
 
-
+
 private TableName
 AsyncBatchRpcRetryingCaller.tableName
 
-
+
 private TableName
 BufferedMutatorParams.tableName
 
-
+
 private TableName
 HBaseAdmin.TableFuture.tableName
 
-
+
 private TableName
 AsyncRequestFutureImpl.tableName
 
-
+
 private TableName
 AsyncProcessTask.tableName
 
-
+
 private TableName
 AsyncProcessTask.Builder.tableName
 
+
+protected TableName
+RawAsyncHBaseAdmin.TableProcedureBiConsumer.tableName
+
 
 private TableName
 RegionServerCallable.tableName
@@ -2279,7 +2279,7 @@ service.
 
 
 private CompletableFuture<TableName>
-AsyncHBaseAdmin.checkRegionsAndGetTableName(byte[] encodeRegionNameA,
+RawAsyncHBaseAdmin.checkRegionsAndGetTableName(byte[] encodeRegionNameA,
    byte[] encodeRegionNameB)
 
 
@@ -2300,6 +2300,11 @@ service.
 List all of the names of userspace tables.
 
 
+
+CompletableFuture<List<TableName>>
+RawAsyncHBaseAdmin.listTableNames(Optional<Pattern> pattern,
+   boolean includeSysTables)
+
 
 
 
@@ -2344,49 +2349,54 @@ service.
 
 
 
+CompletableFuture<Void>
+RawAsyncHBaseAdmin.addColumnFamily(TableName tableName,
+   ColumnFamilyDescriptor columnFamily)
+
+
 void
 Admin.addColumnFamily(TableName tableName,
    HColumnDescriptor columnFamily)
 Add a column family to an existing table.
 
 
-
+
 void
 HBaseAdmin.addColumnFamily(TableName tableName,
    HColumnDescriptor columnFamily)
 
-
+
 Future<Void>
 Admin.addColumnFamilyAsync(TableName tableName,
    HColumnDescriptor columnFamily)
 Add a column family to an existing table.
 
 
-
+
 Future<Void>
 HBaseAdmin.addColumnFamilyAsync(TableName tableName,
    HColumnDescriptor columnFamily)
 
-
+
 void
 MetaCache.cacheLocation(TableName tableName,
    RegionLocations locations)
 Put a newly discovered HRegionLocation into the cache.
 
 
-
+
 void
 ConnectionImplementation.cacheLocation(TableName tableName,
    RegionLocations location)
 Put a newly discovered HRegionLocation into the cache.
 
 
-
+
 void
 ClusterConnection.cacheLocation(TableName tableName,
    RegionLocations location)
 
-
+
 void
 

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e3b63ca/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
index dc12c09..82506d2 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
@@ -54,2261 +54,2259 @@
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.directory.api.util.OptionalComponentsMonitor;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.MetaTableAccessor;
-import org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-import org.apache.hadoop.hbase.NotServingRegionException;
-import org.apache.hadoop.hbase.ProcedureInfo;
-import org.apache.hadoop.hbase.RegionLocations;
-import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.NamespaceDescriptor;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.TableExistsException;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-import org.apache.hadoop.hbase.TableNotDisabledException;
-import org.apache.hadoop.hbase.TableNotEnabledException;
-import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.UnknownRegionException;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-import org.apache.hadoop.hbase.client.Scan.ReadType;
-import org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-import org.apache.hadoop.hbase.client.replication.TableCFs;
-import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-import org.apache.hadoop.hbase.quotas.QuotaFilter;
-import org.apache.hadoop.hbase.quotas.QuotaSettings;
-import org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-import org.apache.hadoop.hbase.replication.ReplicationException;
-import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aecb1286/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html b/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
index 6f6a952..8c2fe2f 100644
--- a/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
@@ -89,11 +89,11 @@
 Annotation Type Hierarchy
 
 org.apache.hadoop.hbase.classification.InterfaceAudience.LimitedPrivate (implements java.lang.annotation.Annotation)
-org.apache.hadoop.hbase.classification.InterfaceStability.Stable (implements java.lang.annotation.Annotation)
+org.apache.hadoop.hbase.classification.InterfaceAudience.Private (implements java.lang.annotation.Annotation)
 org.apache.hadoop.hbase.classification.InterfaceStability.Evolving (implements java.lang.annotation.Annotation)
 org.apache.hadoop.hbase.classification.InterfaceStability.Unstable (implements java.lang.annotation.Annotation)
+org.apache.hadoop.hbase.classification.InterfaceStability.Stable (implements java.lang.annotation.Annotation)
 org.apache.hadoop.hbase.classification.InterfaceAudience.Public (implements java.lang.annotation.Annotation)
-org.apache.hadoop.hbase.classification.InterfaceAudience.Private (implements java.lang.annotation.Annotation)
 
 
 



[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a719cd00/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
index 49714a2..d0f1508 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
@@ -172,1438 +172,1562 @@
     Map<ServerName, List<HRegionInfo>> clusterState;
 
     protected final RackManager rackManager;
-
-    protected Cluster(
-        Map<ServerName, List<HRegionInfo>> clusterState,
-        Map<String, Deque<BalancerRegionLoad>> loads,
-        RegionLocationFinder regionFinder,
-        RackManager rackManager) {
-      this(null, clusterState, loads, regionFinder, rackManager);
-    }
-
-    @SuppressWarnings("unchecked")
-    protected Cluster(
-        Collection<HRegionInfo> unassignedRegions,
-        Map<ServerName, List<HRegionInfo>> clusterState,
-        Map<String, Deque<BalancerRegionLoad>> loads,
-        RegionLocationFinder regionFinder,
-        RackManager rackManager) {
-
-      if (unassignedRegions == null) {
-        unassignedRegions = EMPTY_REGION_LIST;
-      }
+    // Maps region -> rackIndex -> locality of region on rack
+    private float[][] rackLocalities;
+    // Maps localityType -> region -> [server|rack]Index with highest locality
+    private int[][] regionsToMostLocalEntities;
+
+    protected Cluster(
+        Map<ServerName, List<HRegionInfo>> clusterState,
+        Map<String, Deque<BalancerRegionLoad>> loads,
+        RegionLocationFinder regionFinder,
+        RackManager rackManager) {
+      this(null, clusterState, loads, regionFinder, rackManager);
+    }
+
+    @SuppressWarnings("unchecked")
+    protected Cluster(
+        Collection<HRegionInfo> unassignedRegions,
+        Map<ServerName, List<HRegionInfo>> clusterState,
+        Map<String, Deque<BalancerRegionLoad>> loads,
+        RegionLocationFinder regionFinder,
+        RackManager rackManager) {
 
-      serversToIndex = new HashMap<>();
-      hostsToIndex = new HashMap<>();
-      racksToIndex = new HashMap<>();
-      tablesToIndex = new HashMap<>();
-
-      //TODO: We should get the list of tables from master
-      tables = new ArrayList<>();
-      this.rackManager = rackManager != null ? rackManager : new DefaultRackManager();
+      if (unassignedRegions == null) {
+        unassignedRegions = EMPTY_REGION_LIST;
+      }
+
+      serversToIndex = new HashMap<>();
+      hostsToIndex = new HashMap<>();
+      racksToIndex = new HashMap<>();
+      tablesToIndex = new HashMap<>();
 
-      numRegions = 0;
-
-      List<List<Integer>> serversPerHostList = new ArrayList<>();
-      List<List<Integer>> serversPerRackList = new ArrayList<>();
-      this.clusterState = clusterState;
-      this.regionFinder = regionFinder;
-
-      // Use servername and port as there can be dead servers in this list. We want everything with
-      // a matching hostname and port to have the same index.
-      for (ServerName sn : clusterState.keySet()) {
-        if (sn == null) {
-          LOG.warn("TODO: Enable TRACE on BaseLoadBalancer. Empty servername); " +
-              "skipping; unassigned regions?");
-          if (LOG.isTraceEnabled()) {
-            LOG.trace("EMPTY SERVERNAME " + clusterState.toString());
-          }
-          continue;
-        }
-        if (serversToIndex.get(sn.getAddress().toString()) == null) {
-          serversToIndex.put(sn.getHostAndPort(), numServers++);
-        }
-        if (!hostsToIndex.containsKey(sn.getHostname())) {
-          hostsToIndex.put(sn.getHostname(), numHosts++);
-          serversPerHostList.add(new ArrayList<>(1));
+      //TODO: We should get the list of tables from master
+      tables = new ArrayList<>();
+      this.rackManager = rackManager != null ? rackManager : new DefaultRackManager();
+
+      numRegions = 0;
+
+      List<List<Integer>> serversPerHostList = new ArrayList<>();
+      List<List<Integer>> serversPerRackList = new ArrayList<>();
+      this.clusterState = clusterState;
+      this.regionFinder = regionFinder;
+
+      // Use servername and port as there can be dead servers in this list. We want everything with
+      // a matching hostname and port to have the same index.
+      for (ServerName sn :
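
The constructor above spends its first pass turning server, host, and rack names into dense int indices so the balancer can then work on arrays instead of string keys. A minimal sketch of that indexing step (a simplified stand-in, not the Cluster internals):

import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class DenseIndex {
  private final Map<String, Integer> toIndex = new HashMap<>();

  // Assigns the next dense index the first time a name is seen.
  int indexOf(String name) {
    Integer i = toIndex.get(name);
    if (i == null) {
      i = toIndex.size();
      toIndex.put(name, i);
    }
    return i;
  }

  static int[] indexAll(List<String> names) {
    DenseIndex idx = new DenseIndex();
    return names.stream().mapToInt(idx::indexOf).toArray();
  }
}
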

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/476c54ed/devapidocs/org/apache/hadoop/hbase/regionserver/Store.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/Store.html b/devapidocs/org/apache/hadoop/hbase/regionserver/Store.html
index b770602..27bfdcd 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/Store.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/Store.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":38,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":38,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":38,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":18,"i44":6,"i45":18,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":38,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":6,"i68":6,"i69":6,"i70":6,"i71":38,"i72":6,"i73":6,"i74":6,"i75":6};
+var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":38,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":38,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":38,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":18,"i45":6,"i46":18,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":38,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":6,"i68":6,"i69":6,"i70":6,"i71":6,"i72":38,"i73":6,"i74":6,"i75":6,"i76":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"],16:["t5","Default Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -318,30 +318,34 @@ extends 
 
 long
-getLastCompactSize()
+getHFilesSize()
 
 
 long
-getMajorCompactedCellsCount()
+getLastCompactSize()
 
 
 long
-getMajorCompactedCellsSize()
+getMajorCompactedCellsCount()
 
 
 long
-getMaxMemstoreTS()
+getMajorCompactedCellsSize()
 
 
 long
-getMaxSequenceId()
+getMaxMemstoreTS()
 
 
 long
-getMaxStoreFileAge()
+getMaxSequenceId()
 
 
 long
+getMaxStoreFileAge()
+
+
+long
 getMemStoreSize()
 Deprecated.
 Since 2.0 and will be removed in 3.0. Use getSizeOfMemStore() instead.
@@ -351,27 +355,27 @@ extends 
 
 
-
+
 long
 getMinStoreFileAge()
 
-
+
 long
 getNumHFiles()
 
-
+
 long
 getNumReferenceFiles()
 
-
+
 HRegionInfo
 getRegionInfo()
 
-
+
 ScanInfo
 getScanInfo()
 
-
+
 KeyValueScanner
 getScanner(Scan scan,
    NavigableSet<byte[]> targetCols,
@@ -379,7 +383,7 @@ extends 
 Return a scanner for both the memstore and the HStore files.
 
-
+
 default List<KeyValueScanner>
 getScanners(boolean cacheBlocks,
    boolean isGet,
@@ -392,7 +396,7 @@ extends 
 Get all scanners with no filtering based on TTL (that happens further down the line).
 
-
+
 List<KeyValueScanner>
 getScanners(boolean cacheBlocks,
    boolean usePread,
@@ -406,7 +410,7 @@ extends 
 Get all scanners with no filtering based on TTL (that happens further down the line).
 
-
+
 default List<KeyValueScanner>
 getScanners(List<StoreFile> files,
    boolean cacheBlocks,
@@ -422,7 +426,7 @@ extends 
 
-
+
 List<KeyValueScanner>
 getScanners(List<StoreFile> files,
    boolean cacheBlocks,
@@ -439,27 +443,27 @@ extends 
 
-
+
 long
 getSize()
 
-
+
 MemstoreSize
 getSizeOfMemStore()
 
-
+
 MemstoreSize
 getSizeOfSnapshot()
 
-
+
 MemstoreSize
 getSizeToFlush()
 
-
+
 long
 getSmallestReadPoint()
 
-
+
 long
 getSnapshotSize()
 Deprecated.
@@ -470,43 +474,43 @@ extends 
 
-
+
 byte[]
 getSplitPoint()
 Determines if Store should be split
 
-
+
 Collection<StoreFile>
 getStorefiles()
 
-
+
 int
 getStorefilesCount()
 
-
+
 long
 getStorefilesIndexSize()
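
getScanner above promises "a scanner for both the memstore and the HStore files", that is, one sorted view over several sorted sources. A hedged sketch of that merge using a heap of iterators in place of HBase's KeyValueScanner heap:

import java.util.Iterator;
import java.util.List;
import java.util.PriorityQueue;

final class MergedScanner<T extends Comparable<T>> implements Iterator<T> {
  // The source whose next element is smallest sits at the top of the heap.
  private final PriorityQueue<PeekingIter<T>> heap =
      new PriorityQueue<>((a, b) -> a.peek().compareTo(b.peek()));

  MergedScanner(List<Iterator<T>> sources) {
    for (Iterator<T> it : sources) {
      if (it.hasNext()) heap.add(new PeekingIter<>(it));
    }
  }

  @Override public boolean hasNext() { return !heap.isEmpty(); }

  @Override public T next() {
    PeekingIter<T> top = heap.poll();
    T value = top.next();
    if (top.hasNext()) heap.add(top); // re-seat the source at its new head
    return value;
  }

  // Wraps an iterator so its next element can be inspected without consuming it.
  // Assumes sources never yield null elements.
  private static final class PeekingIter<T> implements Iterator<T> {
    private final Iterator<T> it;
    private T head;
    PeekingIter(Iterator<T> it) { this.it = it; head = it.next(); }
    T peek() { return head; }
    @Override public boolean hasNext() { return head != null; }
    @Override public T next() {
      T v = head;
      head = it.hasNext() ? it.next() : null;
      return v;
    }
  }
}
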
 

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/77a552c4/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
index 1423d72..aed7651 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
@@ -28,11 +28,11 @@
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -40,1601 +40,1439 @@
 import java.util.TreeSet;
 import java.util.function.Function;
 import java.util.regex.Matcher;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Coprocessor;
-import org.apache.hadoop.hbase.HColumnDescriptor;
+import java.util.stream.Stream;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-import org.apache.hadoop.hbase.util.Bytes;
-
-@InterfaceAudience.Public
-public class TableDescriptorBuilder {
-
-  private static final Log LOG = LogFactory.getLog(TableDescriptorBuilder.class);
-
-  @InterfaceAudience.Private
-  public static final String SPLIT_POLICY = "SPLIT_POLICY";
-
-  /**
-   * Used by HBase Shell interface to access this metadata
-   * attribute which denotes the maximum size of the store file after which a
-   * region split occurs.
-   */
-  @InterfaceAudience.Private
-  public static final String MAX_FILESIZE = "MAX_FILESIZE";
-  private static final Bytes MAX_FILESIZE_KEY
-      = new Bytes(Bytes.toBytes(MAX_FILESIZE));
-
-  @InterfaceAudience.Private
-  public static final String OWNER = "OWNER";
-  @InterfaceAudience.Private
-  public static final Bytes OWNER_KEY
-      = new Bytes(Bytes.toBytes(OWNER));
-
-  /**
-   * Used by rest interface to access this metadata attribute
-   * which denotes if the table is Read Only.
-   */
-  @InterfaceAudience.Private
-  public static final String READONLY = "READONLY";
-  private static final Bytes READONLY_KEY
-      = new Bytes(Bytes.toBytes(READONLY));
-
-  /**
-   * Used by HBase Shell interface to access this metadata
-   * attribute which denotes if the table is compaction enabled.
-   */
-  @InterfaceAudience.Private
-  public static final String COMPACTION_ENABLED = "COMPACTION_ENABLED";
-  private static final Bytes COMPACTION_ENABLED_KEY
-      = new Bytes(Bytes.toBytes(COMPACTION_ENABLED));
-
-  /**
-   * Used by HBase Shell interface to access this metadata
-   * attribute which represents the maximum size of the memstore after which its
-   * contents are flushed onto the disk.
-   */
-  @InterfaceAudience.Private
-  public static final String MEMSTORE_FLUSHSIZE = "MEMSTORE_FLUSHSIZE";
-  private static final Bytes MEMSTORE_FLUSHSIZE_KEY
-      = new Bytes(Bytes.toBytes(MEMSTORE_FLUSHSIZE));
-
-  @InterfaceAudience.Private
-  public static final String FLUSH_POLICY = "FLUSH_POLICY";
-
-  /**
-   * Used by rest interface to access this metadata attribute
-   * which denotes if it is a catalog table, either <code>hbase:meta</code>.
-   */
-  @InterfaceAudience.Private
-  public static final String IS_META = "IS_META";
-  private static final Bytes IS_META_KEY
-      = new Bytes(Bytes.toBytes(IS_META));
-
-  /**
-   * {@link Durability} setting for the table.
-   */
-  @InterfaceAudience.Private
-  public static final String DURABILITY = "DURABILITY";
-  private static final Bytes DURABILITY_KEY
-      = new Bytes(Bytes.toBytes("DURABILITY"));
-
-  /**
-
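
The listing pairs every String metadata name with a precomputed byte-wrapped key so map lookups skip re-encoding. A minimal sketch of that pairing idiom (plain byte[] stands in for HBase's Bytes wrapper; names illustrative):

import java.nio.charset.StandardCharsets;

final class MetaKeys {
  // Human-readable name, used by shell/REST surfaces.
  static final String READONLY = "READONLY";
  // Byte form precomputed once; lookups reuse it instead of re-encoding.
  static final byte[] READONLY_KEY = READONLY.getBytes(StandardCharsets.UTF_8);
}
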

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b44796ef/apidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerStoppedException.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerStoppedException.html b/apidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerStoppedException.html
index 72168c8..89c4f42 100644
--- a/apidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerStoppedException.html
+++ b/apidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerStoppedException.html
@@ -4,7 +4,7 @@
 
 
 
-Uses of Class org.apache.hadoop.hbase.regionserver.RegionServerStoppedException (Apache HBase 2.0.0-SNAPSHOT API)
+Uses of Class org.apache.hadoop.hbase.regionserver.RegionServerStoppedException (Apache HBase 3.0.0-SNAPSHOT API)



@@ -12,7 +12,7 @@

-Uses of Class org.apache.hadoop.hbase.regionserver.RowTooBigException (Apache HBase 2.0.0-SNAPSHOT API)
+Uses of Class org.apache.hadoop.hbase.regionserver.RowTooBigException (Apache HBase 3.0.0-SNAPSHOT API)