[06/40] hbase-site git commit: Published site at 6d7bc0e98b25215e79f67f107fd0d3306dfcf352.

2018-09-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/738e976e/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationProcedureRetry.MockHMaster.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationProcedureRetry.MockHMaster.html b/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationProcedureRetry.MockHMaster.html
index e4180ea..320a1f0 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationProcedureRetry.MockHMaster.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationProcedureRetry.MockHMaster.html
@@ -270,7 +270,7 @@ extends org.apache.hadoop.hbase.master.HMaster
 
 
 Methods inherited from 
class org.apache.hadoop.hbase.master.HMaster
-abort, abortProcedure, addColumn, addReplicationPeer, balance, balance, 
balanceSwitch, canCreateBaseZNode, canUpdateTableDescriptor, 
checkIfShouldMoveSystemRegionAsync, checkTableModifiable, configureInfoServer, 
constructMaster, createMetaBootstrap, createRpcServices, createServerManager, 
createSystemTable, createTable, decommissionRegionServers, 
decorateMasterConfiguration, deleteColumn, deleteTable, disableReplicationPeer, 
disableTable, enableReplicationPeer, enableTable, getAssignmentManager, 
getAverageLoad, getCatalogJanitor, getClientIdAuditPrefix, getClusterMetrics, 
getClusterMetrics, getClusterMetricsWithoutCoprocessor, 
getClusterMetricsWithoutCoprocessor, getClusterSchema, getDumpServlet, 
getFavoredNodesManager, getHFileCleaner, getInitializedEvent, 
getLastMajorCompactionTimestamp, getLastMajorCompactionTimestampForRegion, 
getLoadBalancer, getLoadBalancerClassName, getLoadedCoprocessors, 
getLockManager, getLocks, getLogCleaner, getMasterActiveTime, 
getMasterCoprocessorHost, getMasterCoprocessors, getMasterFileSystem, 
getMasterFinishedInitializationTime, getMasterMetrics, 
getMasterProcedureExecutor, getMasterProcedureManagerHost, 
getMasterQuotaManager, getMasterRpcServices, getMasterStartTime, 
getMasterWalManager, getMergePlanCount, getMetaTableObserver, 
getMobCompactionState, getNumWALFiles, getProcedures, getProcessName, 
getQuotaObserverChore, getRegionNormalizer, getRegionNormalizerTracker, 
getRegionServerFatalLogBuffer, getRegionServerInfoPort, getRegionServerVersion, 
getReplicationLoad, getReplicationPeerConfig, getServerManager, getServerName, 
getSnapshotManager, getSnapshotQuotaObserverChore, 
getSpaceQuotaSnapshotNotifier, getSplitOrMergeTracker, getSplitPlanCount, 
getSyncReplicationReplayWALManager, getTableDescriptors, getTableStateManager, 
getUseThisHostnameInstead, getWalProcedureStore, getZooKeeper, 
initClusterSchemaService, initializeZKBasedSystemTrackers, isActiveMaster, 
isBalancerOn, isInitialized, isInMaintenanceMode, isNormalizerOn, isSplitOrMergeEnabled, listDecommissionedRegionServers, 
listReplicationPeers, listTableDescriptors, listTableDescriptorsByNamespace, 
listTableNames, listTableNamesByNamespace, login, main, mergeRegions, 
modifyColumn, modifyTable, move, normalizeRegions, recommissionRegionServer, 
registerService, remoteProcedureCompleted, remoteProcedureFailed, 
removeReplicationPeer, reportMobCompactionEnd, reportMobCompactionStart, 
requestMobCompaction, restoreSnapshot, run, setCatalogJanitorEnabled, 
setInitialized, shutdown, splitRegion, stop, stopMaster, stopServiceThreads, 
transitReplicationPeerSyncReplicationState, truncateTable, 
updateConfigurationForQuotasObserver, updateReplicationPeerConfig, 
waitForMasterActive
+abort, abortProcedure, addColumn, addReplicationPeer, balance, balance, 
balanceSwitch, canCreateBaseZNode, canUpdateTableDescriptor, 
checkIfShouldMoveSystemRegionAsync, checkTableModifiable, configureInfoServer, 
constructMaster, createMetaBootstrap, createRpcServices, createServerManager, 
createSystemTable, createTable, decommissionRegionServers, 
decorateMasterConfiguration, deleteColumn, deleteTable, disableReplicationPeer, 
disableTable, enableReplicationPeer, enableTable, getAssignmentManager, 
getAverageLoad, getCatalogJanitor, getClientIdAuditPrefix, getClusterMetrics, 
getClusterMetrics, getClusterMetricsWithoutCoprocessor, 
getClusterMetricsWithoutCoprocessor, getClusterSchema, getDumpServlet, 
getFavoredNodesManager, getHFileCleaner, getInitializedEvent, 
getLastMajorCompactionTimestamp, getLastMajorCompactionTimestampForRegion, 
getLoadBalancer, getLoadBalancerClassName, getLoadedCoprocessors, 
getLockManager, getLocks, getLogCleaner, getMasterActiveTime, 
getMasterCoprocessorHost, getMasterCoprocessors, getMasterFileSystem, 
getMasterFinishedInitializationTime, getMasterMetrics, 
getMasterProcedureExecutor, getMasterProcedureManagerHost, 
getMasterQuotaManager, getMasterRpcServices, getMasterStartTime, 
getMasterWalManager, getMergePlanCount, getMetaTableObserver, 
getMobCompactionState, getNumWALFiles, getProcedures, getProcessName, 
getQuotaObserverChore, getRegionNormalizer, getReg

[06/40] hbase-site git commit: Published site at 2aae247e3f8f8a393b403a82593bdc3a1ba81193.

2018-09-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/80652933/testdevapidocs/overview-tree.html
--
diff --git a/testdevapidocs/overview-tree.html b/testdevapidocs/overview-tree.html
index 8553fde..95faacf 100644
--- a/testdevapidocs/overview-tree.html
+++ b/testdevapidocs/overview-tree.html
@@ -3205,6 +3205,7 @@
 org.apache.hadoop.hbase.client.TestPutDotHas
 org.apache.hadoop.hbase.client.TestPutWithDelete
 org.apache.hadoop.hbase.client.TestPutWriteToWal
+org.apache.hadoop.hbase.filter.TestQualifierFilterWithEmptyQualifier
 org.apache.hadoop.hbase.quotas.TestQuotaAdmin
 org.apache.hadoop.hbase.quotas.TestQuotaFilter
 org.apache.hadoop.hbase.quotas.TestQuotaGlobalsSettingsBypass
@@ -3521,7 +3522,11 @@
 org.apache.hadoop.hbase.client.TestServerBusyException
 org.apache.hadoop.hbase.client.TestServerBusyException.SleepCoprocessor 
(implements org.apache.hadoop.hbase.coprocessor.RegionCoprocessor, 
org.apache.hadoop.hbase.coprocessor.RegionObserver)
 org.apache.hadoop.hbase.client.TestServerBusyException.SleepLongerAtFirstCoprocessor
 (implements org.apache.hadoop.hbase.coprocessor.RegionCoprocessor, 
org.apache.hadoop.hbase.coprocessor.RegionObserver)
-org.apache.hadoop.hbase.master.procedure.TestServerCrashProcedure
+org.apache.hadoop.hbase.master.procedure.TestServerCrashProcedure
+
+org.apache.hadoop.hbase.master.procedure.TestServerCrashProcedureWithReplicas
+
+
 org.apache.hadoop.hbase.master.TestServerCrashProcedureCarryingMetaStuck
 org.apache.hadoop.hbase.master.TestServerCrashProcedureStuck
 org.apache.hadoop.hbase.regionserver.TestServerCustomProtocol



[06/40] hbase-site git commit: Published site at 5fd16f38533591615aa9afa48bb89bcbd8313caf.

2018-06-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f0b7674/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.RegionServerMetricsWrapperRunnable.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.RegionServerMetricsWrapperRunnable.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.RegionServerMetricsWrapperRunnable.html
index 9ec6b6f..5d53c29 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.RegionServerMetricsWrapperRunnable.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.RegionServerMetricsWrapperRunnable.html
@@ -82,1024 +82,1053 @@
 074  private volatile long numReferenceFiles 
= 0;
 075  private volatile double 
requestsPerSecond = 0.0;
 076  private volatile long readRequestsCount 
= 0;
-077  private volatile long 
filteredReadRequestsCount = 0;
-078  private volatile long 
writeRequestsCount = 0;
-079  private volatile long 
checkAndMutateChecksFailed = 0;
-080  private volatile long 
checkAndMutateChecksPassed = 0;
-081  private volatile long 
storefileIndexSize = 0;
-082  private volatile long 
totalStaticIndexSize = 0;
-083  private volatile long 
totalStaticBloomSize = 0;
-084  private volatile long 
numMutationsWithoutWAL = 0;
-085  private volatile long 
dataInMemoryWithoutWAL = 0;
-086  private volatile double 
percentFileLocal = 0;
-087  private volatile double 
percentFileLocalSecondaryRegions = 0;
-088  private volatile long flushedCellsCount 
= 0;
-089  private volatile long 
compactedCellsCount = 0;
-090  private volatile long 
majorCompactedCellsCount = 0;
-091  private volatile long flushedCellsSize 
= 0;
-092  private volatile long 
compactedCellsSize = 0;
-093  private volatile long 
majorCompactedCellsSize = 0;
-094  private volatile long 
cellsCountCompactedToMob = 0;
-095  private volatile long 
cellsCountCompactedFromMob = 0;
-096  private volatile long 
cellsSizeCompactedToMob = 0;
-097  private volatile long 
cellsSizeCompactedFromMob = 0;
-098  private volatile long mobFlushCount = 
0;
-099  private volatile long 
mobFlushedCellsCount = 0;
-100  private volatile long 
mobFlushedCellsSize = 0;
-101  private volatile long mobScanCellsCount 
= 0;
-102  private volatile long mobScanCellsSize 
= 0;
-103  private volatile long 
mobFileCacheAccessCount = 0;
-104  private volatile long 
mobFileCacheMissCount = 0;
-105  private volatile double 
mobFileCacheHitRatio = 0;
-106  private volatile long 
mobFileCacheEvictedCount = 0;
-107  private volatile long mobFileCacheCount 
= 0;
-108  private volatile long 
blockedRequestsCount = 0L;
-109  private volatile long averageRegionSize 
= 0L;
-110
-111  private CacheStats cacheStats;
-112  private CacheStats l1Stats = null;
-113  private CacheStats l2Stats = null;
-114  private ScheduledExecutorService 
executor;
-115  private Runnable runnable;
-116  private long period;
-117
-118  /**
-119   * Can be null if not on hdfs.
-120   */
-121  private DFSHedgedReadMetrics 
dfsHedgedReadMetrics;
-122
-123  public 
MetricsRegionServerWrapperImpl(final HRegionServer regionServer) {
-124this.regionServer = regionServer;
-125initBlockCache();
-126initMobFileCache();
-127
-128this.period =
-129
regionServer.conf.getLong(HConstants.REGIONSERVER_METRICS_PERIOD,
-130  
HConstants.DEFAULT_REGIONSERVER_METRICS_PERIOD);
-131
-132this.executor = 
CompatibilitySingletonFactory.getInstance(MetricsExecutor.class).getExecutor();
-133this.runnable = new 
RegionServerMetricsWrapperRunnable();
-134
this.executor.scheduleWithFixedDelay(this.runnable, this.period, this.period,
-135  TimeUnit.MILLISECONDS);
-136this.metricsWALSource = 
CompatibilitySingletonFactory.getInstance(MetricsWALSource.class);
-137
-138try {
-139  this.dfsHedgedReadMetrics = 
FSUtils.getDFSHedgedReadMetrics(regionServer.getConfiguration());
-140} catch (IOException e) {
-141  LOG.warn("Failed to get hedged 
metrics", e);
-142}
-143if (LOG.isInfoEnabled()) {
-144  LOG.info("Computing regionserver 
metrics every " + this.period + " milliseconds");
-145}
-146  }
-147
-148  /**
-149   * It's possible that due to threading 
the block cache could not be initialized
-150   * yet (testing multiple region servers 
in one jvm).  So we need to try and initialize
-151   * the blockCache and cacheStats 
reference multiple times until we succeed.
-152   */
-153  private synchronized  void 
initBlockCache() {
-154CacheConfig cacheConfig = 
this.regionServer.cacheConfig;
-155if (cacheConfig != null) {
-156  l1Stats = 
cacheConfig.getOnHeapCacheStats();
-157  l2Stats = 
cacheConfig.getL2CacheStats();
-158  if (this.blockCache == null) {
-159this.blockCache = 
cacheConfig.getBlockCache();
-
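The constructor shown in this hunk reads HConstants.REGIONSERVER_METRICS_PERIOD from the configuration and schedules the RegionServerMetricsWrapperRunnable with scheduleWithFixedDelay, so the volatile metric fields above get refreshed on a fixed period while reads of them stay cheap. A minimal, self-contained sketch of that fixed-delay refresh pattern in plain java.util.concurrent follows; PeriodicMetricsSnapshot and its field are illustrative stand-ins, not the HBase implementation.

  import java.util.concurrent.Executors;
  import java.util.concurrent.ScheduledExecutorService;
  import java.util.concurrent.TimeUnit;

  public class PeriodicMetricsSnapshot implements Runnable {
    // Volatile so callers on other threads always see the latest refreshed value.
    private volatile long readRequestsCount = 0;
    private final ScheduledExecutorService executor =
        Executors.newSingleThreadScheduledExecutor();

    public PeriodicMetricsSnapshot(long periodMillis) {
      // Same shape as the constructor above: refresh the cached metrics on a fixed delay.
      executor.scheduleWithFixedDelay(this, periodMillis, periodMillis, TimeUnit.MILLISECONDS);
    }

    @Override
    public void run() {
      // HBase walks region and cache state here; this sketch just bumps a counter.
      readRequestsCount++;
    }

    public long getReadRequestsCount() {
      return readRequestsCount;
    }
  }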

[06/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.

2018-04-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.html
index 8c0d57c..e606e82 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.html
@@ -46,2582 +46,2583 @@
 038import java.util.Iterator;
 039import java.util.List;
 040
-041import org.apache.hadoop.hbase.Cell;
-042import 
org.apache.hadoop.hbase.CellComparator;
-043import 
org.apache.hadoop.hbase.KeyValue;
-044import 
org.apache.hadoop.io.RawComparator;
-045import 
org.apache.hadoop.io.WritableComparator;
-046import 
org.apache.hadoop.io.WritableUtils;
-047import 
org.apache.yetus.audience.InterfaceAudience;
-048import org.slf4j.Logger;
-049import org.slf4j.LoggerFactory;
-050
-051import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-052
-053import com.google.protobuf.ByteString;
-054
-055import sun.misc.Unsafe;
-056
-057/**
-058 * Utility class that handles byte 
arrays, conversions to/from other types,
-059 * comparisons, hash code generation, 
manufacturing keys for HashMaps or
-060 * HashSets, and can be used as key in 
maps or trees.
-061 */
-062@SuppressWarnings("restriction")
-063@InterfaceAudience.Public
-064@edu.umd.cs.findbugs.annotations.SuppressWarnings(
-065
value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS",
-066justification="It has been like this 
forever")
-067public class Bytes implements Comparable<Bytes> {
-068
-069  // Using the charset canonical name for 
String/byte[] conversions is much
-070  // more efficient due to use of cached 
encoders/decoders.
-071  private static final String UTF8_CSN = 
StandardCharsets.UTF_8.name();
-072
-073  //HConstants.EMPTY_BYTE_ARRAY should be 
updated if this changed
-074  private static final byte [] 
EMPTY_BYTE_ARRAY = new byte [0];
-075
-076  private static final Logger LOG = 
LoggerFactory.getLogger(Bytes.class);
-077
-078  /**
-079   * Size of boolean in bytes
-080   */
-081  public static final int SIZEOF_BOOLEAN 
= Byte.SIZE / Byte.SIZE;
-082
-083  /**
-084   * Size of byte in bytes
-085   */
-086  public static final int SIZEOF_BYTE = 
SIZEOF_BOOLEAN;
-087
-088  /**
-089   * Size of char in bytes
-090   */
-091  public static final int SIZEOF_CHAR = 
Character.SIZE / Byte.SIZE;
-092
-093  /**
-094   * Size of double in bytes
-095   */
-096  public static final int SIZEOF_DOUBLE = 
Double.SIZE / Byte.SIZE;
-097
-098  /**
-099   * Size of float in bytes
-100   */
-101  public static final int SIZEOF_FLOAT = 
Float.SIZE / Byte.SIZE;
-102
-103  /**
-104   * Size of int in bytes
-105   */
-106  public static final int SIZEOF_INT = 
Integer.SIZE / Byte.SIZE;
-107
-108  /**
-109   * Size of long in bytes
-110   */
-111  public static final int SIZEOF_LONG = 
Long.SIZE / Byte.SIZE;
-112
-113  /**
-114   * Size of short in bytes
-115   */
-116  public static final int SIZEOF_SHORT = 
Short.SIZE / Byte.SIZE;
-117
-118  /**
-119   * Mask to apply to a long to reveal 
the lower int only. Use like this:
-120   * int i = (int)(0xL ^ 
some_long_value);
-121   */
-122  public static final long 
MASK_FOR_LOWER_INT_IN_LONG = 0xL;
-123
-124  /**
-125   * Estimate of size cost to pay beyond 
payload in jvm for instance of byte [].
-126   * Estimate based on study of jhat and 
jprofiler numbers.
-127   */
-128  // JHat says BU is 56 bytes.
-129  // SizeOf which uses 
java.lang.instrument says 24 bytes. (3 longs?)
-130  public static final int 
ESTIMATED_HEAP_TAX = 16;
-131
-132  private static final boolean 
UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
-133
-134  /**
-135   * Returns length of the byte array, 
returning 0 if the array is null.
-136   * Useful for calculating sizes.
-137   * @param b byte array, which can be 
null
-138   * @return 0 if b is null, otherwise 
returns length
-139   */
-140  final public static int len(byte[] b) 
{
-141return b == null ? 0 : b.length;
-142  }
-143
-144  private byte[] bytes;
-145  private int offset;
-146  private int length;
-147
-148  /**
-149   * Create a zero-size sequence.
-150   */
-151  public Bytes() {
-152super();
-153  }
-154
-155  /**
-156   * Create a Bytes using the byte array 
as the initial value.
-157   * @param bytes This array becomes the 
backing storage for the object.
-158   */
-159  public Bytes(byte[] bytes) {
-160this(bytes, 0, bytes.length);
-161  }
-162
-163  /**
-164   * Set the new Bytes to the contents of 
the passed
-165   * ibw.
-166   * @param ibw the value to set this 
Bytes to.
-167   */
-168  public Bytes(final Bytes ibw) {
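The class javadoc in this hunk describes Bytes as a utility for byte[] conversions, comparisons, and map/tree keys. A short usage sketch of that public API follows, assuming an HBase client jar on the classpath; only widely documented static helpers (toBytes, toInt, toString, compareTo, and the len method shown above) are used.

  import org.apache.hadoop.hbase.util.Bytes;

  public class BytesExample {
    public static void main(String[] args) {
      // Round-trip an int and a String through byte[].
      byte[] encodedInt = Bytes.toBytes(42);
      int decodedInt = Bytes.toInt(encodedInt);        // 42

      byte[] rowKey = Bytes.toBytes("row-0001");
      String decodedKey = Bytes.toString(rowKey);      // "row-0001"

      // Lexicographic comparison, the ordering HBase uses for row keys.
      int cmp = Bytes.compareTo(Bytes.toBytes("a"), Bytes.toBytes("b"));  // negative

      // len() is null-safe, returning 0 for a null array as documented above.
      int n = Bytes.len(null);                         // 0

      System.out.println(decodedInt + " " + decodedKey + " " + cmp + " " + n);
    }
  }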

[06/40] hbase-site git commit: Published site at .

2017-08-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c0fcd7f3/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.MockRSProcedureDispatcher.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.MockRSProcedureDispatcher.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.MockRSProcedureDispatcher.html
index 785d459..62da186 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.MockRSProcedureDispatcher.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.MockRSProcedureDispatcher.html
@@ -251,7 +251,7 @@
 243}
 244  }
 245
-246  @Test
+246  @Ignore @Test // Disabled for now. 
Since HBASE-18551, this mock is insufficient.
 247  public void testSocketTimeout() throws 
Exception {
 248final TableName tableName = 
TableName.valueOf(this.name.getMethodName());
 249final HRegionInfo hri = 
createRegionInfo(tableName, 1);
@@ -262,580 +262,579 @@
 254rsDispatcher.setMockRsExecutor(new 
SocketTimeoutRsExecutor(20, 3));
 255
waitOnFuture(submitProcedure(am.createAssignProcedure(hri, false)));
 256
-257rsDispatcher.setMockRsExecutor(new 
SocketTimeoutRsExecutor(20, 3));
-258
-259
exception.expect(ServerCrashException.class);
-260
waitOnFuture(submitProcedure(am.createUnassignProcedure(hri, null, false)));
-261
-262assertEquals(assignSubmittedCount + 
1, assignProcMetrics.getSubmittedCounter().getCount());
-263assertEquals(assignFailedCount, 
assignProcMetrics.getFailedCounter().getCount());
-264assertEquals(unassignSubmittedCount + 
1, unassignProcMetrics.getSubmittedCounter().getCount());
-265assertEquals(unassignFailedCount + 1, 
unassignProcMetrics.getFailedCounter().getCount());
-266  }
-267
-268  @Test
-269  public void testServerNotYetRunning() 
throws Exception {
-270
testRetriesExhaustedFailure(TableName.valueOf(this.name.getMethodName()),
-271  new 
ServerNotYetRunningRsExecutor());
-272  }
-273
-274  private void 
testRetriesExhaustedFailure(final TableName tableName,
-275  final MockRSExecutor executor) 
throws Exception {
-276final HRegionInfo hri = 
createRegionInfo(tableName, 1);
-277
-278// collect AM metrics before test
-279collectAssignmentManagerMetrics();
-280
-281// Test Assign operation failure
-282
rsDispatcher.setMockRsExecutor(executor);
-283try {
-284  
waitOnFuture(submitProcedure(am.createAssignProcedure(hri, false)));
-285  fail("unexpected assign 
completion");
-286} catch (RetriesExhaustedException e) 
{
-287  // expected exception
-288  LOG.info("expected exception from 
assign operation: " + e.getMessage(), e);
-289}
-290
-291// Assign the region (without 
problems)
-292rsDispatcher.setMockRsExecutor(new 
GoodRsExecutor());
-293
waitOnFuture(submitProcedure(am.createAssignProcedure(hri, false)));
-294
-295// TODO: Currently unassign just 
keeps trying until it sees a server crash.
-296// There is no count on unassign.
-297/*
-298// Test Unassign operation failure
-299
rsDispatcher.setMockRsExecutor(executor);
-300
waitOnFuture(submitProcedure(am.createUnassignProcedure(hri, null, false)));
-301
-302assertEquals(assignSubmittedCount + 
2, assignProcMetrics.getSubmittedCounter().getCount());
-303assertEquals(assignFailedCount + 1, 
assignProcMetrics.getFailedCounter().getCount());
-304assertEquals(unassignSubmittedCount + 
1, unassignProcMetrics.getSubmittedCounter().getCount());
-305
-306// TODO: We supposed to have 1 failed 
assign, 1 successful assign and a failed unassign
-307// operation. But ProcV2 framework 
marks aborted unassign operation as success. Fix it!
-308assertEquals(unassignFailedCount, 
unassignProcMetrics.getFailedCounter().getCount());
-309*/
-310  }
+257rsDispatcher.setMockRsExecutor(new 
SocketTimeoutRsExecutor(20, 1));
+258// 
exception.expect(ServerCrashException.class);
+259
waitOnFuture(submitProcedure(am.createUnassignProcedure(hri, null, false)));
+260
+261assertEquals(assignSubmittedCount + 
1, assignProcMetrics.getSubmittedCounter().getCount());
+262assertEquals(assignFailedCount, 
assignProcMetrics.getFailedCounter().getCount());
+263assertEquals(unassignSubmittedCount + 
1, unassignProcMetrics.getSubmittedCounter().getCount());
+264assertEquals(unassignFailedCount + 1, 
unassignProcMetrics.getFailedCounter().getCount());
+265  }
+266
+267  @Test
+268  public void testServerNotYetRunning() 
throws Exception {
+269
testRetriesExhaustedFailure(TableName.valueOf(this.name.getMethodName()),
+270  new 
ServerNotYetRunningRsExecutor());
+271  }
+272
+273  private void 
testRetriesExhaustedFailure(final TableName tableName,
+274  final MockRSExecutor executor
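testRetriesExhaustedFailure above drives an assign procedure against a mock executor that always fails and asserts the failure either through the JUnit ExpectedException rule (the commented-out exception.expect line) or the try/fail/catch idiom with fail("unexpected assign completion"). A generic JUnit 4 sketch of both idioms follows, independent of the HBase fixtures; doFailingCall is a hypothetical stand-in for waitOnFuture(submitProcedure(...)).

  import static org.junit.Assert.fail;

  import org.junit.Rule;
  import org.junit.Test;
  import org.junit.rules.ExpectedException;

  public class ExpectedFailureIdiomTest {

    // Same kind of rule the original test declares and uses via exception.expect(...).
    @Rule
    public ExpectedException exception = ExpectedException.none();

    // Stand-in for a call that exhausts its retries and fails.
    private void doFailingCall() {
      throw new IllegalStateException("retries exhausted");
    }

    @Test
    public void expectViaRule() {
      // Declarative form: the test passes only if the expected exception is thrown.
      exception.expect(IllegalStateException.class);
      doFailingCall();
    }

    @Test
    public void expectViaTryCatch() {
      // Imperative form used by testRetriesExhaustedFailure.
      try {
        doFailingCall();
        fail("unexpected completion");
      } catch (IllegalStateException e) {
        // expected exception
      }
    }
  }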

[06/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1b6d8c10/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
index 9a6c30b..af6a1dd 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
@@ -54,1176 +54,1176 @@
 046import org.apache.commons.io.IOUtils;
 047import org.apache.commons.logging.Log;
 048import 
org.apache.commons.logging.LogFactory;
-049import 
org.apache.hadoop.hbase.HRegionInfo;
-050import 
org.apache.hadoop.hbase.HRegionLocation;
-051import 
org.apache.hadoop.hbase.MetaTableAccessor;
-052import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-053import 
org.apache.hadoop.hbase.NotServingRegionException;
-054import 
org.apache.hadoop.hbase.ProcedureInfo;
-055import 
org.apache.hadoop.hbase.RegionLocations;
-056import 
org.apache.hadoop.hbase.ServerName;
-057import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-058import 
org.apache.hadoop.hbase.HConstants;
-059import 
org.apache.hadoop.hbase.TableExistsException;
-060import 
org.apache.hadoop.hbase.TableName;
-061import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-062import 
org.apache.hadoop.hbase.TableNotDisabledException;
-063import 
org.apache.hadoop.hbase.TableNotEnabledException;
-064import 
org.apache.hadoop.hbase.TableNotFoundException;
-065import 
org.apache.hadoop.hbase.UnknownRegionException;
-066import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-067import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-068import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-069import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-070import 
org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-071import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-072import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-073import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-074import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-075import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-076import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-077import 
org.apache.hadoop.hbase.replication.ReplicationException;
-078import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-079import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-080import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-085import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-086import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-087import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-088import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-089import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
-090import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
-091import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
-092import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-093import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-094import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-095import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-096import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-105import 
org.apache.

[06/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e9db7c5d/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestStore.MyList.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestStore.MyList.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestStore.MyList.html
index a1d9f98..9ce2441 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestStore.MyList.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestStore.MyList.html
@@ -28,1446 +28,1556 @@
 020package 
org.apache.hadoop.hbase.regionserver;
 021
 022import static 
org.junit.Assert.assertEquals;
-023import static 
org.junit.Assert.assertNull;
-024import static 
org.junit.Assert.assertTrue;
-025import static org.mockito.Matchers.any;
-026import static org.mockito.Mockito.spy;
-027import static 
org.mockito.Mockito.times;
-028import static 
org.mockito.Mockito.verify;
-029
-030import java.io.IOException;
-031import java.lang.ref.SoftReference;
-032import 
java.security.PrivilegedExceptionAction;
-033import java.util.ArrayList;
-034import java.util.Arrays;
-035import java.util.Collection;
-036import java.util.Collections;
-037import java.util.Iterator;
-038import java.util.List;
-039import java.util.ListIterator;
-040import java.util.NavigableSet;
-041import java.util.TreeSet;
-042import 
java.util.concurrent.ConcurrentSkipListSet;
-043import 
java.util.concurrent.CountDownLatch;
-044import 
java.util.concurrent.ExecutorService;
-045import java.util.concurrent.Executors;
-046import java.util.concurrent.TimeUnit;
-047import 
java.util.concurrent.atomic.AtomicBoolean;
-048import java.util.function.Consumer;
-049
-050import org.apache.commons.logging.Log;
-051import 
org.apache.commons.logging.LogFactory;
-052import 
org.apache.hadoop.conf.Configuration;
-053import 
org.apache.hadoop.fs.FSDataOutputStream;
-054import org.apache.hadoop.fs.FileStatus;
-055import org.apache.hadoop.fs.FileSystem;
-056import 
org.apache.hadoop.fs.FilterFileSystem;
-057import 
org.apache.hadoop.fs.LocalFileSystem;
-058import org.apache.hadoop.fs.Path;
-059import 
org.apache.hadoop.fs.permission.FsPermission;
-060import org.apache.hadoop.hbase.Cell;
-061import 
org.apache.hadoop.hbase.CellComparator;
-062import 
org.apache.hadoop.hbase.CellUtil;
-063import 
org.apache.hadoop.hbase.HBaseConfiguration;
-064import 
org.apache.hadoop.hbase.HBaseTestingUtility;
-065import 
org.apache.hadoop.hbase.HColumnDescriptor;
-066import 
org.apache.hadoop.hbase.HConstants;
-067import 
org.apache.hadoop.hbase.HRegionInfo;
-068import 
org.apache.hadoop.hbase.HTableDescriptor;
-069import 
org.apache.hadoop.hbase.KeyValue;
-070import 
org.apache.hadoop.hbase.MemoryCompactionPolicy;
-071import 
org.apache.hadoop.hbase.TableName;
-072import 
org.apache.hadoop.hbase.client.Get;
-073import 
org.apache.hadoop.hbase.client.Scan;
-074import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-075import 
org.apache.hadoop.hbase.io.compress.Compression;
-076import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-077import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-078import 
org.apache.hadoop.hbase.io.hfile.HFile;
-079import 
org.apache.hadoop.hbase.io.hfile.HFileContext;
-080import 
org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
-081import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-082import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
-083import 
org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor;
-084import 
org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher;
-085import 
org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
-086import 
org.apache.hadoop.hbase.security.User;
-087import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-088import 
org.apache.hadoop.hbase.testclassification.RegionServerTests;
-089import 
org.apache.hadoop.hbase.util.Bytes;
-090import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-091import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
-092import 
org.apache.hadoop.hbase.util.FSUtils;
-093import 
org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
-094import 
org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
-095import 
org.apache.hadoop.hbase.wal.WALFactory;
-096import 
org.apache.hadoop.util.Progressable;
-097import org.junit.After;
-098import org.junit.Assert;
-099import org.junit.Before;
-100import org.junit.Rule;
-101import org.junit.Test;
-102import 
org.junit.experimental.categories.Category;
-103import org.junit.rules.TestName;
-104import org.mockito.Mockito;
-105
-106import com.google.common.collect.Lists;
-107import 
java.util.concurrent.atomic.AtomicInteger;
-108
-109/**
-110 * Test class for the Store
-111 */
-112@Category({RegionServerTests.class, 
MediumTests.class})
-113public class TestStore {
-114  private static final Log LOG

[06/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.html b/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.html
index 6c856aa..b884e7e 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class TestAsyncTableAdminApi
+public class TestAsyncTableAdminApi
 extends TestAsyncAdminBase
 Class to test asynchronous table admin operations.
 
@@ -335,7 +335,7 @@ extends 
 
 name
-public org.junit.rules.TestName name
+public org.junit.rules.TestName name
 
 
 
@@ -352,7 +352,7 @@ extends 
 
 TestAsyncTableAdminApi
-public TestAsyncTableAdminApi()
+public TestAsyncTableAdminApi()
 
 
 
@@ -369,7 +369,7 @@ extends 
 
 testTableExist
-public void testTableExist()
+public void testTableExist()
                    throws Exception
 
 Throws:
@@ -383,7 +383,7 @@ extends 
 
 testListTables
-public void testListTables()
+public void testListTables()
                    throws Exception
 
 Throws:
@@ -397,7 +397,7 @@ extends 
 
 testGetTableDescriptor
-public void testGetTableDescriptor()
+public void testGetTableDescriptor()
                    throws Exception
 
 Throws:
@@ -411,7 +411,7 @@ extends 
 
 testCreateTable
-public void testCreateTable()
+public void testCreateTable()
                    throws Exception
 
 Throws:
@@ -425,7 +425,7 @@ extends 
 
 getStateFromMeta
-private org.apache.hadoop.hbase.client.TableState.State getStateFromMeta(org.apache.hadoop.hbase.TableName table)
+private org.apache.hadoop.hbase.client.TableState.State getStateFromMeta(org.apache.hadoop.hbase.TableName table)
                    throws Exception
 
 Throws:
@@ -439,7 +439,7 @@ extends 
 
 testCreateTableNumberOfRegions
-public void testCreateTableNumberOfRegions()
+public void testCreateTableNumberOfRegions()
                    throws Exception
 
 Throws:
@@ -453,7 +453,7 @@ extends 
 
 testCreateTableWithRegions
-public void testCreateTableWithRegions()
+public void testCreateTableWithRegions()
                    throws Exception
 
 Throws:
@@ -467,7 +467,7 @@ extends 
 
 verifyRoundRobinDistribution
-private void verifyRoundRobinDistribution(org.apache.hadoop.hbase.client.ClusterConnection c,
+private void verifyRoundRobinDistribution(org.apache.hadoop.hbase.client.ClusterConnection c,
   
org.apache.hadoop.hbase.client.RegionLocator regionLocator,
   int expectedRegions)
                    throws IOException
@@ -483,7 +483,7 @@ extends 
 
 testCreateTableWithOnlyEmptyStartRow
-public void testCreateTableWithOnlyEmptyStartRow()
+public void testCreateTableWithOnlyEmptyStartRow()
                    throws IOException
 
 Throws:
@@ -497,7 +497,7 @@ extends 
 
 testCreateTableWithEmptyRowInTheSplitKeys
-public void testCreateTableWithEmptyRowInTheSplitKeys()
+public void testCreateTableWithEmptyRowInTheSplitKeys()
                    throws IOException
 
 Throws:
@@ -511,7 +511,7 @@ extends 
 
 testDeleteTable
-public void testDeleteTable()
+public void testDeleteTable()
                    throws Exception