[04/40] hbase-site git commit: Published site at 6d7bc0e98b25215e79f67f107fd0d3306dfcf352.

2018-09-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/738e976e/testdevapidocs/src-html/org/apache/hadoop/hbase/TestMetaTableAccessor.SpyingRpcScheduler.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/TestMetaTableAccessor.SpyingRpcScheduler.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/TestMetaTableAccessor.SpyingRpcScheduler.html
index 8c8cc19..ad0629d 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/TestMetaTableAccessor.SpyingRpcScheduler.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/TestMetaTableAccessor.SpyingRpcScheduler.html
@@ -54,838 +54,854 @@
 046import org.apache.hadoop.hbase.ipc.DelegatingRpcScheduler;
 047import org.apache.hadoop.hbase.ipc.PriorityFunction;
 048import org.apache.hadoop.hbase.ipc.RpcScheduler;
-049import org.apache.hadoop.hbase.regionserver.HRegionServer;
-050import org.apache.hadoop.hbase.regionserver.RSRpcServices;
-051import org.apache.hadoop.hbase.regionserver.SimpleRpcSchedulerFactory;
-052import org.apache.hadoop.hbase.testclassification.MediumTests;
-053import org.apache.hadoop.hbase.testclassification.MiscTests;
-054import org.apache.hadoop.hbase.util.Bytes;
-055import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-056import org.apache.hadoop.hbase.util.ManualEnvironmentEdge;
-057import org.apache.hadoop.hbase.util.Pair;
-058import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-059import org.junit.AfterClass;
-060import org.junit.Assert;
-061import org.junit.BeforeClass;
-062import org.junit.ClassRule;
-063import org.junit.Rule;
-064import org.junit.Test;
-065import org.junit.experimental.categories.Category;
-066import org.junit.rules.TestName;
-067import org.slf4j.Logger;
-068import org.slf4j.LoggerFactory;
-069
-070import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-071import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
-072
-073/**
-074 * Test {@link org.apache.hadoop.hbase.MetaTableAccessor}.
-075 */
-076@Category({MiscTests.class, MediumTests.class})
-077@SuppressWarnings("deprecation")
-078public class TestMetaTableAccessor {
-079
-080  @ClassRule
-081  public static final HBaseClassTestRule CLASS_RULE =
-082      HBaseClassTestRule.forClass(TestMetaTableAccessor.class);
-083
-084  private static final Logger LOG = LoggerFactory.getLogger(TestMetaTableAccessor.class);
-085  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
-086  private static Connection connection;
-087  private Random random = new Random();
-088
-089  @Rule
-090  public TestName name = new TestName();
-091
-092  @BeforeClass public static void beforeClass() throws Exception {
-093    UTIL.startMiniCluster(3);
-094
-095    Configuration c = new Configuration(UTIL.getConfiguration());
-096    // Tests to 4 retries every 5 seconds. Make it try every 1 second so more
-097    // responsive.  1 second is default as is ten retries.
-098    c.setLong("hbase.client.pause", 1000);
-099    c.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 10);
-100    connection = ConnectionFactory.createConnection(c);
-101  }
-102
-103  @AfterClass public static void afterClass() throws Exception {
-104    connection.close();
-105    UTIL.shutdownMiniCluster();
-106  }
-107
-108  /**
-109   * Does {@link MetaTableAccessor#getRegion(Connection, byte[])} and a write
-110   * against hbase:meta while its hosted server is restarted to prove our retrying
-111   * works.
-112   */
-113  @Test public void testRetrying()
-114  throws IOException, InterruptedException {
-115    final TableName tableName = TableName.valueOf(name.getMethodName());
-116    LOG.info("Started " + tableName);
-117    Table t = UTIL.createMultiRegionTable(tableName, HConstants.CATALOG_FAMILY);
-118    int regionCount = -1;
-119    try (RegionLocator r = UTIL.getConnection().getRegionLocator(tableName)) {
-120      regionCount = r.getStartKeys().length;
-121    }
-122    // Test it works getting a region from just made user table.
-123    final List<RegionInfo> regions =
-124      testGettingTableRegions(connection, tableName, regionCount);
-125    MetaTask reader = new MetaTask(connection, "reader") {
-126      @Override
-127      void metaTask() throws Throwable {
-128        testGetRegion(connection, regions.get(0));
-129        LOG.info("Read " + regions.get(0).getEncodedName());
-130      }
-131    };
-132    MetaTask writer = new MetaTask(connection, "writer") {
-133      @Override
-134      void metaTask() throws Throwable {
-135        MetaTableAccessor.addRegionToMeta(connection, regions.get(0));
-136        LOG.info("Wrote " + regions.get(0).getEncodedName());
-137      }
-138    };
-139    reader.start();
-140    writer.start();
-141
-142    // We're gonna check how it takes. If it takes too long, we will consider
-143    //  it as a fail. We can't put that in the @Test tag as we want to close
-144 

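For readers skimming the hunk above: the retry behaviour under test is driven entirely by client configuration. A minimal sketch of that setup, assuming the same HBaseTestingUtility instance named UTIL and the imports already shown in the excerpt (an illustration, not code from the commit):

// Sketch: make hbase:meta reads/writes retry quickly while the hosting region server restarts.
Configuration c = new Configuration(UTIL.getConfiguration());
c.setLong("hbase.client.pause", 1000);                 // pause 1s between attempts
c.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 10);  // allow up to ten attempts
try (Connection connection = ConnectionFactory.createConnection(c)) {
  // reader and writer threads share this connection, as testRetrying() does above
}
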
[04/40] hbase-site git commit: Published site at 2aae247e3f8f8a393b403a82593bdc3a1ba81193.

2018-09-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/80652933/testdevapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.html
index 3b83a16..0c894de 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.html
@@ -1478,960 +1478,960 @@
 1470   */
 1471  public Table createTable(TableName tableName, byte[][] families, byte[][] splitKeys)
 1472  throws IOException {
-1473    return createTable(tableName, families, splitKeys, new Configuration(getConfiguration()));
+1473    return createTable(tableName, families, splitKeys, 1, new Configuration(getConfiguration()));
 1474  }
 1475
-1476  public Table createTable(TableName tableName, byte[][] families,
-1477      int numVersions, byte[] startKey, byte[] endKey, int numRegions)
-1478  throws IOException{
-1479    HTableDescriptor desc = createTableDescriptor(tableName, families, numVersions);
-1480
-1481    getAdmin().createTable(desc, startKey, endKey, numRegions);
-1482    // HBaseAdmin only waits for regions to appear in hbase:meta we
-1483    // should wait until they are assigned
-1484    waitUntilAllRegionsAssigned(tableName);
-1485    return getConnection().getTable(tableName);
-1486  }
-1487
-1488  /**
-1489   * Create a table.
-1490   * @param htd
-1491   * @param families
-1492   * @param c Configuration to use
-1493   * @return A Table instance for the created table.
-1494   * @throws IOException
-1495   */
-1496  public Table createTable(TableDescriptor htd, byte[][] families, Configuration c)
-1497  throws IOException {
-1498    return createTable(htd, families, null, c);
-1499  }
-1500
-1501  /**
-1502   * Create a table.
-1503   * @param htd table descriptor
-1504   * @param families array of column families
-1505   * @param splitKeys array of split keys
-1506   * @param c Configuration to use
-1507   * @return A Table instance for the created table.
-1508   * @throws IOException if getAdmin or createTable fails
-1509   */
-1510  public Table createTable(TableDescriptor htd, byte[][] families, byte[][] splitKeys,
-1511      Configuration c) throws IOException {
-1512    // Disable blooms (they are on by default as of 0.95) but we disable them here because
-1513    // tests have hard coded counts of what to expect in block cache, etc., and blooms being
-1514    // on is interfering.
-1515    return createTable(htd, families, splitKeys, BloomType.NONE, HConstants.DEFAULT_BLOCKSIZE, c);
-1516  }
-1517
-1518  /**
-1519   * Create a table.
-1520   * @param htd table descriptor
-1521   * @param families array of column families
-1522   * @param splitKeys array of split keys
-1523   * @param type Bloom type
-1524   * @param blockSize block size
-1525   * @param c Configuration to use
-1526   * @return A Table instance for the created table.
-1527   * @throws IOException if getAdmin or createTable fails
-1528   */
-1529
-1530  public Table createTable(TableDescriptor htd, byte[][] families, byte[][] splitKeys,
-1531      BloomType type, int blockSize, Configuration c) throws IOException {
-1532    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(htd);
-1533    for (byte[] family : families) {
-1534      builder.setColumnFamily(
-1535          ColumnFamilyDescriptorBuilder.newBuilder(family).setBloomFilterType(type)
-1536              .setBlocksize(blockSize).build());
-1537    }
-1538    TableDescriptor td = builder.build();
-1539    getAdmin().createTable(td, splitKeys);
-1540    // HBaseAdmin only waits for regions to appear in hbase:meta
-1541    // we should wait until they are assigned
-1542    waitUntilAllRegionsAssigned(td.getTableName());
-1543    return getConnection().getTable(td.getTableName());
-1544  }
-1545
-1546  /**
-1547   * Create a table.
-1548   * @param htd table descriptor
-1549   * @param splitRows array of split keys
-1550   * @return A Table instance for the created table.
-1551   * @throws IOException
-1552   */
-1553  public Table createTable(TableDescriptor htd, byte[][] splitRows)
-1554  throws IOException {
-1555    getAdmin().createTable(htd, splitRows);
-1556    // HBaseAdmin only waits for regions to appear in hbase:meta
-1557    // we should wait until they are assigned
-1558    waitUntilAllRegionsAssigned(htd.getTableName());
-1559    return getConnection().getTable(htd.getTableName());
-1560  }
-1561
-1562  /**
-1563   * Create a table.
-1564   * @param tableName
-1565   * @param families
-1566   * @param splitKeys
-1567   * @param c Configuration to use
-1568   * @return A Table instance for the created table.
-1569   * @throws 

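The only functional change in this hunk is the extra int argument (1) that the three-argument createTable overload now forwards; what that literal denotes is not visible in this excerpt. For orientation, a sketch of how tests typically call the helper, assuming a started HBaseTestingUtility named UTIL and hypothetical table, family, and split-key values (not from the commit):

// Sketch: create a pre-split table through the test utility and write one row.
TableName tableName = TableName.valueOf("demoTable");   // hypothetical name
byte[][] families = { Bytes.toBytes("cf") };
byte[][] splitKeys = { Bytes.toBytes("row-5000") };      // hypothetical split point
try (Table table = UTIL.createTable(tableName, families, splitKeys)) {
  table.put(new Put(Bytes.toBytes("row-0001"))
      .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v")));
}
// The helper returns only after waitUntilAllRegionsAssigned(), per the comments in the hunk above.
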
[04/40] hbase-site git commit: Published site at 5fd16f38533591615aa9afa48bb89bcbd8313caf.

2018-06-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f0b7674/downloads.html
--
diff --git a/downloads.html b/downloads.html
index 24b1b5e..bca92c1 100644
--- a/downloads.html
+++ b/downloads.html
@@ -7,7 +7,7 @@
-
+
 Apache HBase – Apache HBase Downloads
@@ -366,7 +366,7 @@ under the License. -->
 https://www.apache.org/ The Apache Software Foundation. All rights reserved.
-  Last Published: 2018-06-08
+  Last Published: 2018-06-09

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f0b7674/export_control.html
--
diff --git a/export_control.html b/export_control.html
index 1dd6000..b6740a2 100644
--- a/export_control.html
+++ b/export_control.html
@@ -7,7 +7,7 @@
-
+
 Apache HBase – Export Control
@@ -331,7 +331,7 @@ for more details.
 https://www.apache.org/ The Apache Software Foundation. All rights reserved.
-  Last Published: 2018-06-08
+  Last Published: 2018-06-09

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f0b7674/index.html
--
diff --git a/index.html b/index.html
index 7b9c65c..3c1b042 100644
--- a/index.html
+++ b/index.html
@@ -7,7 +7,7 @@
-
+
 Apache HBase – Apache HBase™ Home
@@ -411,7 +411,7 @@ Apache HBase is an open-source, distributed, versioned, non-relational database
 https://www.apache.org/ The Apache Software Foundation. All rights reserved.
-  Last Published: 2018-06-08
+  Last Published: 2018-06-09

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f0b7674/integration.html
--
diff --git a/integration.html b/integration.html
index 4560855..a61b456 100644
--- a/integration.html
+++ b/integration.html
@@ -7,7 +7,7 @@
-
+
 Apache HBase – CI Management
@@ -291,7 +291,7 @@
 https://www.apache.org/ The Apache Software Foundation. All rights reserved.
-  Last Published: 2018-06-08
+  Last Published: 2018-06-09

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f0b7674/issue-tracking.html
--
diff --git a/issue-tracking.html b/issue-tracking.html
index 6f7d583..3a84f0b 100644
--- a/issue-tracking.html
+++ b/issue-tracking.html
@@ -7,7 +7,7 @@
-
+
 Apache HBase – Issue Management
@@ -288,7 +288,7 @@
 https://www.apache.org/ The Apache Software Foundation. All rights reserved.
-  Last Published: 2018-06-08
+  Last Published: 2018-06-09

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f0b7674/license.html
--
diff --git a/license.html b/license.html
index 66c1b23..8d83f92 100644
--- a/license.html
+++ b/license.html
@@ -7,7 +7,7 @@
-
+
 Apache HBase – Project Licenses
@@ -491,7 +491,7 @@
 https://www.apache.org/ The Apache Software Foundation. All rights reserved.
-  Last Published: 2018-06-08
+  Last Published: 2018-06-09

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f0b7674/mail-lists.html
--
diff --git a/mail-lists.html b/mail-lists.html
index 77a358c..e00b930 100644
--- a/mail-lists.html
+++ b/mail-lists.html
@@ -7,7 +7,7 @@
-
+
 Apache HBase – Project Mailing Lists
@@ -341,7 +341,7 @@
 https://www.apache.org/ The Apache Software Foundation. All rights reserved.
-  Last Published: 2018-06-08
+  Last Published: 2018-06-09

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f0b7674/metrics.html
--
diff --git a/metrics.html b/metrics.html
index 8f7df6a..1c38c3f 100644
--- a/metrics.html
+++ b/metrics.html

[04/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.

2018-04-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
index 8c0d57c..e606e82 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
@@ -46,2582 +46,2583 @@
 038import java.util.Iterator;
 039import java.util.List;
 040
-041import org.apache.hadoop.hbase.Cell;
-042import org.apache.hadoop.hbase.CellComparator;
-043import org.apache.hadoop.hbase.KeyValue;
-044import org.apache.hadoop.io.RawComparator;
-045import org.apache.hadoop.io.WritableComparator;
-046import org.apache.hadoop.io.WritableUtils;
-047import org.apache.yetus.audience.InterfaceAudience;
-048import org.slf4j.Logger;
-049import org.slf4j.LoggerFactory;
-050
-051import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-052
-053import com.google.protobuf.ByteString;
-054
-055import sun.misc.Unsafe;
-056
-057/**
-058 * Utility class that handles byte arrays, conversions to/from other types,
-059 * comparisons, hash code generation, manufacturing keys for HashMaps or
-060 * HashSets, and can be used as key in maps or trees.
-061 */
-062@SuppressWarnings("restriction")
-063@InterfaceAudience.Public
-064@edu.umd.cs.findbugs.annotations.SuppressWarnings(
-065    value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS",
-066    justification="It has been like this forever")
-067public class Bytes implements Comparable<Bytes> {
-068
-069  // Using the charset canonical name for String/byte[] conversions is much
-070  // more efficient due to use of cached encoders/decoders.
-071  private static final String UTF8_CSN = StandardCharsets.UTF_8.name();
-072
-073  //HConstants.EMPTY_BYTE_ARRAY should be updated if this changed
-074  private static final byte [] EMPTY_BYTE_ARRAY = new byte [0];
-075
-076  private static final Logger LOG = LoggerFactory.getLogger(Bytes.class);
-077
-078  /**
-079   * Size of boolean in bytes
-080   */
-081  public static final int SIZEOF_BOOLEAN = Byte.SIZE / Byte.SIZE;
-082
-083  /**
-084   * Size of byte in bytes
-085   */
-086  public static final int SIZEOF_BYTE = SIZEOF_BOOLEAN;
-087
-088  /**
-089   * Size of char in bytes
-090   */
-091  public static final int SIZEOF_CHAR = Character.SIZE / Byte.SIZE;
-092
-093  /**
-094   * Size of double in bytes
-095   */
-096  public static final int SIZEOF_DOUBLE = Double.SIZE / Byte.SIZE;
-097
-098  /**
-099   * Size of float in bytes
-100   */
-101  public static final int SIZEOF_FLOAT = Float.SIZE / Byte.SIZE;
-102
-103  /**
-104   * Size of int in bytes
-105   */
-106  public static final int SIZEOF_INT = Integer.SIZE / Byte.SIZE;
-107
-108  /**
-109   * Size of long in bytes
-110   */
-111  public static final int SIZEOF_LONG = Long.SIZE / Byte.SIZE;
-112
-113  /**
-114   * Size of short in bytes
-115   */
-116  public static final int SIZEOF_SHORT = Short.SIZE / Byte.SIZE;
-117
-118  /**
-119   * Mask to apply to a long to reveal the lower int only. Use like this:
-120   * int i = (int)(0xFFFFFFFF00000000L ^ some_long_value);
-121   */
-122  public static final long MASK_FOR_LOWER_INT_IN_LONG = 0xFFFFFFFF00000000L;
-123
-124  /**
-125   * Estimate of size cost to pay beyond payload in jvm for instance of byte [].
-126   * Estimate based on study of jhat and jprofiler numbers.
-127   */
-128  // JHat says BU is 56 bytes.
-129  // SizeOf which uses java.lang.instrument says 24 bytes. (3 longs?)
-130  public static final int ESTIMATED_HEAP_TAX = 16;
-131
-132  private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
-133
-134  /**
-135   * Returns length of the byte array, returning 0 if the array is null.
-136   * Useful for calculating sizes.
-137   * @param b byte array, which can be null
-138   * @return 0 if b is null, otherwise returns length
-139   */
-140  final public static int len(byte[] b) {
-141    return b == null ? 0 : b.length;
-142  }
-143
-144  private byte[] bytes;
-145  private int offset;
-146  private int length;
-147
-148  /**
-149   * Create a zero-size sequence.
-150   */
-151  public Bytes() {
-152    super();
-153  }
-154
-155  /**
-156   * Create a Bytes using the byte array as the initial value.
-157   * @param bytes This array becomes the backing storage for the object.
-158   */
-159  public Bytes(byte[] bytes) {
-160    this(bytes, 0, bytes.length);
-161  }
-162
-163  /**
-164   * Set the new Bytes to the contents of the passed
-165   * <code>ibw</code>.
-166   * @param ibw the value to set this Bytes to.
-167   */
-168  public Bytes(final Bytes ibw) {
-169    this(ibw.get(), ibw.getOffset(), ibw.getLength());
-170  }
-171
-172  /**
-173   * Set the value to a given byte range
-174   * @param bytes the new 

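Since this hunk is mostly the Bytes class preamble, a short usage sketch may help; it relies only on members visible above (len, SIZEOF_LONG) plus the toBytes/toLong converters defined later in the same class, and is not part of the commit:

// Sketch: round-trip a long through Bytes and use the null-safe length helper.
byte[] encoded = Bytes.toBytes(42L);      // 8 bytes, i.e. Bytes.SIZEOF_LONG
long decoded = Bytes.toLong(encoded);     // 42
int none = Bytes.len(null);               // 0, as documented for len()
assert decoded == 42L && encoded.length == Bytes.SIZEOF_LONG && none == 0;
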
[04/40] hbase-site git commit: Published site at .

2017-08-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c0fcd7f3/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.RandRsExecutor.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.RandRsExecutor.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.RandRsExecutor.html
index 785d459..62da186 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.RandRsExecutor.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.RandRsExecutor.html
@@ -251,7 +251,7 @@
 243    }
 244  }
 245
-246  @Test
+246  @Ignore @Test // Disabled for now. Since HBASE-18551, this mock is insufficient.
 247  public void testSocketTimeout() throws Exception {
 248    final TableName tableName = TableName.valueOf(this.name.getMethodName());
 249    final HRegionInfo hri = createRegionInfo(tableName, 1);
@@ -262,580 +262,579 @@
 254    rsDispatcher.setMockRsExecutor(new SocketTimeoutRsExecutor(20, 3));
 255    waitOnFuture(submitProcedure(am.createAssignProcedure(hri, false)));
 256
-257    rsDispatcher.setMockRsExecutor(new SocketTimeoutRsExecutor(20, 3));
-258
-259    exception.expect(ServerCrashException.class);
-260    waitOnFuture(submitProcedure(am.createUnassignProcedure(hri, null, false)));
-261
-262    assertEquals(assignSubmittedCount + 1, assignProcMetrics.getSubmittedCounter().getCount());
-263    assertEquals(assignFailedCount, assignProcMetrics.getFailedCounter().getCount());
-264    assertEquals(unassignSubmittedCount + 1, unassignProcMetrics.getSubmittedCounter().getCount());
-265    assertEquals(unassignFailedCount + 1, unassignProcMetrics.getFailedCounter().getCount());
-266  }
-267
-268  @Test
-269  public void testServerNotYetRunning() throws Exception {
-270    testRetriesExhaustedFailure(TableName.valueOf(this.name.getMethodName()),
-271      new ServerNotYetRunningRsExecutor());
-272  }
-273
-274  private void testRetriesExhaustedFailure(final TableName tableName,
-275      final MockRSExecutor executor) throws Exception {
-276    final HRegionInfo hri = createRegionInfo(tableName, 1);
-277
-278    // collect AM metrics before test
-279    collectAssignmentManagerMetrics();
-280
-281    // Test Assign operation failure
-282    rsDispatcher.setMockRsExecutor(executor);
-283    try {
-284      waitOnFuture(submitProcedure(am.createAssignProcedure(hri, false)));
-285      fail("unexpected assign completion");
-286    } catch (RetriesExhaustedException e) {
-287      // expected exception
-288      LOG.info("expected exception from assign operation: " + e.getMessage(), e);
-289    }
-290
-291    // Assign the region (without problems)
-292    rsDispatcher.setMockRsExecutor(new GoodRsExecutor());
-293    waitOnFuture(submitProcedure(am.createAssignProcedure(hri, false)));
-294
-295    // TODO: Currently unassign just keeps trying until it sees a server crash.
-296    // There is no count on unassign.
-297    /*
-298    // Test Unassign operation failure
-299    rsDispatcher.setMockRsExecutor(executor);
-300    waitOnFuture(submitProcedure(am.createUnassignProcedure(hri, null, false)));
-301
-302    assertEquals(assignSubmittedCount + 2, assignProcMetrics.getSubmittedCounter().getCount());
-303    assertEquals(assignFailedCount + 1, assignProcMetrics.getFailedCounter().getCount());
-304    assertEquals(unassignSubmittedCount + 1, unassignProcMetrics.getSubmittedCounter().getCount());
-305
-306    // TODO: We supposed to have 1 failed assign, 1 successful assign and a failed unassign
-307    // operation. But ProcV2 framework marks aborted unassign operation as success. Fix it!
-308    assertEquals(unassignFailedCount, unassignProcMetrics.getFailedCounter().getCount());
-309    */
-310  }
+257    rsDispatcher.setMockRsExecutor(new SocketTimeoutRsExecutor(20, 1));
+258    // exception.expect(ServerCrashException.class);
+259    waitOnFuture(submitProcedure(am.createUnassignProcedure(hri, null, false)));
+260
+261    assertEquals(assignSubmittedCount + 1, assignProcMetrics.getSubmittedCounter().getCount());
+262    assertEquals(assignFailedCount, assignProcMetrics.getFailedCounter().getCount());
+263    assertEquals(unassignSubmittedCount + 1, unassignProcMetrics.getSubmittedCounter().getCount());
+264    assertEquals(unassignFailedCount + 1, unassignProcMetrics.getFailedCounter().getCount());
+265  }
+266
+267  @Test
+268  public void testServerNotYetRunning() throws Exception {
+269    testRetriesExhaustedFailure(TableName.valueOf(this.name.getMethodName()),
+270      new ServerNotYetRunningRsExecutor());
+271  }
+272
+273  private void testRetriesExhaustedFailure(final TableName tableName,
+274      final MockRSExecutor executor) throws Exception {
+275    final HRegionInfo hri = 

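The functional change in this hunk is the test disabled via @Ignore plus the relaxed SocketTimeoutRsExecutor expectations. For reference, the JUnit 4 idiom the commit uses to park a test looks like this sketch (hypothetical class, not the project's code):

import org.junit.Ignore;
import org.junit.Test;

public class ExampleTimeoutTest {
  @Ignore @Test // Disabled for now; re-enable once the underlying mock is fixed.
  public void testSocketTimeout() throws Exception {
    // The JUnit runner reports this method as skipped while @Ignore is present.
  }
}
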
[04/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e9db7c5d/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestStore.MyScannerHook.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestStore.MyScannerHook.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestStore.MyScannerHook.html
index a1d9f98..9ce2441 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestStore.MyScannerHook.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestStore.MyScannerHook.html
@@ -28,1446 +28,1556 @@
 020package org.apache.hadoop.hbase.regionserver;
 021
 022import static org.junit.Assert.assertEquals;
-023import static org.junit.Assert.assertNull;
-024import static org.junit.Assert.assertTrue;
-025import static org.mockito.Matchers.any;
-026import static org.mockito.Mockito.spy;
-027import static org.mockito.Mockito.times;
-028import static org.mockito.Mockito.verify;
-029
-030import java.io.IOException;
-031import java.lang.ref.SoftReference;
-032import java.security.PrivilegedExceptionAction;
-033import java.util.ArrayList;
-034import java.util.Arrays;
-035import java.util.Collection;
-036import java.util.Collections;
-037import java.util.Iterator;
-038import java.util.List;
-039import java.util.ListIterator;
-040import java.util.NavigableSet;
-041import java.util.TreeSet;
-042import java.util.concurrent.ConcurrentSkipListSet;
-043import java.util.concurrent.CountDownLatch;
-044import java.util.concurrent.ExecutorService;
-045import java.util.concurrent.Executors;
-046import java.util.concurrent.TimeUnit;
-047import java.util.concurrent.atomic.AtomicBoolean;
-048import java.util.function.Consumer;
-049
-050import org.apache.commons.logging.Log;
-051import org.apache.commons.logging.LogFactory;
-052import org.apache.hadoop.conf.Configuration;
-053import org.apache.hadoop.fs.FSDataOutputStream;
-054import org.apache.hadoop.fs.FileStatus;
-055import org.apache.hadoop.fs.FileSystem;
-056import org.apache.hadoop.fs.FilterFileSystem;
-057import org.apache.hadoop.fs.LocalFileSystem;
-058import org.apache.hadoop.fs.Path;
-059import org.apache.hadoop.fs.permission.FsPermission;
-060import org.apache.hadoop.hbase.Cell;
-061import org.apache.hadoop.hbase.CellComparator;
-062import org.apache.hadoop.hbase.CellUtil;
-063import org.apache.hadoop.hbase.HBaseConfiguration;
-064import org.apache.hadoop.hbase.HBaseTestingUtility;
-065import org.apache.hadoop.hbase.HColumnDescriptor;
-066import org.apache.hadoop.hbase.HConstants;
-067import org.apache.hadoop.hbase.HRegionInfo;
-068import org.apache.hadoop.hbase.HTableDescriptor;
-069import org.apache.hadoop.hbase.KeyValue;
-070import org.apache.hadoop.hbase.MemoryCompactionPolicy;
-071import org.apache.hadoop.hbase.TableName;
-072import org.apache.hadoop.hbase.client.Get;
-073import org.apache.hadoop.hbase.client.Scan;
-074import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-075import org.apache.hadoop.hbase.io.compress.Compression;
-076import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-077import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-078import org.apache.hadoop.hbase.io.hfile.HFile;
-079import org.apache.hadoop.hbase.io.hfile.HFileContext;
-080import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
-081import org.apache.hadoop.hbase.monitoring.MonitoredTask;
-082import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
-083import org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor;
-084import org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher;
-085import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
-086import org.apache.hadoop.hbase.security.User;
-087import org.apache.hadoop.hbase.testclassification.MediumTests;
-088import org.apache.hadoop.hbase.testclassification.RegionServerTests;
-089import org.apache.hadoop.hbase.util.Bytes;
-090import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-091import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
-092import org.apache.hadoop.hbase.util.FSUtils;
-093import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
-094import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
-095import org.apache.hadoop.hbase.wal.WALFactory;
-096import org.apache.hadoop.util.Progressable;
-097import org.junit.After;
-098import org.junit.Assert;
-099import org.junit.Before;
-100import org.junit.Rule;
-101import org.junit.Test;
-102import org.junit.experimental.categories.Category;
-103import org.junit.rules.TestName;
-104import org.mockito.Mockito;
-105
-106import com.google.common.collect.Lists;
-107import java.util.concurrent.atomic.AtomicInteger;
-108
-109/**
-110 * Test class for the Store
-111 */
-112@Category({RegionServerTests.class, MediumTests.class})
-113public class TestStore 

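This file, like the TestMetaTableAccessor excerpt earlier, relies on HBase's test classification annotations. A minimal sketch of that pattern with a hypothetical class name, assuming the import locations shown in the excerpts above:

import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category({ RegionServerTests.class, MediumTests.class })
public class ExampleStoreTest {
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(ExampleStoreTest.class); // class rule keyed to the category, as in TestMetaTableAccessor above

  @Test
  public void testNothing() {
    // placeholder body; real tests exercise the store as in the diff above
  }
}
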
[04/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.html
index 8f41f08d..d6ee5b4 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.html
@@ -34,7 +34,7 @@
 026import org.apache.hadoop.hbase.procedure.SimpleMasterProcedureManager;
 027import org.apache.hadoop.hbase.procedure.SimpleRSProcedureManager;
 028import org.apache.hadoop.hbase.testclassification.ClientTests;
-029import org.apache.hadoop.hbase.testclassification.MediumTests;
+029import org.apache.hadoop.hbase.testclassification.LargeTests;
 030import org.apache.hadoop.hbase.util.Bytes;
 031import org.junit.Assert;
 032import org.junit.BeforeClass;
@@ -45,85 +45,86 @@
 037import java.util.Map;
 038import java.util.Random;
 039
-040import static org.junit.Assert.assertArrayEquals;
-041import static org.junit.Assert.assertFalse;
-042import static org.junit.Assert.assertTrue;
-043
-044/**
-045 * Class to test asynchronous procedure admin operations.
-046 */
-047@Category({ MediumTests.class, ClientTests.class })
-048public class TestAsyncProcedureAdminApi extends TestAsyncAdminBase {
-049
-050  @BeforeClass
-051  public static void setUpBeforeClass() throws Exception {
-052    TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_PAUSE, 10);
-053    TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 3);
-054    TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 1000);
-055    TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 3000);
-056    TEST_UTIL.getConfiguration().set(ProcedureManagerHost.MASTER_PROCEDURE_CONF_KEY,
-057      SimpleMasterProcedureManager.class.getName());
-058    TEST_UTIL.getConfiguration().set(ProcedureManagerHost.REGIONSERVER_PROCEDURE_CONF_KEY,
-059      SimpleRSProcedureManager.class.getName());
-060    TEST_UTIL.getConfiguration().setBoolean(SnapshotManager.HBASE_SNAPSHOT_ENABLED, true);
-061    TEST_UTIL.startMiniCluster(2);
-062    ASYNC_CONN = ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get();
-063  }
-064
-065  @Test
-066  public void testExecProcedure() throws Exception {
-067    TableName tableName = TableName.valueOf("testExecProcedure");
-068    try {
-069      Table table = TEST_UTIL.createTable(tableName, Bytes.toBytes("cf"));
-070      for (int i = 0; i < 100; i++) {
-071        Put put = new Put(Bytes.toBytes(i)).addColumn(Bytes.toBytes("cf"), null, Bytes.toBytes(i));
-072        table.put(put);
-073      }
-074      // take a snapshot of the enabled table
-075      String snapshotString = "offlineTableSnapshot";
-076      Map<String, String> props = new HashMap<>();
-077      props.put("table", tableName.getNameAsString());
-078      admin.execProcedure(SnapshotManager.ONLINE_SNAPSHOT_CONTROLLER_DESCRIPTION, snapshotString,
-079        props).get();
-080      LOG.debug("Snapshot completed.");
-081    } finally {
-082      TEST_UTIL.deleteTable(tableName);
-083    }
-084  }
-085
-086  @Test
-087  public void testExecProcedureWithRet() throws Exception {
-088    byte[] result = admin.execProcedureWithRet(SimpleMasterProcedureManager.SIMPLE_SIGNATURE,
-089      "myTest2", new HashMap<>()).get();
-090    assertArrayEquals("Incorrect return data from execProcedure",
-091      SimpleMasterProcedureManager.SIMPLE_DATA.getBytes(), result);
-092  }
-093
-094  @Test
-095  public void listProcedure() throws Exception {
-096    ProcedureInfo[] procList = admin.listProcedures().get();
-097    assertTrue(procList.length >= 0);
-098  }
-099
-100  @Test
-101  public void isProcedureFinished() throws Exception {
-102    boolean failed = false;
-103    try {
-104      admin.isProcedureFinished("fake-signature", "fake-instance", new HashMap<>()).get();
-105    } catch (Exception e) {
-106      failed = true;
-107    }
-108    Assert.assertTrue(failed);
-109  }
-110
-111  @Test
-112  public void abortProcedure() throws Exception {
-113    Random randomGenerator = new Random();
-114    long procId = randomGenerator.nextLong();
-115    boolean abortResult = admin.abortProcedure(procId, true).get();
-116    assertFalse(abortResult);
-117  }
-118}
+040import static org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER_COUNT_KEY;
+041import static org.junit.Assert.assertArrayEquals;
+042import static org.junit.Assert.assertFalse;
+043import static org.junit.Assert.assertTrue;
+044
+045/**
+046 * Class to test asynchronous procedure admin operations.
+047 */
+048@Category({ LargeTests.class,